diff --git a/.gitignore b/.gitignore index 7ed1fe2..7bae5d0 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,11 @@ # emacs *~ +\#* + +# python +__pycache__ +*.pyc + +# other +*.ics + diff --git a/libs/PyYAML-3.10/CHANGES b/libs/PyYAML-3.10/CHANGES deleted file mode 100644 index 8bc18b2..0000000 --- a/libs/PyYAML-3.10/CHANGES +++ /dev/null @@ -1,140 +0,0 @@ - -For a complete Subversion changelog, see 'http://pyyaml.org/log/pyyaml'. - -3.10 (2011-05-30) ------------------ - -* Do not try to build LibYAML bindings on platforms other than CPython - (Thank to olt(at)bogosoft(dot)com). -* Clear cyclic references in the parser and the emitter - (Thank to kristjan(at)ccpgames(dot)com). -* Dropped support for Python 2.3 and 2.4. - -3.09 (2009-08-31) ------------------ - -* Fixed an obscure scanner error not reported when there is - no line break at the end of the stream (Thank to Ingy). -* Fixed use of uninitialized memory when emitting anchors with - LibYAML bindings (Thank to cegner(at)yahoo-inc(dot)com). -* Fixed emitting incorrect BOM characters for UTF-16 (Thank to - Valentin Nechayev) -* Fixed the emitter for folded scalars not respecting the preferred - line width (Thank to Ingy). -* Fixed a subtle ordering issue with emitting '%TAG' directives - (Thank to Andrey Somov). -* Fixed performance regression with LibYAML bindings. - - -3.08 (2008-12-31) ------------------ - -* Python 3 support (Thank to Erick Tryzelaar). -* Use Cython instead of Pyrex to build LibYAML bindings. -* Refactored support for unicode and byte input/output streams. - - -3.07 (2008-12-29) ------------------ - -* The emitter learned to use an optional indentation indicator - for block scalar; thus scalars with leading whitespaces - could now be represented in a literal or folded style. -* The test suite is now included in the source distribution. - To run the tests, type 'python setup.py test'. -* Refactored the test suite: dropped unittest in favor of - a custom test appliance. 
-* Fixed the path resolver in CDumper. -* Forced an explicit document end indicator when there is - a possibility of parsing ambiguity. -* More setup.py improvements: the package should be usable - when any combination of setuptools, Pyrex and LibYAML - is installed. -* Windows binary packages are built against LibYAML-0.1.2. -* Minor typos and corrections (Thank to Ingy dot Net - and Andrey Somov). - - -3.06 (2008-10-03) ------------------ - -* setup.py checks whether LibYAML is installed and if so, builds - and installs LibYAML bindings. To force or disable installation - of LibYAML bindings, use '--with-libyaml' or '--without-libyaml' - respectively. -* The source distribution includes compiled Pyrex sources so - building LibYAML bindings no longer requires Pyrex installed. -* 'yaml.load()' raises an exception if the input stream contains - more than one YAML document. -* Fixed exceptions produced by LibYAML bindings. -* Fixed a dot '.' character being recognized as !!float. -* Fixed Python 2.3 compatibility issue in constructing !!timestamp values. -* Windows binary packages are built against the LibYAML stable branch. -* Added attributes 'yaml.__version__' and 'yaml.__with_libyaml__'. - - -3.05 (2007-05-13) ------------------ - -* Windows binary packages were built with LibYAML trunk. -* Fixed a bug that prevent processing a live stream of YAML documents in - timely manner (Thanks edward(at)sweetbytes(dot)net). -* Fixed a bug when the path in add_path_resolver contains boolean values - (Thanks jstroud(at)mbi(dot)ucla(dot)edu). -* Fixed loss of microsecond precision in timestamps - (Thanks edemaine(at)mit(dot)edu). -* Fixed loading an empty YAML stream. -* Allowed immutable subclasses of YAMLObject. -* Made the encoding of the unicode->str conversion explicit so that - the conversion does not depend on the default Python encoding. -* Forced emitting float values in a YAML compatible form. 
- - -3.04 (2006-08-20) ------------------ - -* Include experimental LibYAML bindings. -* Fully support recursive structures. -* Sort dictionary keys. Mapping node values are now represented - as lists of pairs instead of dictionaries. No longer check - for duplicate mapping keys as it didn't work correctly anyway. -* Fix invalid output of single-quoted scalars in cases when a single - quote is not escaped when preceeded by whitespaces or line breaks. -* To make porting easier, rewrite Parser not using generators. -* Fix handling of unexpected block mapping values. -* Fix a bug in Representer.represent_object: copy_reg.dispatch_table - was not correctly handled. -* Fix a bug when a block scalar is incorrectly emitted in the simple - key context. -* Hold references to the objects being represented. -* Make Representer not try to guess !!pairs when a list is represented. -* Fix timestamp constructing and representing. -* Fix the 'N' plain scalar being incorrectly recognized as !!bool. - - -3.03 (2006-06-19) ------------------ - -* Fix Python 2.5 compatibility issues. -* Fix numerous bugs in the float handling. -* Fix scanning some ill-formed documents. -* Other minor fixes. - - -3.02 (2006-05-15) ------------------ - -* Fix win32 installer. Apparently bdist_wininst does not work well - under Linux. -* Fix a bug in add_path_resolver. -* Add the yaml-highlight example. Try to run on a color terminal: - `python yaml_hl.py >> yaml.load(stream, Loader=yaml.CLoader) - >>> yaml.dump(data, Dumper=yaml.CDumper) - -PyYAML includes a comprehensive test suite. To run the tests, -type 'python setup.py test'. - -For more information, check the PyYAML homepage: -'http://pyyaml.org/wiki/PyYAML'. - -For PyYAML tutorial and reference, see: -'http://pyyaml.org/wiki/PyYAMLDocumentation'. - -Post your questions and opinions to the YAML-Core mailing list: -'http://lists.sourceforge.net/lists/listinfo/yaml-core'. 
- -Submit bug reports and feature requests to the PyYAML bug tracker: -'http://pyyaml.org/newticket?component=pyyaml'. - -PyYAML is written by Kirill Simonov . It is released -under the MIT license. See the file LICENSE for more details. - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/__init__.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/__init__.py deleted file mode 100644 index f977f46..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/__init__.py +++ /dev/null @@ -1,315 +0,0 @@ - -from error import * - -from tokens import * -from events import * -from nodes import * - -from loader import * -from dumper import * - -__version__ = '3.10' - -try: - from cyaml import * - __with_libyaml__ = True -except ImportError: - __with_libyaml__ = False - -def scan(stream, Loader=Loader): - """ - Scan a YAML stream and produce scanning tokens. - """ - loader = Loader(stream) - try: - while loader.check_token(): - yield loader.get_token() - finally: - loader.dispose() - -def parse(stream, Loader=Loader): - """ - Parse a YAML stream and produce parsing events. - """ - loader = Loader(stream) - try: - while loader.check_event(): - yield loader.get_event() - finally: - loader.dispose() - -def compose(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding representation tree. - """ - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() - -def compose_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding representation trees. - """ - loader = Loader(stream) - try: - while loader.check_node(): - yield loader.get_node() - finally: - loader.dispose() - -def load(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. 
- """ - loader = Loader(stream) - try: - return loader.get_single_data() - finally: - loader.dispose() - -def load_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - """ - loader = Loader(stream) - try: - while loader.check_data(): - yield loader.get_data() - finally: - loader.dispose() - -def safe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - Resolve only basic YAML tags. - """ - return load(stream, SafeLoader) - -def safe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - Resolve only basic YAML tags. - """ - return load_all(stream, SafeLoader) - -def emit(events, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - """ - Emit YAML parsing events into a stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - from StringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - try: - for event in events: - dumper.emit(event) - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize_all(nodes, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding='utf-8', explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of representation trees into a YAML stream. - If stream is None, return the produced string instead. 
- """ - getvalue = None - if stream is None: - if encoding is None: - from StringIO import StringIO - else: - from cStringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for node in nodes: - dumper.serialize(node) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize(node, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a representation tree into a YAML stream. - If stream is None, return the produced string instead. - """ - return serialize_all([node], stream, Dumper=Dumper, **kwds) - -def dump_all(documents, stream=None, Dumper=Dumper, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding='utf-8', explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of Python objects into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - from StringIO import StringIO - else: - from cStringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for data in documents: - dumper.represent(data) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def dump(data, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a Python object into a YAML stream. 
- If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=Dumper, **kwds) - -def safe_dump_all(documents, stream=None, **kwds): - """ - Serialize a sequence of Python objects into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all(documents, stream, Dumper=SafeDumper, **kwds) - -def safe_dump(data, stream=None, **kwds): - """ - Serialize a Python object into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=SafeDumper, **kwds) - -def add_implicit_resolver(tag, regexp, first=None, - Loader=Loader, Dumper=Dumper): - """ - Add an implicit scalar detector. - If an implicit scalar value matches the given regexp, - the corresponding tag is assigned to the scalar. - first is a sequence of possible initial characters or None. - """ - Loader.add_implicit_resolver(tag, regexp, first) - Dumper.add_implicit_resolver(tag, regexp, first) - -def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): - """ - Add a path based resolver for the given tag. - A path is a list of keys that forms a path - to a node in the representation tree. - Keys can be string values, integers, or None. - """ - Loader.add_path_resolver(tag, path, kind) - Dumper.add_path_resolver(tag, path, kind) - -def add_constructor(tag, constructor, Loader=Loader): - """ - Add a constructor for the given tag. - Constructor is a function that accepts a Loader instance - and a node object and produces the corresponding Python object. - """ - Loader.add_constructor(tag, constructor) - -def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): - """ - Add a multi-constructor for the given tag prefix. - Multi-constructor is called for a node if its tag starts with tag_prefix. 
- Multi-constructor accepts a Loader instance, a tag suffix, - and a node object and produces the corresponding Python object. - """ - Loader.add_multi_constructor(tag_prefix, multi_constructor) - -def add_representer(data_type, representer, Dumper=Dumper): - """ - Add a representer for the given type. - Representer is a function accepting a Dumper instance - and an instance of the given data type - and producing the corresponding representation node. - """ - Dumper.add_representer(data_type, representer) - -def add_multi_representer(data_type, multi_representer, Dumper=Dumper): - """ - Add a representer for the given type. - Multi-representer is a function accepting a Dumper instance - and an instance of the given data type or subtype - and producing the corresponding representation node. - """ - Dumper.add_multi_representer(data_type, multi_representer) - -class YAMLObjectMetaclass(type): - """ - The metaclass for YAMLObject. - """ - def __init__(cls, name, bases, kwds): - super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) - if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: - cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) - cls.yaml_dumper.add_representer(cls, cls.to_yaml) - -class YAMLObject(object): - """ - An object that can dump itself to a YAML stream - and load itself from a YAML stream. - """ - - __metaclass__ = YAMLObjectMetaclass - __slots__ = () # no direct instantiation, so allow immutable subclasses - - yaml_loader = Loader - yaml_dumper = Dumper - - yaml_tag = None - yaml_flow_style = None - - def from_yaml(cls, loader, node): - """ - Convert a representation node to a Python object. - """ - return loader.construct_yaml_object(node, cls) - from_yaml = classmethod(from_yaml) - - def to_yaml(cls, dumper, data): - """ - Convert a Python object to a representation node. 
- """ - return dumper.represent_yaml_object(cls.yaml_tag, data, cls, - flow_style=cls.yaml_flow_style) - to_yaml = classmethod(to_yaml) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/composer.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/composer.py deleted file mode 100644 index 06e5ac7..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/composer.py +++ /dev/null @@ -1,139 +0,0 @@ - -__all__ = ['Composer', 'ComposerError'] - -from error import MarkedYAMLError -from events import * -from nodes import * - -class ComposerError(MarkedYAMLError): - pass - -class Composer(object): - - def __init__(self): - self.anchors = {} - - def check_node(self): - # Drop the STREAM-START event. - if self.check_event(StreamStartEvent): - self.get_event() - - # If there are more documents available? - return not self.check_event(StreamEndEvent) - - def get_node(self): - # Get the root node of the next document. - if not self.check_event(StreamEndEvent): - return self.compose_document() - - def get_single_node(self): - # Drop the STREAM-START event. - self.get_event() - - # Compose a document if the stream is not empty. - document = None - if not self.check_event(StreamEndEvent): - document = self.compose_document() - - # Ensure that the stream contains no more documents. - if not self.check_event(StreamEndEvent): - event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) - - # Drop the STREAM-END event. - self.get_event() - - return document - - def compose_document(self): - # Drop the DOCUMENT-START event. - self.get_event() - - # Compose the root node. - node = self.compose_node(None, None) - - # Drop the DOCUMENT-END event. 
- self.get_event() - - self.anchors = {} - return node - - def compose_node(self, parent, index): - if self.check_event(AliasEvent): - event = self.get_event() - anchor = event.anchor - if anchor not in self.anchors: - raise ComposerError(None, None, "found undefined alias %r" - % anchor.encode('utf-8'), event.start_mark) - return self.anchors[anchor] - event = self.peek_event() - anchor = event.anchor - if anchor is not None: - if anchor in self.anchors: - raise ComposerError("found duplicate anchor %r; first occurence" - % anchor.encode('utf-8'), self.anchors[anchor].start_mark, - "second occurence", event.start_mark) - self.descend_resolver(parent, index) - if self.check_event(ScalarEvent): - node = self.compose_scalar_node(anchor) - elif self.check_event(SequenceStartEvent): - node = self.compose_sequence_node(anchor) - elif self.check_event(MappingStartEvent): - node = self.compose_mapping_node(anchor) - self.ascend_resolver() - return node - - def compose_scalar_node(self, anchor): - event = self.get_event() - tag = event.tag - if tag is None or tag == u'!': - tag = self.resolve(ScalarNode, event.value, event.implicit) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) - if anchor is not None: - self.anchors[anchor] = node - return node - - def compose_sequence_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == u'!': - tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - index = 0 - while not self.check_event(SequenceEndEvent): - node.value.append(self.compose_node(node, index)) - index += 1 - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - - def compose_mapping_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == u'!': 
- tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() - item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) - item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value - node.value.append((item_key, item_value)) - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/constructor.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/constructor.py deleted file mode 100644 index 635faac..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/constructor.py +++ /dev/null @@ -1,675 +0,0 @@ - -__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', - 'ConstructorError'] - -from error import * -from nodes import * - -import datetime - -import binascii, re, sys, types - -class ConstructorError(MarkedYAMLError): - pass - -class BaseConstructor(object): - - yaml_constructors = {} - yaml_multi_constructors = {} - - def __init__(self): - self.constructed_objects = {} - self.recursive_objects = {} - self.state_generators = [] - self.deep_construct = False - - def check_data(self): - # If there are more documents available? - return self.check_node() - - def get_data(self): - # Construct and return the next document. - if self.check_node(): - return self.construct_document(self.get_node()) - - def get_single_data(self): - # Ensure that the stream contains a single document and construct it. 
- node = self.get_single_node() - if node is not None: - return self.construct_document(node) - return None - - def construct_document(self, node): - data = self.construct_object(node) - while self.state_generators: - state_generators = self.state_generators - self.state_generators = [] - for generator in state_generators: - for dummy in generator: - pass - self.constructed_objects = {} - self.recursive_objects = {} - self.deep_construct = False - return data - - def construct_object(self, node, deep=False): - if node in self.constructed_objects: - return self.constructed_objects[node] - if deep: - old_deep = self.deep_construct - self.deep_construct = True - if node in self.recursive_objects: - raise ConstructorError(None, None, - "found unconstructable recursive node", node.start_mark) - self.recursive_objects[node] = None - constructor = None - tag_suffix = None - if node.tag in self.yaml_constructors: - constructor = self.yaml_constructors[node.tag] - else: - for tag_prefix in self.yaml_multi_constructors: - if node.tag.startswith(tag_prefix): - tag_suffix = node.tag[len(tag_prefix):] - constructor = self.yaml_multi_constructors[tag_prefix] - break - else: - if None in self.yaml_multi_constructors: - tag_suffix = node.tag - constructor = self.yaml_multi_constructors[None] - elif None in self.yaml_constructors: - constructor = self.yaml_constructors[None] - elif isinstance(node, ScalarNode): - constructor = self.__class__.construct_scalar - elif isinstance(node, SequenceNode): - constructor = self.__class__.construct_sequence - elif isinstance(node, MappingNode): - constructor = self.__class__.construct_mapping - if tag_suffix is None: - data = constructor(self, node) - else: - data = constructor(self, tag_suffix, node) - if isinstance(data, types.GeneratorType): - generator = data - data = generator.next() - if self.deep_construct: - for dummy in generator: - pass - else: - self.state_generators.append(generator) - self.constructed_objects[node] = data - del 
self.recursive_objects[node] - if deep: - self.deep_construct = old_deep - return data - - def construct_scalar(self, node): - if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) - return node.value - - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] - - def construct_mapping(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - mapping = {} - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - try: - hash(key) - except TypeError, exc: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unacceptable key (%s)" % exc, key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - mapping[key] = value - return mapping - - def construct_pairs(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - pairs = [] - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - value = self.construct_object(value_node, deep=deep) - pairs.append((key, value)) - return pairs - - def add_constructor(cls, tag, constructor): - if not 'yaml_constructors' in cls.__dict__: - cls.yaml_constructors = cls.yaml_constructors.copy() - cls.yaml_constructors[tag] = constructor - add_constructor = classmethod(add_constructor) - - def add_multi_constructor(cls, tag_prefix, multi_constructor): - if not 'yaml_multi_constructors' in cls.__dict__: - cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() - 
cls.yaml_multi_constructors[tag_prefix] = multi_constructor - add_multi_constructor = classmethod(add_multi_constructor) - -class SafeConstructor(BaseConstructor): - - def construct_scalar(self, node): - if isinstance(node, MappingNode): - for key_node, value_node in node.value: - if key_node.tag == u'tag:yaml.org,2002:value': - return self.construct_scalar(value_node) - return BaseConstructor.construct_scalar(self, node) - - def flatten_mapping(self, node): - merge = [] - index = 0 - while index < len(node.value): - key_node, value_node = node.value[index] - if key_node.tag == u'tag:yaml.org,2002:merge': - del node.value[index] - if isinstance(value_node, MappingNode): - self.flatten_mapping(value_node) - merge.extend(value_node.value) - elif isinstance(value_node, SequenceNode): - submerge = [] - for subnode in value_node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) - self.flatten_mapping(subnode) - submerge.append(subnode.value) - submerge.reverse() - for value in submerge: - merge.extend(value) - else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) - elif key_node.tag == u'tag:yaml.org,2002:value': - key_node.tag = u'tag:yaml.org,2002:str' - index += 1 - else: - index += 1 - if merge: - node.value = merge + node.value - - def construct_mapping(self, node, deep=False): - if isinstance(node, MappingNode): - self.flatten_mapping(node) - return BaseConstructor.construct_mapping(self, node, deep=deep) - - def construct_yaml_null(self, node): - self.construct_scalar(node) - return None - - bool_values = { - u'yes': True, - u'no': False, - u'true': True, - u'false': False, - u'on': True, - u'off': False, - } - - def construct_yaml_bool(self, node): - value = 
self.construct_scalar(node) - return self.bool_values[value.lower()] - - def construct_yaml_int(self, node): - value = str(self.construct_scalar(node)) - value = value.replace('_', '') - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '0': - return 0 - elif value.startswith('0b'): - return sign*int(value[2:], 2) - elif value.startswith('0x'): - return sign*int(value[2:], 16) - elif value[0] == '0': - return sign*int(value, 8) - elif ':' in value: - digits = [int(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*int(value) - - inf_value = 1e300 - while inf_value != inf_value*inf_value: - inf_value *= inf_value - nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). - - def construct_yaml_float(self, node): - value = str(self.construct_scalar(node)) - value = value.replace('_', '').lower() - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '.inf': - return sign*self.inf_value - elif value == '.nan': - return self.nan_value - elif ':' in value: - digits = [float(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0.0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*float(value) - - def construct_yaml_binary(self, node): - value = self.construct_scalar(node) - try: - return str(value).decode('base64') - except (binascii.Error, UnicodeEncodeError), exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - timestamp_regexp = re.compile( - ur'''^(?P[0-9][0-9][0-9][0-9]) - -(?P[0-9][0-9]?) - -(?P[0-9][0-9]?) - (?:(?:[Tt]|[ \t]+) - (?P[0-9][0-9]?) - :(?P[0-9][0-9]) - :(?P[0-9][0-9]) - (?:\.(?P[0-9]*))? - (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) 
- (?::(?P[0-9][0-9]))?))?)?$''', re.X) - - def construct_yaml_timestamp(self, node): - value = self.construct_scalar(node) - match = self.timestamp_regexp.match(node.value) - values = match.groupdict() - year = int(values['year']) - month = int(values['month']) - day = int(values['day']) - if not values['hour']: - return datetime.date(year, month, day) - hour = int(values['hour']) - minute = int(values['minute']) - second = int(values['second']) - fraction = 0 - if values['fraction']: - fraction = values['fraction'][:6] - while len(fraction) < 6: - fraction += '0' - fraction = int(fraction) - delta = None - if values['tz_sign']: - tz_hour = int(values['tz_hour']) - tz_minute = int(values['tz_minute'] or 0) - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) - if values['tz_sign'] == '-': - delta = -delta - data = datetime.datetime(year, month, day, hour, minute, second, fraction) - if delta: - data -= delta - return data - - def construct_yaml_omap(self, node): - # Note: we do not check for duplicate keys, because it's too - # CPU-expensive. - omap = [] - yield omap - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - omap.append((key, value)) - - def construct_yaml_pairs(self, node): - # Note: the same code as `construct_yaml_omap`. 
- pairs = [] - yield pairs - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - pairs.append((key, value)) - - def construct_yaml_set(self, node): - data = set() - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_str(self, node): - value = self.construct_scalar(node) - try: - return value.encode('ascii') - except UnicodeEncodeError: - return value - - def construct_yaml_seq(self, node): - data = [] - yield data - data.extend(self.construct_sequence(node)) - - def construct_yaml_map(self, node): - data = {} - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_object(self, node, cls): - data = cls.__new__(cls) - yield data - if hasattr(data, '__setstate__'): - state = self.construct_mapping(node, deep=True) - data.__setstate__(state) - else: - state = self.construct_mapping(node) - data.__dict__.update(state) - - def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'), - node.start_mark) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:null', - SafeConstructor.construct_yaml_null) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:bool', - SafeConstructor.construct_yaml_bool) - 
-SafeConstructor.add_constructor( - u'tag:yaml.org,2002:int', - SafeConstructor.construct_yaml_int) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:float', - SafeConstructor.construct_yaml_float) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:binary', - SafeConstructor.construct_yaml_binary) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:timestamp', - SafeConstructor.construct_yaml_timestamp) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:omap', - SafeConstructor.construct_yaml_omap) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:pairs', - SafeConstructor.construct_yaml_pairs) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:set', - SafeConstructor.construct_yaml_set) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:str', - SafeConstructor.construct_yaml_str) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:seq', - SafeConstructor.construct_yaml_seq) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:map', - SafeConstructor.construct_yaml_map) - -SafeConstructor.add_constructor(None, - SafeConstructor.construct_undefined) - -class Constructor(SafeConstructor): - - def construct_python_str(self, node): - return self.construct_scalar(node).encode('utf-8') - - def construct_python_unicode(self, node): - return self.construct_scalar(node) - - def construct_python_long(self, node): - return long(self.construct_yaml_int(node)) - - def construct_python_complex(self, node): - return complex(self.construct_scalar(node)) - - def construct_python_tuple(self, node): - return tuple(self.construct_sequence(node)) - - def find_python_module(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python module", mark, - "expected non-empty name appended to the tag", mark) - try: - __import__(name) - except ImportError, exc: - raise ConstructorError("while constructing a Python module", mark, - "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark) - return 
sys.modules[name] - - def find_python_name(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python object", mark, - "expected non-empty name appended to the tag", mark) - if u'.' in name: - module_name, object_name = name.rsplit('.', 1) - else: - module_name = '__builtin__' - object_name = name - try: - __import__(module_name) - except ImportError, exc: - raise ConstructorError("while constructing a Python object", mark, - "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark) - module = sys.modules[module_name] - if not hasattr(module, object_name): - raise ConstructorError("while constructing a Python object", mark, - "cannot find %r in the module %r" % (object_name.encode('utf-8'), - module.__name__), mark) - return getattr(module, object_name) - - def construct_python_name(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python name", node.start_mark, - "expected the empty value, but found %r" % value.encode('utf-8'), - node.start_mark) - return self.find_python_name(suffix, node.start_mark) - - def construct_python_module(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python module", node.start_mark, - "expected the empty value, but found %r" % value.encode('utf-8'), - node.start_mark) - return self.find_python_module(suffix, node.start_mark) - - class classobj: pass - - def make_python_instance(self, suffix, node, - args=None, kwds=None, newobj=False): - if not args: - args = [] - if not kwds: - kwds = {} - cls = self.find_python_name(suffix, node.start_mark) - if newobj and isinstance(cls, type(self.classobj)) \ - and not args and not kwds: - instance = self.classobj() - instance.__class__ = cls - return instance - elif newobj and isinstance(cls, type): - return cls.__new__(cls, *args, **kwds) - else: - return cls(*args, **kwds) - - def set_python_instance_state(self, 
instance, state): - if hasattr(instance, '__setstate__'): - instance.__setstate__(state) - else: - slotstate = {} - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if hasattr(instance, '__dict__'): - instance.__dict__.update(state) - elif state: - slotstate.update(state) - for key, value in slotstate.items(): - setattr(object, key, value) - - def construct_python_object(self, suffix, node): - # Format: - # !!python/object:module.name { ... state ... } - instance = self.make_python_instance(suffix, node, newobj=True) - yield instance - deep = hasattr(instance, '__setstate__') - state = self.construct_mapping(node, deep=deep) - self.set_python_instance_state(instance, state) - - def construct_python_object_apply(self, suffix, node, newobj=False): - # Format: - # !!python/object/apply # (or !!python/object/new) - # args: [ ... arguments ... ] - # kwds: { ... keywords ... } - # state: ... state ... - # listitems: [ ... listitems ... ] - # dictitems: { ... dictitems ... } - # or short format: - # !!python/object/apply [ ... arguments ... ] - # The difference between !!python/object/apply and !!python/object/new - # is how an object is created, check make_python_instance for details. 
- if isinstance(node, SequenceNode): - args = self.construct_sequence(node, deep=True) - kwds = {} - state = {} - listitems = [] - dictitems = {} - else: - value = self.construct_mapping(node, deep=True) - args = value.get('args', []) - kwds = value.get('kwds', {}) - state = value.get('state', {}) - listitems = value.get('listitems', []) - dictitems = value.get('dictitems', {}) - instance = self.make_python_instance(suffix, node, args, kwds, newobj) - if state: - self.set_python_instance_state(instance, state) - if listitems: - instance.extend(listitems) - if dictitems: - for key in dictitems: - instance[key] = dictitems[key] - return instance - - def construct_python_object_new(self, suffix, node): - return self.construct_python_object_apply(suffix, node, newobj=True) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/none', - Constructor.construct_yaml_null) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/bool', - Constructor.construct_yaml_bool) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/str', - Constructor.construct_python_str) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/unicode', - Constructor.construct_python_unicode) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/int', - Constructor.construct_yaml_int) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/long', - Constructor.construct_python_long) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/float', - Constructor.construct_yaml_float) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/complex', - Constructor.construct_python_complex) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/list', - Constructor.construct_yaml_seq) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/tuple', - Constructor.construct_python_tuple) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/dict', - Constructor.construct_yaml_map) - -Constructor.add_multi_constructor( - 
u'tag:yaml.org,2002:python/name:', - Constructor.construct_python_name) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/module:', - Constructor.construct_python_module) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object:', - Constructor.construct_python_object) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object/apply:', - Constructor.construct_python_object_apply) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object/new:', - Constructor.construct_python_object_new) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/cyaml.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/cyaml.py deleted file mode 100644 index 68dcd75..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/cyaml.py +++ /dev/null @@ -1,85 +0,0 @@ - -__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', - 'CBaseDumper', 'CSafeDumper', 'CDumper'] - -from _yaml import CParser, CEmitter - -from constructor import * - -from serializer import * -from representer import * - -from resolver import * - -class CBaseLoader(CParser, BaseConstructor, BaseResolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class CSafeLoader(CParser, SafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class CLoader(CParser, Constructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - Constructor.__init__(self) - Resolver.__init__(self) - -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - 
indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CSafeDumper(CEmitter, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CDumper(CEmitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/dumper.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/dumper.py deleted file mode 100644 index f811d2c..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/dumper.py +++ /dev/null @@ -1,62 +0,0 @@ - -__all__ = ['BaseDumper', 'SafeDumper', 
'Dumper'] - -from emitter import * -from serializer import * -from representer import * -from resolver import * - -class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class Dumper(Emitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - 
Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/emitter.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/emitter.py deleted file mode 100644 index e5bcdcc..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/emitter.py +++ /dev/null @@ -1,1140 +0,0 @@ - -# Emitter expects events obeying the following grammar: -# stream ::= STREAM-START document* STREAM-END -# document ::= DOCUMENT-START node DOCUMENT-END -# node ::= SCALAR | sequence | mapping -# sequence ::= SEQUENCE-START node* SEQUENCE-END -# mapping ::= MAPPING-START (node node)* MAPPING-END - -__all__ = ['Emitter', 'EmitterError'] - -from error import YAMLError -from events import * - -class EmitterError(YAMLError): - pass - -class ScalarAnalysis(object): - def __init__(self, scalar, empty, multiline, - allow_flow_plain, allow_block_plain, - allow_single_quoted, allow_double_quoted, - allow_block): - self.scalar = scalar - self.empty = empty - self.multiline = multiline - self.allow_flow_plain = allow_flow_plain - self.allow_block_plain = allow_block_plain - self.allow_single_quoted = allow_single_quoted - self.allow_double_quoted = allow_double_quoted - self.allow_block = allow_block - -class Emitter(object): - - DEFAULT_TAG_PREFIXES = { - u'!' : u'!', - u'tag:yaml.org,2002:' : u'!!', - } - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - - # The stream should have the methods `write` and possibly `flush`. - self.stream = stream - - # Encoding can be overriden by STREAM-START. - self.encoding = None - - # Emitter is a state machine with a stack of states to handle nested - # structures. 
- self.states = [] - self.state = self.expect_stream_start - - # Current event and the event queue. - self.events = [] - self.event = None - - # The current indentation level and the stack of previous indents. - self.indents = [] - self.indent = None - - # Flow level. - self.flow_level = 0 - - # Contexts. - self.root_context = False - self.sequence_context = False - self.mapping_context = False - self.simple_key_context = False - - # Characteristics of the last emitted character: - # - current position. - # - is it a whitespace? - # - is it an indention character - # (indentation space, '-', '?', or ':')? - self.line = 0 - self.column = 0 - self.whitespace = True - self.indention = True - - # Whether the document requires an explicit document indicator - self.open_ended = False - - # Formatting details. - self.canonical = canonical - self.allow_unicode = allow_unicode - self.best_indent = 2 - if indent and 1 < indent < 10: - self.best_indent = indent - self.best_width = 80 - if width and width > self.best_indent*2: - self.best_width = width - self.best_line_break = u'\n' - if line_break in [u'\r', u'\n', u'\r\n']: - self.best_line_break = line_break - - # Tag prefixes. - self.tag_prefixes = None - - # Prepared anchor and tag. - self.prepared_anchor = None - self.prepared_tag = None - - # Scalar analysis and style. - self.analysis = None - self.style = None - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def emit(self, event): - self.events.append(event) - while not self.need_more_events(): - self.event = self.events.pop(0) - self.state() - self.event = None - - # In some cases, we wait for a few next events before emitting. 
- - def need_more_events(self): - if not self.events: - return True - event = self.events[0] - if isinstance(event, DocumentStartEvent): - return self.need_events(1) - elif isinstance(event, SequenceStartEvent): - return self.need_events(2) - elif isinstance(event, MappingStartEvent): - return self.need_events(3) - else: - return False - - def need_events(self, count): - level = 0 - for event in self.events[1:]: - if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): - level += 1 - elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): - level -= 1 - elif isinstance(event, StreamEndEvent): - level = -1 - if level < 0: - return False - return (len(self.events) < count+1) - - def increase_indent(self, flow=False, indentless=False): - self.indents.append(self.indent) - if self.indent is None: - if flow: - self.indent = self.best_indent - else: - self.indent = 0 - elif not indentless: - self.indent += self.best_indent - - # States. - - # Stream handlers. - - def expect_stream_start(self): - if isinstance(self.event, StreamStartEvent): - if self.event.encoding and not getattr(self.stream, 'encoding', None): - self.encoding = self.event.encoding - self.write_stream_start() - self.state = self.expect_first_document_start - else: - raise EmitterError("expected StreamStartEvent, but got %s" - % self.event) - - def expect_nothing(self): - raise EmitterError("expected nothing, but got %s" % self.event) - - # Document handlers. 
- - def expect_first_document_start(self): - return self.expect_document_start(first=True) - - def expect_document_start(self, first=False): - if isinstance(self.event, DocumentStartEvent): - if (self.event.version or self.event.tags) and self.open_ended: - self.write_indicator(u'...', True) - self.write_indent() - if self.event.version: - version_text = self.prepare_version(self.event.version) - self.write_version_directive(version_text) - self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() - if self.event.tags: - handles = self.event.tags.keys() - handles.sort() - for handle in handles: - prefix = self.event.tags[handle] - self.tag_prefixes[prefix] = handle - handle_text = self.prepare_tag_handle(handle) - prefix_text = self.prepare_tag_prefix(prefix) - self.write_tag_directive(handle_text, prefix_text) - implicit = (first and not self.event.explicit and not self.canonical - and not self.event.version and not self.event.tags - and not self.check_empty_document()) - if not implicit: - self.write_indent() - self.write_indicator(u'---', True) - if self.canonical: - self.write_indent() - self.state = self.expect_document_root - elif isinstance(self.event, StreamEndEvent): - if self.open_ended: - self.write_indicator(u'...', True) - self.write_indent() - self.write_stream_end() - self.state = self.expect_nothing - else: - raise EmitterError("expected DocumentStartEvent, but got %s" - % self.event) - - def expect_document_end(self): - if isinstance(self.event, DocumentEndEvent): - self.write_indent() - if self.event.explicit: - self.write_indicator(u'...', True) - self.write_indent() - self.flush_stream() - self.state = self.expect_document_start - else: - raise EmitterError("expected DocumentEndEvent, but got %s" - % self.event) - - def expect_document_root(self): - self.states.append(self.expect_document_end) - self.expect_node(root=True) - - # Node handlers. 
- - def expect_node(self, root=False, sequence=False, mapping=False, - simple_key=False): - self.root_context = root - self.sequence_context = sequence - self.mapping_context = mapping - self.simple_key_context = simple_key - if isinstance(self.event, AliasEvent): - self.expect_alias() - elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): - self.process_anchor(u'&') - self.process_tag() - if isinstance(self.event, ScalarEvent): - self.expect_scalar() - elif isinstance(self.event, SequenceStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_sequence(): - self.expect_flow_sequence() - else: - self.expect_block_sequence() - elif isinstance(self.event, MappingStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_mapping(): - self.expect_flow_mapping() - else: - self.expect_block_mapping() - else: - raise EmitterError("expected NodeEvent, but got %s" % self.event) - - def expect_alias(self): - if self.event.anchor is None: - raise EmitterError("anchor is not specified for alias") - self.process_anchor(u'*') - self.state = self.states.pop() - - def expect_scalar(self): - self.increase_indent(flow=True) - self.process_scalar() - self.indent = self.indents.pop() - self.state = self.states.pop() - - # Flow sequence handlers. 
- - def expect_flow_sequence(self): - self.write_indicator(u'[', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_sequence_item - - def expect_first_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(u']', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - def expect_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(u',', False) - self.write_indent() - self.write_indicator(u']', False) - self.state = self.states.pop() - else: - self.write_indicator(u',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - # Flow mapping handlers. 
- - def expect_flow_mapping(self): - self.write_indicator(u'{', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_mapping_key - - def expect_first_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(u'}', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(u',', False) - self.write_indent() - self.write_indicator(u'}', False) - self.state = self.states.pop() - else: - self.write_indicator(u',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_simple_value(self): - self.write_indicator(u':', False) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - def expect_flow_mapping_value(self): - if self.canonical or self.column > self.best_width: - self.write_indent() - self.write_indicator(u':', True) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - # Block sequence handlers. 
- - def expect_block_sequence(self): - indentless = (self.mapping_context and not self.indention) - self.increase_indent(flow=False, indentless=indentless) - self.state = self.expect_first_block_sequence_item - - def expect_first_block_sequence_item(self): - return self.expect_block_sequence_item(first=True) - - def expect_block_sequence_item(self, first=False): - if not first and isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - self.write_indicator(u'-', True, indention=True) - self.states.append(self.expect_block_sequence_item) - self.expect_node(sequence=True) - - # Block mapping handlers. - - def expect_block_mapping(self): - self.increase_indent(flow=False) - self.state = self.expect_first_block_mapping_key - - def expect_first_block_mapping_key(self): - return self.expect_block_mapping_key(first=True) - - def expect_block_mapping_key(self, first=False): - if not first and isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - if self.check_simple_key(): - self.states.append(self.expect_block_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True, indention=True) - self.states.append(self.expect_block_mapping_value) - self.expect_node(mapping=True) - - def expect_block_mapping_simple_value(self): - self.write_indicator(u':', False) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - def expect_block_mapping_value(self): - self.write_indent() - self.write_indicator(u':', True, indention=True) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - # Checkers. 
- - def check_empty_sequence(self): - return (isinstance(self.event, SequenceStartEvent) and self.events - and isinstance(self.events[0], SequenceEndEvent)) - - def check_empty_mapping(self): - return (isinstance(self.event, MappingStartEvent) and self.events - and isinstance(self.events[0], MappingEndEvent)) - - def check_empty_document(self): - if not isinstance(self.event, DocumentStartEvent) or not self.events: - return False - event = self.events[0] - return (isinstance(event, ScalarEvent) and event.anchor is None - and event.tag is None and event.implicit and event.value == u'') - - def check_simple_key(self): - length = 0 - if isinstance(self.event, NodeEvent) and self.event.anchor is not None: - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - length += len(self.prepared_anchor) - if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ - and self.event.tag is not None: - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(self.event.tag) - length += len(self.prepared_tag) - if isinstance(self.event, ScalarEvent): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - length += len(self.analysis.scalar) - return (length < 128 and (isinstance(self.event, AliasEvent) - or (isinstance(self.event, ScalarEvent) - and not self.analysis.empty and not self.analysis.multiline) - or self.check_empty_sequence() or self.check_empty_mapping())) - - # Anchor, Tag, and Scalar processors. 
- - def process_anchor(self, indicator): - if self.event.anchor is None: - self.prepared_anchor = None - return - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - if self.prepared_anchor: - self.write_indicator(indicator+self.prepared_anchor, True) - self.prepared_anchor = None - - def process_tag(self): - tag = self.event.tag - if isinstance(self.event, ScalarEvent): - if self.style is None: - self.style = self.choose_scalar_style() - if ((not self.canonical or tag is None) and - ((self.style == '' and self.event.implicit[0]) - or (self.style != '' and self.event.implicit[1]))): - self.prepared_tag = None - return - if self.event.implicit[0] and tag is None: - tag = u'!' - self.prepared_tag = None - else: - if (not self.canonical or tag is None) and self.event.implicit: - self.prepared_tag = None - return - if tag is None: - raise EmitterError("tag is not specified") - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(tag) - if self.prepared_tag: - self.write_indicator(self.prepared_tag, True) - self.prepared_tag = None - - def choose_scalar_style(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.event.style == '"' or self.canonical: - return '"' - if not self.event.style and self.event.implicit[0]: - if (not (self.simple_key_context and - (self.analysis.empty or self.analysis.multiline)) - and (self.flow_level and self.analysis.allow_flow_plain - or (not self.flow_level and self.analysis.allow_block_plain))): - return '' - if self.event.style and self.event.style in '|>': - if (not self.flow_level and not self.simple_key_context - and self.analysis.allow_block): - return self.event.style - if not self.event.style or self.event.style == '\'': - if (self.analysis.allow_single_quoted and - not (self.simple_key_context and self.analysis.multiline)): - return '\'' - return '"' - - def process_scalar(self): - if self.analysis is None: - self.analysis 
= self.analyze_scalar(self.event.value) - if self.style is None: - self.style = self.choose_scalar_style() - split = (not self.simple_key_context) - #if self.analysis.multiline and split \ - # and (not self.style or self.style in '\'\"'): - # self.write_indent() - if self.style == '"': - self.write_double_quoted(self.analysis.scalar, split) - elif self.style == '\'': - self.write_single_quoted(self.analysis.scalar, split) - elif self.style == '>': - self.write_folded(self.analysis.scalar) - elif self.style == '|': - self.write_literal(self.analysis.scalar) - else: - self.write_plain(self.analysis.scalar, split) - self.analysis = None - self.style = None - - # Analyzers. - - def prepare_version(self, version): - major, minor = version - if major != 1: - raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) - return u'%d.%d' % (major, minor) - - def prepare_tag_handle(self, handle): - if not handle: - raise EmitterError("tag handle must not be empty") - if handle[0] != u'!' 
or handle[-1] != u'!': - raise EmitterError("tag handle must start and end with '!': %r" - % (handle.encode('utf-8'))) - for ch in handle[1:-1]: - if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_'): - raise EmitterError("invalid character %r in the tag handle: %r" - % (ch.encode('utf-8'), handle.encode('utf-8'))) - return handle - - def prepare_tag_prefix(self, prefix): - if not prefix: - raise EmitterError("tag prefix must not be empty") - chunks = [] - start = end = 0 - if prefix[0] == u'!': - end = 1 - while end < len(prefix): - ch = prefix[end] - if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?!:@&=+$,_.~*\'()[]': - end += 1 - else: - if start < end: - chunks.append(prefix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append(u'%%%02X' % ord(ch)) - if start < end: - chunks.append(prefix[start:end]) - return u''.join(chunks) - - def prepare_tag(self, tag): - if not tag: - raise EmitterError("tag must not be empty") - if tag == u'!': - return tag - handle = None - suffix = tag - prefixes = self.tag_prefixes.keys() - prefixes.sort() - for prefix in prefixes: - if tag.startswith(prefix) \ - and (prefix == u'!' or len(prefix) < len(tag)): - handle = self.tag_prefixes[prefix] - suffix = tag[len(prefix):] - chunks = [] - start = end = 0 - while end < len(suffix): - ch = suffix[end] - if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?:@&=+$,_.~*\'()[]' \ - or (ch == u'!' 
and handle != u'!'): - end += 1 - else: - if start < end: - chunks.append(suffix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append(u'%%%02X' % ord(ch)) - if start < end: - chunks.append(suffix[start:end]) - suffix_text = u''.join(chunks) - if handle: - return u'%s%s' % (handle, suffix_text) - else: - return u'!<%s>' % suffix_text - - def prepare_anchor(self, anchor): - if not anchor: - raise EmitterError("anchor must not be empty") - for ch in anchor: - if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_'): - raise EmitterError("invalid character %r in the anchor: %r" - % (ch.encode('utf-8'), anchor.encode('utf-8'))) - return anchor - - def analyze_scalar(self, scalar): - - # Empty scalar is a special case. - if not scalar: - return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, - allow_flow_plain=False, allow_block_plain=True, - allow_single_quoted=True, allow_double_quoted=True, - allow_block=False) - - # Indicators and special characters. - block_indicators = False - flow_indicators = False - line_breaks = False - special_characters = False - - # Important whitespace combinations. - leading_space = False - leading_break = False - trailing_space = False - trailing_break = False - break_space = False - space_break = False - - # Check document indicators. - if scalar.startswith(u'---') or scalar.startswith(u'...'): - block_indicators = True - flow_indicators = True - - # First character or preceded by a whitespace. - preceeded_by_whitespace = True - - # Last character or followed by a whitespace. - followed_by_whitespace = (len(scalar) == 1 or - scalar[1] in u'\0 \t\r\n\x85\u2028\u2029') - - # The previous character is a space. - previous_space = False - - # The previous character is a break. - previous_break = False - - index = 0 - while index < len(scalar): - ch = scalar[index] - - # Check for indicators. - if index == 0: - # Leading indicators are special characters. 
- if ch in u'#,[]{}&*!|>\'\"%@`': - flow_indicators = True - block_indicators = True - if ch in u'?:': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == u'-' and followed_by_whitespace: - flow_indicators = True - block_indicators = True - else: - # Some indicators cannot appear within a scalar as well. - if ch in u',?[]{}': - flow_indicators = True - if ch == u':': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == u'#' and preceeded_by_whitespace: - flow_indicators = True - block_indicators = True - - # Check for line breaks, special, and unicode characters. - if ch in u'\n\x85\u2028\u2029': - line_breaks = True - if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'): - if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF' - or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF': - unicode_characters = True - if not self.allow_unicode: - special_characters = True - else: - special_characters = True - - # Detect important whitespace combinations. - if ch == u' ': - if index == 0: - leading_space = True - if index == len(scalar)-1: - trailing_space = True - if previous_break: - break_space = True - previous_space = True - previous_break = False - elif ch in u'\n\x85\u2028\u2029': - if index == 0: - leading_break = True - if index == len(scalar)-1: - trailing_break = True - if previous_space: - space_break = True - previous_space = False - previous_break = True - else: - previous_space = False - previous_break = False - - # Prepare for the next character. - index += 1 - preceeded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029') - followed_by_whitespace = (index+1 >= len(scalar) or - scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029') - - # Let's decide what styles are allowed. - allow_flow_plain = True - allow_block_plain = True - allow_single_quoted = True - allow_double_quoted = True - allow_block = True - - # Leading and trailing whitespaces are bad for plain scalars. 
- if (leading_space or leading_break - or trailing_space or trailing_break): - allow_flow_plain = allow_block_plain = False - - # We do not permit trailing spaces for block scalars. - if trailing_space: - allow_block = False - - # Spaces at the beginning of a new line are only acceptable for block - # scalars. - if break_space: - allow_flow_plain = allow_block_plain = allow_single_quoted = False - - # Spaces followed by breaks, as well as special character are only - # allowed for double quoted scalars. - if space_break or special_characters: - allow_flow_plain = allow_block_plain = \ - allow_single_quoted = allow_block = False - - # Although the plain scalar writer supports breaks, we never emit - # multiline plain scalars. - if line_breaks: - allow_flow_plain = allow_block_plain = False - - # Flow indicators are forbidden for flow plain scalars. - if flow_indicators: - allow_flow_plain = False - - # Block indicators are forbidden for block plain scalars. - if block_indicators: - allow_block_plain = False - - return ScalarAnalysis(scalar=scalar, - empty=False, multiline=line_breaks, - allow_flow_plain=allow_flow_plain, - allow_block_plain=allow_block_plain, - allow_single_quoted=allow_single_quoted, - allow_double_quoted=allow_double_quoted, - allow_block=allow_block) - - # Writers. - - def flush_stream(self): - if hasattr(self.stream, 'flush'): - self.stream.flush() - - def write_stream_start(self): - # Write BOM if needed. 
- if self.encoding and self.encoding.startswith('utf-16'): - self.stream.write(u'\uFEFF'.encode(self.encoding)) - - def write_stream_end(self): - self.flush_stream() - - def write_indicator(self, indicator, need_whitespace, - whitespace=False, indention=False): - if self.whitespace or not need_whitespace: - data = indicator - else: - data = u' '+indicator - self.whitespace = whitespace - self.indention = self.indention and indention - self.column += len(data) - self.open_ended = False - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_indent(self): - indent = self.indent or 0 - if not self.indention or self.column > indent \ - or (self.column == indent and not self.whitespace): - self.write_line_break() - if self.column < indent: - self.whitespace = True - data = u' '*(indent-self.column) - self.column = indent - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_line_break(self, data=None): - if data is None: - data = self.best_line_break - self.whitespace = True - self.indention = True - self.line += 1 - self.column = 0 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_version_directive(self, version_text): - data = u'%%YAML %s' % version_text - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - def write_tag_directive(self, handle_text, prefix_text): - data = u'%%TAG %s %s' % (handle_text, prefix_text) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - # Scalar streams. 
- - def write_single_quoted(self, text, split=True): - self.write_indicator(u'\'', True) - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch is None or ch != u' ': - if start+1 == end and self.column > self.best_width and split \ - and start != 0 and end != len(text): - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - if text[start] == u'\n': - self.write_line_break() - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'': - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch == u'\'': - data = u'\'\'' - self.column += 2 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end + 1 - if ch is not None: - spaces = (ch == u' ') - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - self.write_indicator(u'\'', False) - - ESCAPE_REPLACEMENTS = { - u'\0': u'0', - u'\x07': u'a', - u'\x08': u'b', - u'\x09': u't', - u'\x0A': u'n', - u'\x0B': u'v', - u'\x0C': u'f', - u'\x0D': u'r', - u'\x1B': u'e', - u'\"': u'\"', - u'\\': u'\\', - u'\x85': u'N', - u'\xA0': u'_', - u'\u2028': u'L', - u'\u2029': u'P', - } - - def write_double_quoted(self, text, split=True): - self.write_indicator(u'"', True) - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \ - or not (u'\x20' <= ch <= u'\x7E' - or (self.allow_unicode - and (u'\xA0' <= ch <= u'\uD7FF' - or u'\uE000' <= ch <= u'\uFFFD'))): - if 
start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - if ch in self.ESCAPE_REPLACEMENTS: - data = u'\\'+self.ESCAPE_REPLACEMENTS[ch] - elif ch <= u'\xFF': - data = u'\\x%02X' % ord(ch) - elif ch <= u'\uFFFF': - data = u'\\u%04X' % ord(ch) - else: - data = u'\\U%08X' % ord(ch) - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end+1 - if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \ - and self.column+(end-start) > self.best_width and split: - data = text[start:end]+u'\\' - if start < end: - start = end - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_indent() - self.whitespace = False - self.indention = False - if text[start] == u' ': - data = u'\\' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - end += 1 - self.write_indicator(u'"', False) - - def determine_block_hints(self, text): - hints = u'' - if text: - if text[0] in u' \n\x85\u2028\u2029': - hints += unicode(self.best_indent) - if text[-1] not in u'\n\x85\u2028\u2029': - hints += u'-' - elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029': - hints += u'+' - return hints - - def write_folded(self, text): - hints = self.determine_block_hints(text) - self.write_indicator(u'>'+hints, True) - if hints[-1:] == u'+': - self.open_ended = True - self.write_line_break() - leading_space = True - spaces = False - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - if not leading_space and ch is not None and ch != u' ' \ - and text[start] == u'\n': - self.write_line_break() - leading_space = (ch == u' ') - for br in text[start:end]: - if br == u'\n': - 
self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - elif spaces: - if ch != u' ': - if start+1 == end and self.column > self.best_width: - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in u'\n\x85\u2028\u2029') - spaces = (ch == u' ') - end += 1 - - def write_literal(self, text): - hints = self.determine_block_hints(text) - self.write_indicator(u'|'+hints, True) - if hints[-1:] == u'+': - self.open_ended = True - self.write_line_break() - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - else: - if ch is None or ch in u'\n\x85\u2028\u2029': - data = text[start:end] - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - - def write_plain(self, text, split=True): - if self.root_context: - self.open_ended = True - if not text: - return - if not self.whitespace: - data = u' ' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.whitespace = False - self.indention = False - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = 
text[end] - if spaces: - if ch != u' ': - if start+1 == end and self.column > self.best_width and split: - self.write_indent() - self.whitespace = False - self.indention = False - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch not in u'\n\x85\u2028\u2029': - if text[start] == u'\n': - self.write_line_break() - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - self.whitespace = False - self.indention = False - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - spaces = (ch == u' ') - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/error.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/error.py deleted file mode 100644 index 577686d..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/error.py +++ /dev/null @@ -1,75 +0,0 @@ - -__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] - -class Mark(object): - - def __init__(self, name, index, line, column, buffer, pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self, indent=4, max_length=75): - if self.buffer is None: - return None - head = '' - start = self.pointer - while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029': - start -= 1 - if self.pointer-start > max_length/2-1: - head = ' ... ' - start += 5 - break - tail = '' - end = self.pointer - while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029': - end += 1 - if end-self.pointer > max_length/2-1: - tail = ' ... 
' - end -= 5 - break - snippet = self.buffer[start:end].encode('utf-8') - return ' '*indent + head + snippet + tail + '\n' \ - + ' '*(indent+self.pointer-start+len(head)) + '^' - - def __str__(self): - snippet = self.get_snippet() - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - if snippet is not None: - where += ":\n"+snippet - return where - -class YAMLError(Exception): - pass - -class MarkedYAMLError(YAMLError): - - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - self.context = context - self.context_mark = context_mark - self.problem = problem - self.problem_mark = problem_mark - self.note = note - - def __str__(self): - lines = [] - if self.context is not None: - lines.append(self.context) - if self.context_mark is not None \ - and (self.problem is None or self.problem_mark is None - or self.context_mark.name != self.problem_mark.name - or self.context_mark.line != self.problem_mark.line - or self.context_mark.column != self.problem_mark.column): - lines.append(str(self.context_mark)) - if self.problem is not None: - lines.append(self.problem) - if self.problem_mark is not None: - lines.append(str(self.problem_mark)) - if self.note is not None: - lines.append(self.note) - return '\n'.join(lines) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/events.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/events.py deleted file mode 100644 index f79ad38..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/events.py +++ /dev/null @@ -1,86 +0,0 @@ - -# Abstract classes. 
- -class Event(object): - def __init__(self, start_mark=None, end_mark=None): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -class NodeEvent(Event): - def __init__(self, anchor, start_mark=None, end_mark=None): - self.anchor = anchor - self.start_mark = start_mark - self.end_mark = end_mark - -class CollectionStartEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, - flow_style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class CollectionEndEvent(Event): - pass - -# Implementations. - -class StreamStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndEvent(Event): - pass - -class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - self.version = version - self.tags = tags - -class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - -class AliasEvent(NodeEvent): - pass - -class ScalarEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, value, - start_mark=None, end_mark=None, style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class SequenceStartEvent(CollectionStartEvent): - pass 
- -class SequenceEndEvent(CollectionEndEvent): - pass - -class MappingStartEvent(CollectionStartEvent): - pass - -class MappingEndEvent(CollectionEndEvent): - pass - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/loader.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/loader.py deleted file mode 100644 index 293ff46..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/loader.py +++ /dev/null @@ -1,40 +0,0 @@ - -__all__ = ['BaseLoader', 'SafeLoader', 'Loader'] - -from reader import * -from scanner import * -from parser import * -from composer import * -from constructor import * -from resolver import * - -class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/nodes.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/nodes.py deleted file mode 100644 index c4f070c..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/nodes.py +++ /dev/null @@ -1,49 +0,0 @@ - -class Node(object): - def __init__(self, tag, value, start_mark, end_mark): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - value = self.value - #if isinstance(value, 
list): - # if len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' % len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) - value = repr(value) - return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) - -class ScalarNode(Node): - id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class SequenceNode(CollectionNode): - id = 'sequence' - -class MappingNode(CollectionNode): - id = 'mapping' - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/parser.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/parser.py deleted file mode 100644 index f9e3057..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/parser.py +++ /dev/null @@ -1,589 +0,0 @@ - -# The following YAML grammar is LL(1) and is parsed by a recursive descent -# parser. -# -# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -# implicit_document ::= block_node DOCUMENT-END* -# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -# block_node_or_indentless_sequence ::= -# ALIAS -# | properties (block_content | indentless_block_sequence)? -# | block_content -# | indentless_block_sequence -# block_node ::= ALIAS -# | properties block_content? -# | block_content -# flow_node ::= ALIAS -# | properties flow_content? -# | flow_content -# properties ::= TAG ANCHOR? | ANCHOR TAG? 
-# block_content ::= block_collection | flow_collection | SCALAR -# flow_content ::= flow_collection | SCALAR -# block_collection ::= block_sequence | block_mapping -# flow_collection ::= flow_sequence | flow_mapping -# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -# block_mapping ::= BLOCK-MAPPING_START -# ((KEY block_node_or_indentless_sequence?)? -# (VALUE block_node_or_indentless_sequence?)?)* -# BLOCK-END -# flow_sequence ::= FLOW-SEQUENCE-START -# (flow_sequence_entry FLOW-ENTRY)* -# flow_sequence_entry? -# FLOW-SEQUENCE-END -# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# flow_mapping ::= FLOW-MAPPING-START -# (flow_mapping_entry FLOW-ENTRY)* -# flow_mapping_entry? -# FLOW-MAPPING-END -# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# -# FIRST sets: -# -# stream: { STREAM-START } -# explicit_document: { DIRECTIVE DOCUMENT-START } -# implicit_document: FIRST(block_node) -# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_sequence: { BLOCK-SEQUENCE-START } -# block_mapping: { BLOCK-MAPPING-START } -# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } -# indentless_sequence: { ENTRY } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_sequence: { FLOW-SEQUENCE-START } -# flow_mapping: { FLOW-MAPPING-START } -# flow_sequence_entry: { ALIAS ANCHOR 
TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } -# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } - -__all__ = ['Parser', 'ParserError'] - -from error import MarkedYAMLError -from tokens import * -from events import * -from scanner import * - -class ParserError(MarkedYAMLError): - pass - -class Parser(object): - # Since writing a recursive-descendant parser is a straightforward task, we - # do not give many comments here. - - DEFAULT_TAGS = { - u'!': u'!', - u'!!': u'tag:yaml.org,2002:', - } - - def __init__(self): - self.current_event = None - self.yaml_version = None - self.tag_handles = {} - self.states = [] - self.marks = [] - self.state = self.parse_stream_start - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def check_event(self, *choices): - # Check the type of the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - if self.current_event is not None: - if not choices: - return True - for choice in choices: - if isinstance(self.current_event, choice): - return True - return False - - def peek_event(self): - # Get the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - return self.current_event - - def get_event(self): - # Get the next event and proceed further. - if self.current_event is None: - if self.state: - self.current_event = self.state() - value = self.current_event - self.current_event = None - return value - - # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END - # implicit_document ::= block_node DOCUMENT-END* - # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* - - def parse_stream_start(self): - - # Parse the stream start. - token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) - - # Prepare the next state. 
- self.state = self.parse_implicit_document_start - - return event - - def parse_implicit_document_start(self): - - # Parse an implicit document. - if not self.check_token(DirectiveToken, DocumentStartToken, - StreamEndToken): - self.tag_handles = self.DEFAULT_TAGS - token = self.peek_token() - start_mark = end_mark = token.start_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=False) - - # Prepare the next state. - self.states.append(self.parse_document_end) - self.state = self.parse_block_node - - return event - - else: - return self.parse_document_start() - - def parse_document_start(self): - - # Parse any extra document end indicators. - while self.check_token(DocumentEndToken): - self.get_token() - - # Parse an explicit document. - if not self.check_token(StreamEndToken): - token = self.peek_token() - start_mark = token.start_mark - version, tags = self.process_directives() - if not self.check_token(DocumentStartToken): - raise ParserError(None, None, - "expected '', but found %r" - % self.peek_token().id, - self.peek_token().start_mark) - token = self.get_token() - end_mark = token.end_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=True, version=version, tags=tags) - self.states.append(self.parse_document_end) - self.state = self.parse_document_content - else: - # Parse the end of the stream. - token = self.get_token() - event = StreamEndEvent(token.start_mark, token.end_mark) - assert not self.states - assert not self.marks - self.state = None - return event - - def parse_document_end(self): - - # Parse the document end. - token = self.peek_token() - start_mark = end_mark = token.start_mark - explicit = False - if self.check_token(DocumentEndToken): - token = self.get_token() - end_mark = token.end_mark - explicit = True - event = DocumentEndEvent(start_mark, end_mark, - explicit=explicit) - - # Prepare the next state. 
- self.state = self.parse_document_start - - return event - - def parse_document_content(self): - if self.check_token(DirectiveToken, - DocumentStartToken, DocumentEndToken, StreamEndToken): - event = self.process_empty_scalar(self.peek_token().start_mark) - self.state = self.states.pop() - return event - else: - return self.parse_block_node() - - def process_directives(self): - self.yaml_version = None - self.tag_handles = {} - while self.check_token(DirectiveToken): - token = self.get_token() - if token.name == u'YAML': - if self.yaml_version is not None: - raise ParserError(None, None, - "found duplicate YAML directive", token.start_mark) - major, minor = token.value - if major != 1: - raise ParserError(None, None, - "found incompatible YAML document (version 1.* is required)", - token.start_mark) - self.yaml_version = token.value - elif token.name == u'TAG': - handle, prefix = token.value - if handle in self.tag_handles: - raise ParserError(None, None, - "duplicate tag handle %r" % handle.encode('utf-8'), - token.start_mark) - self.tag_handles[handle] = prefix - if self.tag_handles: - value = self.yaml_version, self.tag_handles.copy() - else: - value = self.yaml_version, None - for key in self.DEFAULT_TAGS: - if key not in self.tag_handles: - self.tag_handles[key] = self.DEFAULT_TAGS[key] - return value - - # block_node_or_indentless_sequence ::= ALIAS - # | properties (block_content | indentless_block_sequence)? - # | block_content - # | indentless_block_sequence - # block_node ::= ALIAS - # | properties block_content? - # | block_content - # flow_node ::= ALIAS - # | properties flow_content? - # | flow_content - # properties ::= TAG ANCHOR? | ANCHOR TAG? 
- # block_content ::= block_collection | flow_collection | SCALAR - # flow_content ::= flow_collection | SCALAR - # block_collection ::= block_sequence | block_mapping - # flow_collection ::= flow_sequence | flow_mapping - - def parse_block_node(self): - return self.parse_node(block=True) - - def parse_flow_node(self): - return self.parse_node() - - def parse_block_node_or_indentless_sequence(self): - return self.parse_node(block=True, indentless_sequence=True) - - def parse_node(self, block=False, indentless_sequence=False): - if self.check_token(AliasToken): - token = self.get_token() - event = AliasEvent(token.value, token.start_mark, token.end_mark) - self.state = self.states.pop() - else: - anchor = None - tag = None - start_mark = end_mark = tag_mark = None - if self.check_token(AnchorToken): - token = self.get_token() - start_mark = token.start_mark - end_mark = token.end_mark - anchor = token.value - if self.check_token(TagToken): - token = self.get_token() - tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - elif self.check_token(TagToken): - token = self.get_token() - start_mark = tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - if self.check_token(AnchorToken): - token = self.get_token() - end_mark = token.end_mark - anchor = token.value - if tag is not None: - handle, suffix = tag - if handle is not None: - if handle not in self.tag_handles: - raise ParserError("while parsing a node", start_mark, - "found undefined tag handle %r" % handle.encode('utf-8'), - tag_mark) - tag = self.tag_handles[handle]+suffix - else: - tag = suffix - #if tag == u'!': - # raise ParserError("while parsing a node", start_mark, - # "found non-specific tag '!'", tag_mark, - # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") - if start_mark is None: - start_mark = end_mark = self.peek_token().start_mark - event = None - implicit = (tag is None or tag == u'!') - if indentless_sequence and 
self.check_token(BlockEntryToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark) - self.state = self.parse_indentless_sequence_entry - else: - if self.check_token(ScalarToken): - token = self.get_token() - end_mark = token.end_mark - if (token.plain and tag is None) or tag == u'!': - implicit = (True, False) - elif tag is None: - implicit = (False, True) - else: - implicit = (False, False) - event = ScalarEvent(anchor, tag, implicit, token.value, - start_mark, end_mark, style=token.style) - self.state = self.states.pop() - elif self.check_token(FlowSequenceStartToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_sequence_first_entry - elif self.check_token(FlowMappingStartToken): - end_mark = self.peek_token().end_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_mapping_first_key - elif block and self.check_token(BlockSequenceStartToken): - end_mark = self.peek_token().start_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_sequence_first_entry - elif block and self.check_token(BlockMappingStartToken): - end_mark = self.peek_token().start_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_mapping_first_key - elif anchor is not None or tag is not None: - # Empty scalars are allowed even if a tag or an anchor is - # specified. 
- event = ScalarEvent(anchor, tag, (implicit, False), u'', - start_mark, end_mark) - self.state = self.states.pop() - else: - if block: - node = 'block' - else: - node = 'flow' - token = self.peek_token() - raise ParserError("while parsing a %s node" % node, start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) - return event - - # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END - - def parse_block_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_sequence_entry() - - def parse_block_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, BlockEndToken): - self.states.append(self.parse_block_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_block_sequence_entry - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block collection", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - # indentless_sequence ::= (BLOCK-ENTRY block_node?)+ - - def parse_indentless_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, - KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_indentless_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_indentless_sequence_entry - return self.process_empty_scalar(token.end_mark) - token = self.peek_token() - event = SequenceEndEvent(token.start_mark, token.start_mark) - self.state = self.states.pop() - return event - - # block_mapping ::= BLOCK-MAPPING_START - # ((KEY 
block_node_or_indentless_sequence?)? - # (VALUE block_node_or_indentless_sequence?)?)* - # BLOCK-END - - def parse_block_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_mapping_key() - - def parse_block_mapping_key(self): - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_value) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_value - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block mapping", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_block_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_key) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_block_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - # flow_sequence ::= FLOW-SEQUENCE-START - # (flow_sequence_entry FLOW-ENTRY)* - # flow_sequence_entry? - # FLOW-SEQUENCE-END - # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - # - # Note that while production rules for both flow_sequence_entry and - # flow_mapping_entry are equal, their interpretations are different. - # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` - # generate an inline mapping (set syntax). 
- - def parse_flow_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_sequence_entry(first=True) - - def parse_flow_sequence_entry(self, first=False): - if not self.check_token(FlowSequenceEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) - - if self.check_token(KeyToken): - token = self.peek_token() - event = MappingStartEvent(None, None, True, - token.start_mark, token.end_mark, - flow_style=True) - self.state = self.parse_flow_sequence_entry_mapping_key - return event - elif not self.check_token(FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry) - return self.parse_flow_node() - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_sequence_entry_mapping_key(self): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_value - return self.process_empty_scalar(token.end_mark) - - def parse_flow_sequence_entry_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_end) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_end - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_sequence_entry_mapping_end - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_sequence_entry_mapping_end(self): - self.state = self.parse_flow_sequence_entry - token = self.peek_token() - return MappingEndEvent(token.start_mark, token.start_mark) - - # flow_mapping ::= FLOW-MAPPING-START - # (flow_mapping_entry FLOW-ENTRY)* - # flow_mapping_entry? - # FLOW-MAPPING-END - # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - - def parse_flow_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_mapping_key(first=True) - - def parse_flow_mapping_key(self, first=False): - if not self.check_token(FlowMappingEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_value - return self.process_empty_scalar(token.end_mark) - elif not self.check_token(FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_empty_value) - return self.parse_flow_node() - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_key) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_mapping_empty_value(self): - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(self.peek_token().start_mark) - - def process_empty_scalar(self, mark): - return ScalarEvent(None, None, (True, False), u'', mark, mark) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/reader.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/reader.py deleted file mode 100644 index 3249e6b..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/reader.py +++ /dev/null @@ -1,190 +0,0 @@ -# This module contains abstractions for the input stream. You don't have to -# looks further, there are no pretty code. -# -# We define two classes here. -# -# Mark(source, line, column) -# It's just a record and its only use is producing nice error messages. -# Parser does not use it for any other purposes. -# -# Reader(source, data) -# Reader determines the encoding of `data` and converts it to unicode. -# Reader provides the following methods and attributes: -# reader.peek(length=1) - return the next `length` characters -# reader.forward(length=1) - move the current position to `length` characters. -# reader.index - the number of the current character. -# reader.line, stream.column - the line and the column of the current character. 
- -__all__ = ['Reader', 'ReaderError'] - -from error import YAMLError, Mark - -import codecs, re - -class ReaderError(YAMLError): - - def __init__(self, name, position, character, encoding, reason): - self.name = name - self.character = character - self.position = position - self.encoding = encoding - self.reason = reason - - def __str__(self): - if isinstance(self.character, str): - return "'%s' codec can't decode byte #x%02x: %s\n" \ - " in \"%s\", position %d" \ - % (self.encoding, ord(self.character), self.reason, - self.name, self.position) - else: - return "unacceptable character #x%04x: %s\n" \ - " in \"%s\", position %d" \ - % (self.character, self.reason, - self.name, self.position) - -class Reader(object): - # Reader: - # - determines the data encoding and converts it to unicode, - # - checks if characters are in allowed range, - # - adds '\0' to the end. - - # Reader accepts - # - a `str` object, - # - a `unicode` object, - # - a file-like object with its `read` method returning `str`, - # - a file-like object with its `read` method returning `unicode`. - - # Yeah, it's ugly and slow. 
- - def __init__(self, stream): - self.name = None - self.stream = None - self.stream_pointer = 0 - self.eof = True - self.buffer = u'' - self.pointer = 0 - self.raw_buffer = None - self.raw_decode = None - self.encoding = None - self.index = 0 - self.line = 0 - self.column = 0 - if isinstance(stream, unicode): - self.name = "" - self.check_printable(stream) - self.buffer = stream+u'\0' - elif isinstance(stream, str): - self.name = "" - self.raw_buffer = stream - self.determine_encoding() - else: - self.stream = stream - self.name = getattr(stream, 'name', "") - self.eof = False - self.raw_buffer = '' - self.determine_encoding() - - def peek(self, index=0): - try: - return self.buffer[self.pointer+index] - except IndexError: - self.update(index+1) - return self.buffer[self.pointer+index] - - def prefix(self, length=1): - if self.pointer+length >= len(self.buffer): - self.update(length) - return self.buffer[self.pointer:self.pointer+length] - - def forward(self, length=1): - if self.pointer+length+1 >= len(self.buffer): - self.update(length+1) - while length: - ch = self.buffer[self.pointer] - self.pointer += 1 - self.index += 1 - if ch in u'\n\x85\u2028\u2029' \ - or (ch == u'\r' and self.buffer[self.pointer] != u'\n'): - self.line += 1 - self.column = 0 - elif ch != u'\uFEFF': - self.column += 1 - length -= 1 - - def get_mark(self): - if self.stream is None: - return Mark(self.name, self.index, self.line, self.column, - self.buffer, self.pointer) - else: - return Mark(self.name, self.index, self.line, self.column, - None, None) - - def determine_encoding(self): - while not self.eof and len(self.raw_buffer) < 2: - self.update_raw() - if not isinstance(self.raw_buffer, unicode): - if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): - self.raw_decode = codecs.utf_16_le_decode - self.encoding = 'utf-16-le' - elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): - self.raw_decode = codecs.utf_16_be_decode - self.encoding = 'utf-16-be' - else: - self.raw_decode = 
codecs.utf_8_decode - self.encoding = 'utf-8' - self.update(1) - - NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') - def check_printable(self, data): - match = self.NON_PRINTABLE.search(data) - if match: - character = match.group() - position = self.index+(len(self.buffer)-self.pointer)+match.start() - raise ReaderError(self.name, position, ord(character), - 'unicode', "special characters are not allowed") - - def update(self, length): - if self.raw_buffer is None: - return - self.buffer = self.buffer[self.pointer:] - self.pointer = 0 - while len(self.buffer) < length: - if not self.eof: - self.update_raw() - if self.raw_decode is not None: - try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) - except UnicodeDecodeError, exc: - character = exc.object[exc.start] - if self.stream is not None: - position = self.stream_pointer-len(self.raw_buffer)+exc.start - else: - position = exc.start - raise ReaderError(self.name, position, character, - exc.encoding, exc.reason) - else: - data = self.raw_buffer - converted = len(data) - self.check_printable(data) - self.buffer += data - self.raw_buffer = self.raw_buffer[converted:] - if self.eof: - self.buffer += u'\0' - self.raw_buffer = None - break - - def update_raw(self, size=1024): - data = self.stream.read(size) - if data: - self.raw_buffer += data - self.stream_pointer += len(data) - else: - self.eof = True - -#try: -# import psyco -# psyco.bind(Reader) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/representer.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/representer.py deleted file mode 100644 index 5f4fc70..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/representer.py +++ /dev/null @@ -1,484 +0,0 @@ - -__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', - 'RepresenterError'] - -from error import * -from nodes import * - -import datetime - -import sys, copy_reg, types - -class 
RepresenterError(YAMLError): - pass - -class BaseRepresenter(object): - - yaml_representers = {} - yaml_multi_representers = {} - - def __init__(self, default_style=None, default_flow_style=None): - self.default_style = default_style - self.default_flow_style = default_flow_style - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent(self, data): - node = self.represent_data(data) - self.serialize(node) - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def get_classobj_bases(self, cls): - bases = [cls] - for base in cls.__bases__: - bases.extend(self.get_classobj_bases(base)) - return bases - - def represent_data(self, data): - if self.ignore_aliases(data): - self.alias_key = None - else: - self.alias_key = id(data) - if self.alias_key is not None: - if self.alias_key in self.represented_objects: - node = self.represented_objects[self.alias_key] - #if node is None: - # raise RepresenterError("recursive objects are not allowed: %r" % data) - return node - #self.represented_objects[alias_key] = None - self.object_keeper.append(data) - data_types = type(data).__mro__ - if type(data) is types.InstanceType: - data_types = self.get_classobj_bases(data.__class__)+list(data_types) - if data_types[0] in self.yaml_representers: - node = self.yaml_representers[data_types[0]](self, data) - else: - for data_type in data_types: - if data_type in self.yaml_multi_representers: - node = self.yaml_multi_representers[data_type](self, data) - break - else: - if None in self.yaml_multi_representers: - node = self.yaml_multi_representers[None](self, data) - elif None in self.yaml_representers: - node = self.yaml_representers[None](self, data) - else: - node = ScalarNode(None, unicode(data)) - #if alias_key is not None: - # self.represented_objects[alias_key] = node - return node - - def add_representer(cls, data_type, representer): - if not 'yaml_representers' in cls.__dict__: - cls.yaml_representers = 
cls.yaml_representers.copy() - cls.yaml_representers[data_type] = representer - add_representer = classmethod(add_representer) - - def add_multi_representer(cls, data_type, representer): - if not 'yaml_multi_representers' in cls.__dict__: - cls.yaml_multi_representers = cls.yaml_multi_representers.copy() - cls.yaml_multi_representers[data_type] = representer - add_multi_representer = classmethod(add_multi_representer) - - def represent_scalar(self, tag, value, style=None): - if style is None: - style = self.default_style - node = ScalarNode(tag, value, style=style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - return node - - def represent_sequence(self, tag, sequence, flow_style=None): - value = [] - node = SequenceNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - for item in sequence: - node_item = self.represent_data(item) - if not (isinstance(node_item, ScalarNode) and not node_item.style): - best_style = False - value.append(node_item) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - mapping = mapping.items() - mapping.sort() - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - 
node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def ignore_aliases(self, data): - return False - -class SafeRepresenter(BaseRepresenter): - - def ignore_aliases(self, data): - if data in [None, ()]: - return True - if isinstance(data, (str, unicode, bool, int, float)): - return True - - def represent_none(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:null', - u'null') - - def represent_str(self, data): - tag = None - style = None - try: - data = unicode(data, 'ascii') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - try: - data = unicode(data, 'utf-8') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - data = data.encode('base64') - tag = u'tag:yaml.org,2002:binary' - style = '|' - return self.represent_scalar(tag, data, style=style) - - def represent_unicode(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:str', data) - - def represent_bool(self, data): - if data: - value = u'true' - else: - value = u'false' - return self.represent_scalar(u'tag:yaml.org,2002:bool', value) - - def represent_int(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) - - def represent_long(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) - - inf_value = 1e300 - while repr(inf_value) != repr(inf_value*inf_value): - inf_value *= inf_value - - def represent_float(self, data): - if data != data or (data == 0.0 and data == 1.0): - value = u'.nan' - elif data == self.inf_value: - value = u'.inf' - elif data == -self.inf_value: - value = u'-.inf' - else: - value = unicode(repr(data)).lower() - # Note that in some cases `repr(data)` represents a float number - # without the decimal parts. For instance: - # >>> repr(1e17) - # '1e17' - # Unfortunately, this is not a valid float representation according - # to the definition of the `!!float` tag. We fix this by adding - # '.0' before the 'e' symbol. - if u'.' 
not in value and u'e' in value: - value = value.replace(u'e', u'.0e', 1) - return self.represent_scalar(u'tag:yaml.org,2002:float', value) - - def represent_list(self, data): - #pairs = (len(data) > 0 and isinstance(data, list)) - #if pairs: - # for item in data: - # if not isinstance(item, tuple) or len(item) != 2: - # pairs = False - # break - #if not pairs: - return self.represent_sequence(u'tag:yaml.org,2002:seq', data) - #value = [] - #for item_key, item_value in data: - # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', - # [(item_key, item_value)])) - #return SequenceNode(u'tag:yaml.org,2002:pairs', value) - - def represent_dict(self, data): - return self.represent_mapping(u'tag:yaml.org,2002:map', data) - - def represent_set(self, data): - value = {} - for key in data: - value[key] = None - return self.represent_mapping(u'tag:yaml.org,2002:set', value) - - def represent_date(self, data): - value = unicode(data.isoformat()) - return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) - - def represent_datetime(self, data): - value = unicode(data.isoformat(' ')) - return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) - - def represent_yaml_object(self, tag, data, cls, flow_style=None): - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__.copy() - return self.represent_mapping(tag, state, flow_style=flow_style) - - def represent_undefined(self, data): - raise RepresenterError("cannot represent an object: %s" % data) - -SafeRepresenter.add_representer(type(None), - SafeRepresenter.represent_none) - -SafeRepresenter.add_representer(str, - SafeRepresenter.represent_str) - -SafeRepresenter.add_representer(unicode, - SafeRepresenter.represent_unicode) - -SafeRepresenter.add_representer(bool, - SafeRepresenter.represent_bool) - -SafeRepresenter.add_representer(int, - SafeRepresenter.represent_int) - -SafeRepresenter.add_representer(long, - SafeRepresenter.represent_long) - 
-SafeRepresenter.add_representer(float, - SafeRepresenter.represent_float) - -SafeRepresenter.add_representer(list, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(tuple, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(dict, - SafeRepresenter.represent_dict) - -SafeRepresenter.add_representer(set, - SafeRepresenter.represent_set) - -SafeRepresenter.add_representer(datetime.date, - SafeRepresenter.represent_date) - -SafeRepresenter.add_representer(datetime.datetime, - SafeRepresenter.represent_datetime) - -SafeRepresenter.add_representer(None, - SafeRepresenter.represent_undefined) - -class Representer(SafeRepresenter): - - def represent_str(self, data): - tag = None - style = None - try: - data = unicode(data, 'ascii') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - try: - data = unicode(data, 'utf-8') - tag = u'tag:yaml.org,2002:python/str' - except UnicodeDecodeError: - data = data.encode('base64') - tag = u'tag:yaml.org,2002:binary' - style = '|' - return self.represent_scalar(tag, data, style=style) - - def represent_unicode(self, data): - tag = None - try: - data.encode('ascii') - tag = u'tag:yaml.org,2002:python/unicode' - except UnicodeEncodeError: - tag = u'tag:yaml.org,2002:str' - return self.represent_scalar(tag, data) - - def represent_long(self, data): - tag = u'tag:yaml.org,2002:int' - if int(data) is not data: - tag = u'tag:yaml.org,2002:python/long' - return self.represent_scalar(tag, unicode(data)) - - def represent_complex(self, data): - if data.imag == 0.0: - data = u'%r' % data.real - elif data.real == 0.0: - data = u'%rj' % data.imag - elif data.imag > 0: - data = u'%r+%rj' % (data.real, data.imag) - else: - data = u'%r%rj' % (data.real, data.imag) - return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data) - - def represent_tuple(self, data): - return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data) - - def represent_name(self, data): - name = u'%s.%s' % 
(data.__module__, data.__name__) - return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'') - - def represent_module(self, data): - return self.represent_scalar( - u'tag:yaml.org,2002:python/module:'+data.__name__, u'') - - def represent_instance(self, data): - # For instances of classic classes, we use __getinitargs__ and - # __getstate__ to serialize the data. - - # If data.__getinitargs__ exists, the object must be reconstructed by - # calling cls(**args), where args is a tuple returned by - # __getinitargs__. Otherwise, the cls.__init__ method should never be - # called and the class instance is created by instantiating a trivial - # class and assigning to the instance's __class__ variable. - - # If data.__getstate__ exists, it returns the state of the object. - # Otherwise, the state of the object is data.__dict__. - - # We produce either a !!python/object or !!python/object/new node. - # If data.__getinitargs__ does not exist and state is a dictionary, we - # produce a !!python/object node . Otherwise we produce a - # !!python/object/new node. - - cls = data.__class__ - class_name = u'%s.%s' % (cls.__module__, cls.__name__) - args = None - state = None - if hasattr(data, '__getinitargs__'): - args = list(data.__getinitargs__()) - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__ - if args is None and isinstance(state, dict): - return self.represent_mapping( - u'tag:yaml.org,2002:python/object:'+class_name, state) - if isinstance(state, dict) and not state: - return self.represent_sequence( - u'tag:yaml.org,2002:python/object/new:'+class_name, args) - value = {} - if args: - value['args'] = args - value['state'] = state - return self.represent_mapping( - u'tag:yaml.org,2002:python/object/new:'+class_name, value) - - def represent_object(self, data): - # We use __reduce__ API to save the data. 
data.__reduce__ returns - # a tuple of length 2-5: - # (function, args, state, listitems, dictitems) - - # For reconstructing, we calls function(*args), then set its state, - # listitems, and dictitems if they are not None. - - # A special case is when function.__name__ == '__newobj__'. In this - # case we create the object with args[0].__new__(*args). - - # Another special case is when __reduce__ returns a string - we don't - # support it. - - # We produce a !!python/object, !!python/object/new or - # !!python/object/apply node. - - cls = type(data) - if cls in copy_reg.dispatch_table: - reduce = copy_reg.dispatch_table[cls](data) - elif hasattr(data, '__reduce_ex__'): - reduce = data.__reduce_ex__(2) - elif hasattr(data, '__reduce__'): - reduce = data.__reduce__() - else: - raise RepresenterError("cannot represent object: %r" % data) - reduce = (list(reduce)+[None]*5)[:5] - function, args, state, listitems, dictitems = reduce - args = list(args) - if state is None: - state = {} - if listitems is not None: - listitems = list(listitems) - if dictitems is not None: - dictitems = dict(dictitems) - if function.__name__ == '__newobj__': - function = args[0] - args = args[1:] - tag = u'tag:yaml.org,2002:python/object/new:' - newobj = True - else: - tag = u'tag:yaml.org,2002:python/object/apply:' - newobj = False - function_name = u'%s.%s' % (function.__module__, function.__name__) - if not args and not listitems and not dictitems \ - and isinstance(state, dict) and newobj: - return self.represent_mapping( - u'tag:yaml.org,2002:python/object:'+function_name, state) - if not listitems and not dictitems \ - and isinstance(state, dict) and not state: - return self.represent_sequence(tag+function_name, args) - value = {} - if args: - value['args'] = args - if state or not isinstance(state, dict): - value['state'] = state - if listitems: - value['listitems'] = listitems - if dictitems: - value['dictitems'] = dictitems - return self.represent_mapping(tag+function_name, value) 
- -Representer.add_representer(str, - Representer.represent_str) - -Representer.add_representer(unicode, - Representer.represent_unicode) - -Representer.add_representer(long, - Representer.represent_long) - -Representer.add_representer(complex, - Representer.represent_complex) - -Representer.add_representer(tuple, - Representer.represent_tuple) - -Representer.add_representer(type, - Representer.represent_name) - -Representer.add_representer(types.ClassType, - Representer.represent_name) - -Representer.add_representer(types.FunctionType, - Representer.represent_name) - -Representer.add_representer(types.BuiltinFunctionType, - Representer.represent_name) - -Representer.add_representer(types.ModuleType, - Representer.represent_module) - -Representer.add_multi_representer(types.InstanceType, - Representer.represent_instance) - -Representer.add_multi_representer(object, - Representer.represent_object) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/resolver.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/resolver.py deleted file mode 100644 index 6b5ab87..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/resolver.py +++ /dev/null @@ -1,224 +0,0 @@ - -__all__ = ['BaseResolver', 'Resolver'] - -from error import * -from nodes import * - -import re - -class ResolverError(YAMLError): - pass - -class BaseResolver(object): - - DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str' - DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq' - DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map' - - yaml_implicit_resolvers = {} - yaml_path_resolvers = {} - - def __init__(self): - self.resolver_exact_paths = [] - self.resolver_prefix_paths = [] - - def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: - cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() - if first is None: - first = [None] - for ch in first: - cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) - add_implicit_resolver = 
classmethod(add_implicit_resolver) - - def add_path_resolver(cls, tag, path, kind=None): - # Note: `add_path_resolver` is experimental. The API could be changed. - # `new_path` is a pattern that is matched against the path from the - # root to the node that is being considered. `node_path` elements are - # tuples `(node_check, index_check)`. `node_check` is a node class: - # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` - # matches any kind of a node. `index_check` could be `None`, a boolean - # value, a string value, or a number. `None` and `False` match against - # any _value_ of sequence and mapping nodes. `True` matches against - # any _key_ of a mapping node. A string `index_check` matches against - # a mapping value that corresponds to a scalar key which content is - # equal to the `index_check` value. An integer `index_check` matches - # against a sequence value with the index equal to `index_check`. - if not 'yaml_path_resolvers' in cls.__dict__: - cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() - new_path = [] - for element in path: - if isinstance(element, (list, tuple)): - if len(element) == 2: - node_check, index_check = element - elif len(element) == 1: - node_check = element[0] - index_check = True - else: - raise ResolverError("Invalid path element: %s" % element) - else: - node_check = None - index_check = element - if node_check is str: - node_check = ScalarNode - elif node_check is list: - node_check = SequenceNode - elif node_check is dict: - node_check = MappingNode - elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ - and not isinstance(node_check, basestring) \ - and node_check is not None: - raise ResolverError("Invalid node checker: %s" % node_check) - if not isinstance(index_check, (basestring, int)) \ - and index_check is not None: - raise ResolverError("Invalid index checker: %s" % index_check) - new_path.append((node_check, index_check)) - if kind is str: - kind = ScalarNode - elif kind is list: - 
kind = SequenceNode - elif kind is dict: - kind = MappingNode - elif kind not in [ScalarNode, SequenceNode, MappingNode] \ - and kind is not None: - raise ResolverError("Invalid node kind: %s" % kind) - cls.yaml_path_resolvers[tuple(new_path), kind] = tag - add_path_resolver = classmethod(add_path_resolver) - - def descend_resolver(self, current_node, current_index): - if not self.yaml_path_resolvers: - return - exact_paths = {} - prefix_paths = [] - if current_node: - depth = len(self.resolver_prefix_paths) - for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): - if len(path) > depth: - prefix_paths.append((path, kind)) - else: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - for path, kind in self.yaml_path_resolvers: - if not path: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - prefix_paths.append((path, kind)) - self.resolver_exact_paths.append(exact_paths) - self.resolver_prefix_paths.append(prefix_paths) - - def ascend_resolver(self): - if not self.yaml_path_resolvers: - return - self.resolver_exact_paths.pop() - self.resolver_prefix_paths.pop() - - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): - node_check, index_check = path[depth-1] - if isinstance(node_check, basestring): - if current_node.tag != node_check: - return - elif node_check is not None: - if not isinstance(current_node, node_check): - return - if index_check is True and current_index is not None: - return - if (index_check is False or index_check is None) \ - and current_index is None: - return - if isinstance(index_check, basestring): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): - return - elif isinstance(index_check, int) and not isinstance(index_check, bool): - if index_check != current_index: - return - return True - - def resolve(self, kind, value, implicit): - if kind is ScalarNode and 
implicit[0]: - if value == u'': - resolvers = self.yaml_implicit_resolvers.get(u'', []) - else: - resolvers = self.yaml_implicit_resolvers.get(value[0], []) - resolvers += self.yaml_implicit_resolvers.get(None, []) - for tag, regexp in resolvers: - if regexp.match(value): - return tag - implicit = implicit[1] - if self.yaml_path_resolvers: - exact_paths = self.resolver_exact_paths[-1] - if kind in exact_paths: - return exact_paths[kind] - if None in exact_paths: - return exact_paths[None] - if kind is ScalarNode: - return self.DEFAULT_SCALAR_TAG - elif kind is SequenceNode: - return self.DEFAULT_SEQUENCE_TAG - elif kind is MappingNode: - return self.DEFAULT_MAPPING_TAG - -class Resolver(BaseResolver): - pass - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:bool', - re.compile(ur'''^(?:yes|Yes|YES|no|No|NO - |true|True|TRUE|false|False|FALSE - |on|On|ON|off|Off|OFF)$''', re.X), - list(u'yYnNtTfFoO')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:float', - re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? - |\.[0-9_]+(?:[eE][-+][0-9]+)? - |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* - |[-+]?\.(?:inf|Inf|INF) - |\.(?:nan|NaN|NAN))$''', re.X), - list(u'-+0123456789.')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:int', - re.compile(ur'''^(?:[-+]?0b[0-1_]+ - |[-+]?0[0-7_]+ - |[-+]?(?:0|[1-9][0-9_]*) - |[-+]?0x[0-9a-fA-F_]+ - |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), - list(u'-+0123456789')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:merge', - re.compile(ur'^(?:<<)$'), - [u'<']) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:null', - re.compile(ur'''^(?: ~ - |null|Null|NULL - | )$''', re.X), - [u'~', u'n', u'N', u'']) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:timestamp', - re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] - |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? - (?:[Tt]|[ \t]+)[0-9][0-9]? - :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? 
- (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), - list(u'0123456789')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:value', - re.compile(ur'^(?:=)$'), - [u'=']) - -# The following resolver is only for documentation purposes. It cannot work -# because plain scalars cannot start with '!', '&', or '*'. -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:yaml', - re.compile(ur'^(?:!|&|\*)$'), - list(u'!&*')) - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/scanner.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/scanner.py deleted file mode 100644 index 5228fad..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/scanner.py +++ /dev/null @@ -1,1457 +0,0 @@ - -# Scanner produces tokens of the following types: -# STREAM-START -# STREAM-END -# DIRECTIVE(name, value) -# DOCUMENT-START -# DOCUMENT-END -# BLOCK-SEQUENCE-START -# BLOCK-MAPPING-START -# BLOCK-END -# FLOW-SEQUENCE-START -# FLOW-MAPPING-START -# FLOW-SEQUENCE-END -# FLOW-MAPPING-END -# BLOCK-ENTRY -# FLOW-ENTRY -# KEY -# VALUE -# ALIAS(value) -# ANCHOR(value) -# TAG(value) -# SCALAR(value, plain, style) -# -# Read comments in the Scanner code for more details. -# - -__all__ = ['Scanner', 'ScannerError'] - -from error import MarkedYAMLError -from tokens import * - -class ScannerError(MarkedYAMLError): - pass - -class SimpleKey(object): - # See below simple keys treatment. - - def __init__(self, token_number, required, index, line, column, mark): - self.token_number = token_number - self.required = required - self.index = index - self.line = line - self.column = column - self.mark = mark - -class Scanner(object): - - def __init__(self): - """Initialize the scanner.""" - # It is assumed that Scanner and Reader will have a common descendant. - # Reader do the dirty work of checking for BOM and converting the - # input data to Unicode. It also adds NUL to the end. 
- # - # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. - - # Had we reached the end of the stream? - self.done = False - - # The number of unclosed '{' and '['. `flow_level == 0` means block - # context. - self.flow_level = 0 - - # List of processed tokens that are not yet emitted. - self.tokens = [] - - # Add the STREAM-START token. - self.fetch_stream_start() - - # Number of tokens that were emitted through the `get_token` method. - self.tokens_taken = 0 - - # The current indentation level. - self.indent = -1 - - # Past indentation levels. - self.indents = [] - - # Variables related to simple keys treatment. - - # A simple key is a key that is not denoted by the '?' indicator. - # Example of simple keys: - # --- - # block simple key: value - # ? not a simple key: - # : { flow simple key: value } - # We emit the KEY token before all keys, so when we find a potential - # simple key, we try to locate the corresponding ':' indicator. - # Simple keys should be limited to a single line and 1024 characters. - - # Can a simple key start at the current position? A simple key may - # start: - # - at the beginning of the line, not counting indentation spaces - # (in block context), - # - after '{', '[', ',' (in the flow context), - # - after '?', ':', '-' (in the block context). - # In the block context, this flag also signifies if a block collection - # may start at the current position. - self.allow_simple_key = True - - # Keep track of possible simple keys. This is a dictionary. The key - # is `flow_level`; there can be no more that one possible simple key - # for each level. The value is a SimpleKey record: - # (token_number, required, index, line, column, mark) - # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), - # '[', or '{' tokens. - self.possible_simple_keys = {} - - # Public methods. 
- - def check_token(self, *choices): - # Check if the next token is one of the given types. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - # Return the next token, but do not delete if from the queue. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - return self.tokens[0] - - def get_token(self): - # Return the next token. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - self.tokens_taken += 1 - return self.tokens.pop(0) - - # Private methods. - - def need_more_tokens(self): - if self.done: - return False - if not self.tokens: - return True - # The current token may be a potential simple key, so we - # need to look further. - self.stale_possible_simple_keys() - if self.next_possible_simple_key() == self.tokens_taken: - return True - - def fetch_more_tokens(self): - - # Eat whitespaces and comments until we reach the next token. - self.scan_to_next_token() - - # Remove obsolete possible simple keys. - self.stale_possible_simple_keys() - - # Compare the current indentation and column. It may add some tokens - # and decrease the current indentation level. - self.unwind_indent(self.column) - - # Peek the next character. - ch = self.peek() - - # Is it the end of stream? - if ch == u'\0': - return self.fetch_stream_end() - - # Is it a directive? - if ch == u'%' and self.check_directive(): - return self.fetch_directive() - - # Is it the document start? - if ch == u'-' and self.check_document_start(): - return self.fetch_document_start() - - # Is it the document end? - if ch == u'.' and self.check_document_end(): - return self.fetch_document_end() - - # TODO: support for BOM within a stream. - #if ch == u'\uFEFF': - # return self.fetch_bom() <-- issue BOMToken - - # Note: the order of the following checks is NOT significant. 
- - # Is it the flow sequence start indicator? - if ch == u'[': - return self.fetch_flow_sequence_start() - - # Is it the flow mapping start indicator? - if ch == u'{': - return self.fetch_flow_mapping_start() - - # Is it the flow sequence end indicator? - if ch == u']': - return self.fetch_flow_sequence_end() - - # Is it the flow mapping end indicator? - if ch == u'}': - return self.fetch_flow_mapping_end() - - # Is it the flow entry indicator? - if ch == u',': - return self.fetch_flow_entry() - - # Is it the block entry indicator? - if ch == u'-' and self.check_block_entry(): - return self.fetch_block_entry() - - # Is it the key indicator? - if ch == u'?' and self.check_key(): - return self.fetch_key() - - # Is it the value indicator? - if ch == u':' and self.check_value(): - return self.fetch_value() - - # Is it an alias? - if ch == u'*': - return self.fetch_alias() - - # Is it an anchor? - if ch == u'&': - return self.fetch_anchor() - - # Is it a tag? - if ch == u'!': - return self.fetch_tag() - - # Is it a literal scalar? - if ch == u'|' and not self.flow_level: - return self.fetch_literal() - - # Is it a folded scalar? - if ch == u'>' and not self.flow_level: - return self.fetch_folded() - - # Is it a single quoted scalar? - if ch == u'\'': - return self.fetch_single() - - # Is it a double quoted scalar? - if ch == u'\"': - return self.fetch_double() - - # It must be a plain scalar then. - if self.check_plain(): - return self.fetch_plain() - - # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" - % ch.encode('utf-8'), self.get_mark()) - - # Simple keys treatment. - - def next_possible_simple_key(self): - # Return the number of the nearest possible simple key. Actually we - # don't need to loop through the whole dictionary. 
We may replace it - # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number - min_token_number = None - for level in self.possible_simple_keys: - key = self.possible_simple_keys[level] - if min_token_number is None or key.token_number < min_token_number: - min_token_number = key.token_number - return min_token_number - - def stale_possible_simple_keys(self): - # Remove entries that are no longer possible simple keys. According to - # the YAML specification, simple keys - # - should be limited to a single line, - # - should be no longer than 1024 characters. - # Disabling this procedure will allow simple keys of any length and - # height (may cause problems if indentation is broken though). - for level in self.possible_simple_keys.keys(): - key = self.possible_simple_keys[level] - if key.line != self.line \ - or self.index-key.index > 1024: - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - del self.possible_simple_keys[level] - - def save_possible_simple_key(self): - # The next token may start a simple key. We check if it's possible - # and save its position. This function is called for - # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. - - # Check if a simple key is required at the current position. - required = not self.flow_level and self.indent == self.column - - # A simple key is required only if it is the first token in the current - # line. Therefore it is always allowed. - assert self.allow_simple_key or not required - - # The next token might be a simple key. Let's save it's number and - # position. 
- if self.allow_simple_key: - self.remove_possible_simple_key() - token_number = self.tokens_taken+len(self.tokens) - key = SimpleKey(token_number, required, - self.index, self.line, self.column, self.get_mark()) - self.possible_simple_keys[self.flow_level] = key - - def remove_possible_simple_key(self): - # Remove the saved possible key position at the current flow level. - if self.flow_level in self.possible_simple_keys: - key = self.possible_simple_keys[self.flow_level] - - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - - del self.possible_simple_keys[self.flow_level] - - # Indentation functions. - - def unwind_indent(self, column): - - ## In flow context, tokens should respect indentation. - ## Actually the condition should be `self.indent >= column` according to - ## the spec. But this condition will prohibit intuitively correct - ## constructions such as - ## key : { - ## } - #if self.flow_level and self.indent > column: - # raise ScannerError(None, None, - # "invalid intendation or unclosed '[' or '{'", - # self.get_mark()) - - # In the flow context, indentation is ignored. We make the scanner less - # restrictive then specification requires. - if self.flow_level: - return - - # In block context, we may need to issue the BLOCK-END tokens. - while self.indent > column: - mark = self.get_mark() - self.indent = self.indents.pop() - self.tokens.append(BlockEndToken(mark, mark)) - - def add_indent(self, column): - # Check if we need to increase indentation. - if self.indent < column: - self.indents.append(self.indent) - self.indent = column - return True - return False - - # Fetchers. - - def fetch_stream_start(self): - # We always add STREAM-START as the first token and STREAM-END as the - # last token. - - # Read the token. - mark = self.get_mark() - - # Add STREAM-START. 
- self.tokens.append(StreamStartToken(mark, mark, - encoding=self.encoding)) - - - def fetch_stream_end(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - self.possible_simple_keys = {} - - # Read the token. - mark = self.get_mark() - - # Add STREAM-END. - self.tokens.append(StreamEndToken(mark, mark)) - - # The steam is finished. - self.done = True - - def fetch_directive(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Scan and add DIRECTIVE. - self.tokens.append(self.scan_directive()) - - def fetch_document_start(self): - self.fetch_document_indicator(DocumentStartToken) - - def fetch_document_end(self): - self.fetch_document_indicator(DocumentEndToken) - - def fetch_document_indicator(self, TokenClass): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. Note that there could not be a block collection - # after '---'. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Add DOCUMENT-START or DOCUMENT-END. - start_mark = self.get_mark() - self.forward(3) - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_start(self): - self.fetch_flow_collection_start(FlowSequenceStartToken) - - def fetch_flow_mapping_start(self): - self.fetch_flow_collection_start(FlowMappingStartToken) - - def fetch_flow_collection_start(self, TokenClass): - - # '[' and '{' may start a simple key. - self.save_possible_simple_key() - - # Increase the flow level. - self.flow_level += 1 - - # Simple keys are allowed after '[' and '{'. - self.allow_simple_key = True - - # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_end(self): - self.fetch_flow_collection_end(FlowSequenceEndToken) - - def fetch_flow_mapping_end(self): - self.fetch_flow_collection_end(FlowMappingEndToken) - - def fetch_flow_collection_end(self, TokenClass): - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Decrease the flow level. - self.flow_level -= 1 - - # No simple keys after ']' or '}'. - self.allow_simple_key = False - - # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_entry(self): - - # Simple keys are allowed after ','. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add FLOW-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(FlowEntryToken(start_mark, end_mark)) - - def fetch_block_entry(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a new entry? - if not self.allow_simple_key: - raise ScannerError(None, None, - "sequence entries are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-SEQUENCE-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockSequenceStartToken(mark, mark)) - - # It's an error for the block entry to occur in the flow context, - # but we let the parser detect this. - else: - pass - - # Simple keys are allowed after '-'. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add BLOCK-ENTRY. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(BlockEntryToken(start_mark, end_mark)) - - def fetch_key(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a key (not nessesary a simple)? - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping keys are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-MAPPING-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after '?' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add KEY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(KeyToken(start_mark, end_mark)) - - def fetch_value(self): - - # Do we determine a simple key? - if self.flow_level in self.possible_simple_keys: - - # Add KEY. - key = self.possible_simple_keys[self.flow_level] - del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number-self.tokens_taken, - KeyToken(key.mark, key.mark)) - - # If this key starts a new block mapping, we need to add - # BLOCK-MAPPING-START. - if not self.flow_level: - if self.add_indent(key.column): - self.tokens.insert(key.token_number-self.tokens_taken, - BlockMappingStartToken(key.mark, key.mark)) - - # There cannot be two simple keys one after another. - self.allow_simple_key = False - - # It must be a part of a complex key. - else: - - # Block context needs additional checks. - # (Do we really need them? They will be catched by the parser - # anyway.) - if not self.flow_level: - - # We are allowed to start a complex value if and only if - # we can start a simple key. 
- if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping values are not allowed here", - self.get_mark()) - - # If this value starts a new block mapping, we need to add - # BLOCK-MAPPING-START. It will be detected as an error later by - # the parser. - if not self.flow_level: - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after ':' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add VALUE. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(ValueToken(start_mark, end_mark)) - - def fetch_alias(self): - - # ALIAS could be a simple key. - self.save_possible_simple_key() - - # No simple keys after ALIAS. - self.allow_simple_key = False - - # Scan and add ALIAS. - self.tokens.append(self.scan_anchor(AliasToken)) - - def fetch_anchor(self): - - # ANCHOR could start a simple key. - self.save_possible_simple_key() - - # No simple keys after ANCHOR. - self.allow_simple_key = False - - # Scan and add ANCHOR. - self.tokens.append(self.scan_anchor(AnchorToken)) - - def fetch_tag(self): - - # TAG could start a simple key. - self.save_possible_simple_key() - - # No simple keys after TAG. - self.allow_simple_key = False - - # Scan and add TAG. - self.tokens.append(self.scan_tag()) - - def fetch_literal(self): - self.fetch_block_scalar(style='|') - - def fetch_folded(self): - self.fetch_block_scalar(style='>') - - def fetch_block_scalar(self, style): - - # A simple key may follow a block scalar. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Scan and add SCALAR. 
- self.tokens.append(self.scan_block_scalar(style)) - - def fetch_single(self): - self.fetch_flow_scalar(style='\'') - - def fetch_double(self): - self.fetch_flow_scalar(style='"') - - def fetch_flow_scalar(self, style): - - # A flow scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after flow scalars. - self.allow_simple_key = False - - # Scan and add SCALAR. - self.tokens.append(self.scan_flow_scalar(style)) - - def fetch_plain(self): - - # A plain scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after plain scalars. But note that `scan_plain` will - # change this flag if the scan is finished at the beginning of the - # line. - self.allow_simple_key = False - - # Scan and add SCALAR. May change `allow_simple_key`. - self.tokens.append(self.scan_plain()) - - # Checkers. - - def check_directive(self): - - # DIRECTIVE: ^ '%' ... - # The '%' indicator is already checked. - if self.column == 0: - return True - - def check_document_start(self): - - # DOCUMENT-START: ^ '---' (' '|'\n') - if self.column == 0: - if self.prefix(3) == u'---' \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return True - - def check_document_end(self): - - # DOCUMENT-END: ^ '...' (' '|'\n') - if self.column == 0: - if self.prefix(3) == u'...' \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return True - - def check_block_entry(self): - - # BLOCK-ENTRY: '-' (' '|'\n') - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_key(self): - - # KEY(flow context): '?' - if self.flow_level: - return True - - # KEY(block context): '?' 
(' '|'\n') - else: - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_value(self): - - # VALUE(flow context): ':' - if self.flow_level: - return True - - # VALUE(block context): ':' (' '|'\n') - else: - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_plain(self): - - # A plain scalar may start with any non-space character except: - # '-', '?', ':', ',', '[', ']', '{', '}', - # '#', '&', '*', '!', '|', '>', '\'', '\"', - # '%', '@', '`'. - # - # It may also start with - # '-', '?', ':' - # if it is followed by a non-space character. - # - # Note that we limit the last rule to the block context (except the - # '-' character) because we want the flow context to be space - # independent. - ch = self.peek() - return ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ - or (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029' - and (ch == u'-' or (not self.flow_level and ch in u'?:'))) - - # Scanners. - - def scan_to_next_token(self): - # We ignore spaces, line breaks and comments. - # If we find a line break in the block context, we set the flag - # `allow_simple_key` on. - # The byte order mark is stripped if it's the first character in the - # stream. We do not yet support BOM inside the stream as the - # specification requires. Any such mark will be considered as a part - # of the document. - # - # TODO: We need to make tab handling rules more sane. A good rule is - # Tabs cannot precede tokens - # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, - # KEY(block), VALUE(block), BLOCK-ENTRY - # So the checking code is - # if : - # self.allow_simple_keys = False - # We also need to add the check for `allow_simple_keys == True` to - # `unwind_indent` before issuing BLOCK-END. - # Scanners for block, flow, and plain scalars need to be modified. 
- - if self.index == 0 and self.peek() == u'\uFEFF': - self.forward() - found = False - while not found: - while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - if self.scan_line_break(): - if not self.flow_level: - self.allow_simple_key = True - else: - found = True - - def scan_directive(self): - # See the specification for details. - start_mark = self.get_mark() - self.forward() - name = self.scan_directive_name(start_mark) - value = None - if name == u'YAML': - value = self.scan_yaml_directive_value(start_mark) - end_mark = self.get_mark() - elif name == u'TAG': - value = self.scan_tag_directive_value(start_mark) - end_mark = self.get_mark() - else: - end_mark = self.get_mark() - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - self.scan_directive_ignored_line(start_mark) - return DirectiveToken(name, value, start_mark, end_mark) - - def scan_directive_name(self, start_mark): - # See the specification for details. - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - return value - - def scan_yaml_directive_value(self, start_mark): - # See the specification for details. 
- while self.peek() == u' ': - self.forward() - major = self.scan_yaml_directive_number(start_mark) - if self.peek() != '.': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or '.', but found %r" - % self.peek().encode('utf-8'), - self.get_mark()) - self.forward() - minor = self.scan_yaml_directive_number(start_mark) - if self.peek() not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or ' ', but found %r" - % self.peek().encode('utf-8'), - self.get_mark()) - return (major, minor) - - def scan_yaml_directive_number(self, start_mark): - # See the specification for details. - ch = self.peek() - if not (u'0' <= ch <= u'9'): - raise ScannerError("while scanning a directive", start_mark, - "expected a digit, but found %r" % ch.encode('utf-8'), - self.get_mark()) - length = 0 - while u'0' <= self.peek(length) <= u'9': - length += 1 - value = int(self.prefix(length)) - self.forward(length) - return value - - def scan_tag_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == u' ': - self.forward() - handle = self.scan_tag_directive_handle(start_mark) - while self.peek() == u' ': - self.forward() - prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) - - def scan_tag_directive_handle(self, start_mark): - # See the specification for details. - value = self.scan_tag_handle('directive', start_mark) - ch = self.peek() - if ch != u' ': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - return value - - def scan_tag_directive_prefix(self, start_mark): - # See the specification for details. 
- value = self.scan_tag_uri('directive', start_mark) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - return value - - def scan_directive_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in u'\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a comment or a line break, but found %r" - % ch.encode('utf-8'), self.get_mark()) - self.scan_line_break() - - def scan_anchor(self, TokenClass): - # The specification does not restrict characters for anchors and - # aliases. This may lead to problems, for instance, the document: - # [ *alias, value ] - # can be interpteted in two ways, as - # [ "value" ] - # and - # [ *alias , "value" ] - # Therefore we restrict aliases to numbers and ASCII letters. 
- start_mark = self.get_mark() - indicator = self.peek() - if indicator == u'*': - name = 'alias' - else: - name = 'anchor' - self.forward() - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`': - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - end_mark = self.get_mark() - return TokenClass(value, start_mark, end_mark) - - def scan_tag(self): - # See the specification for details. - start_mark = self.get_mark() - ch = self.peek(1) - if ch == u'<': - handle = None - self.forward(2) - suffix = self.scan_tag_uri('tag', start_mark) - if self.peek() != u'>': - raise ScannerError("while parsing a tag", start_mark, - "expected '>', but found %r" % self.peek().encode('utf-8'), - self.get_mark()) - self.forward() - elif ch in u'\0 \t\r\n\x85\u2028\u2029': - handle = None - suffix = u'!' - self.forward() - else: - length = 1 - use_handle = False - while ch not in u'\0 \r\n\x85\u2028\u2029': - if ch == u'!': - use_handle = True - break - length += 1 - ch = self.peek(length) - handle = u'!' - if use_handle: - handle = self.scan_tag_handle('tag', start_mark) - else: - handle = u'!' 
- self.forward() - suffix = self.scan_tag_uri('tag', start_mark) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a tag", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - value = (handle, suffix) - end_mark = self.get_mark() - return TagToken(value, start_mark, end_mark) - - def scan_block_scalar(self, style): - # See the specification for details. - - if style == '>': - folded = True - else: - folded = False - - chunks = [] - start_mark = self.get_mark() - - # Scan the header. - self.forward() - chomping, increment = self.scan_block_scalar_indicators(start_mark) - self.scan_block_scalar_ignored_line(start_mark) - - # Determine the indentation level and go to the first non-empty line. - min_indent = self.indent+1 - if min_indent < 1: - min_indent = 1 - if increment is None: - breaks, max_indent, end_mark = self.scan_block_scalar_indentation() - indent = max(min_indent, max_indent) - else: - indent = min_indent+increment-1 - breaks, end_mark = self.scan_block_scalar_breaks(indent) - line_break = u'' - - # Scan the inner part of the block scalar. - while self.column == indent and self.peek() != u'\0': - chunks.extend(breaks) - leading_non_space = self.peek() not in u' \t' - length = 0 - while self.peek(length) not in u'\0\r\n\x85\u2028\u2029': - length += 1 - chunks.append(self.prefix(length)) - self.forward(length) - line_break = self.scan_line_break() - breaks, end_mark = self.scan_block_scalar_breaks(indent) - if self.column == indent and self.peek() != u'\0': - - # Unfortunately, folding rules are ambiguous. 
- # - # This is the folding according to the specification: - - if folded and line_break == u'\n' \ - and leading_non_space and self.peek() not in u' \t': - if not breaks: - chunks.append(u' ') - else: - chunks.append(line_break) - - # This is Clark Evans's interpretation (also in the spec - # examples): - # - #if folded and line_break == u'\n': - # if not breaks: - # if self.peek() not in ' \t': - # chunks.append(u' ') - # else: - # chunks.append(line_break) - #else: - # chunks.append(line_break) - else: - break - - # Chomp the tail. - if chomping is not False: - chunks.append(line_break) - if chomping is True: - chunks.extend(breaks) - - # We are done. - return ScalarToken(u''.join(chunks), False, start_mark, end_mark, - style) - - def scan_block_scalar_indicators(self, start_mark): - # See the specification for details. - chomping = None - increment = None - ch = self.peek() - if ch in u'+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch in u'0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - elif ch in u'0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - ch = self.peek() - if ch in u'+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected chomping or indentation indicators, but found %r" - % ch.encode('utf-8'), self.get_mark()) - return chomping, increment - - def scan_block_scalar_ignored_line(self, start_mark): - # See the specification for details. 
- while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in u'\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected a comment or a line break, but found %r" - % ch.encode('utf-8'), self.get_mark()) - self.scan_line_break() - - def scan_block_scalar_indentation(self): - # See the specification for details. - chunks = [] - max_indent = 0 - end_mark = self.get_mark() - while self.peek() in u' \r\n\x85\u2028\u2029': - if self.peek() != u' ': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - else: - self.forward() - if self.column > max_indent: - max_indent = self.column - return chunks, max_indent, end_mark - - def scan_block_scalar_breaks(self, indent): - # See the specification for details. - chunks = [] - end_mark = self.get_mark() - while self.column < indent and self.peek() == u' ': - self.forward() - while self.peek() in u'\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - while self.column < indent and self.peek() == u' ': - self.forward() - return chunks, end_mark - - def scan_flow_scalar(self, style): - # See the specification for details. - # Note that we loose indentation rules for quoted scalars. Quoted - # scalars don't need to adhere indentation because " and ' clearly - # mark the beginning and the end of them. Therefore we are less - # restrictive then the specification requires. We only need to check - # that document separators are not included in scalars. 
- if style == '"': - double = True - else: - double = False - chunks = [] - start_mark = self.get_mark() - quote = self.peek() - self.forward() - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - while self.peek() != quote: - chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - self.forward() - end_mark = self.get_mark() - return ScalarToken(u''.join(chunks), False, start_mark, end_mark, - style) - - ESCAPE_REPLACEMENTS = { - u'0': u'\0', - u'a': u'\x07', - u'b': u'\x08', - u't': u'\x09', - u'\t': u'\x09', - u'n': u'\x0A', - u'v': u'\x0B', - u'f': u'\x0C', - u'r': u'\x0D', - u'e': u'\x1B', - u' ': u'\x20', - u'\"': u'\"', - u'\\': u'\\', - u'N': u'\x85', - u'_': u'\xA0', - u'L': u'\u2028', - u'P': u'\u2029', - } - - ESCAPE_CODES = { - u'x': 2, - u'u': 4, - u'U': 8, - } - - def scan_flow_scalar_non_spaces(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - length = 0 - while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029': - length += 1 - if length: - chunks.append(self.prefix(length)) - self.forward(length) - ch = self.peek() - if not double and ch == u'\'' and self.peek(1) == u'\'': - chunks.append(u'\'') - self.forward(2) - elif (double and ch == u'\'') or (not double and ch in u'\"\\'): - chunks.append(ch) - self.forward() - elif double and ch == u'\\': - self.forward() - ch = self.peek() - if ch in self.ESCAPE_REPLACEMENTS: - chunks.append(self.ESCAPE_REPLACEMENTS[ch]) - self.forward() - elif ch in self.ESCAPE_CODES: - length = self.ESCAPE_CODES[ch] - self.forward() - for k in range(length): - if self.peek(k) not in u'0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k).encode('utf-8')), self.get_mark()) - code = int(self.prefix(length), 16) - 
chunks.append(unichr(code)) - self.forward(length) - elif ch in u'\r\n\x85\u2028\u2029': - self.scan_line_break() - chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) - else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark()) - else: - return chunks - - def scan_flow_scalar_spaces(self, double, start_mark): - # See the specification for details. - chunks = [] - length = 0 - while self.peek(length) in u' \t': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch == u'\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) - elif ch in u'\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - breaks = self.scan_flow_scalar_breaks(double, start_mark) - if line_break != u'\n': - chunks.append(line_break) - elif not breaks: - chunks.append(u' ') - chunks.extend(breaks) - else: - chunks.append(whitespaces) - return chunks - - def scan_flow_scalar_breaks(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - # Instead of checking indentation, we check for document - # separators. - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected document separator", self.get_mark()) - while self.peek() in u' \t': - self.forward() - if self.peek() in u'\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - else: - return chunks - - def scan_plain(self): - # See the specification for details. - # We add an additional restriction for the flow context: - # plain scalars in the flow context cannot contain ',', ':' and '?'. - # We also keep track of the `allow_simple_key` flag here. - # Indentation rules are loosed for the flow context. 
- chunks = [] - start_mark = self.get_mark() - end_mark = start_mark - indent = self.indent+1 - # We allow zero indentation for scalars, but then we need to check for - # document separators at the beginning of the line. - #if indent == 0: - # indent = 1 - spaces = [] - while True: - length = 0 - if self.peek() == u'#': - break - while True: - ch = self.peek(length) - if ch in u'\0 \t\r\n\x85\u2028\u2029' \ - or (not self.flow_level and ch == u':' and - self.peek(length+1) in u'\0 \t\r\n\x85\u2028\u2029') \ - or (self.flow_level and ch in u',:?[]{}'): - break - length += 1 - # It's not clear what we should do with ':' in the flow context. - if (self.flow_level and ch == u':' - and self.peek(length+1) not in u'\0 \t\r\n\x85\u2028\u2029,[]{}'): - self.forward(length) - raise ScannerError("while scanning a plain scalar", start_mark, - "found unexpected ':'", self.get_mark(), - "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.") - if length == 0: - break - self.allow_simple_key = False - chunks.extend(spaces) - chunks.append(self.prefix(length)) - self.forward(length) - end_mark = self.get_mark() - spaces = self.scan_plain_spaces(indent, start_mark) - if not spaces or self.peek() == u'#' \ - or (not self.flow_level and self.column < indent): - break - return ScalarToken(u''.join(chunks), True, start_mark, end_mark) - - def scan_plain_spaces(self, indent, start_mark): - # See the specification for details. - # The specification is really confusing about tabs in plain scalars. - # We just forbid them completely. Do not use tabs in YAML! 
- chunks = [] - length = 0 - while self.peek(length) in u' ': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch in u'\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - self.allow_simple_key = True - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return - breaks = [] - while self.peek() in u' \r\n\x85\u2028\u2029': - if self.peek() == ' ': - self.forward() - else: - breaks.append(self.scan_line_break()) - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return - if line_break != u'\n': - chunks.append(line_break) - elif not breaks: - chunks.append(u' ') - chunks.extend(breaks) - elif whitespaces: - chunks.append(whitespaces) - return chunks - - def scan_tag_handle(self, name, start_mark): - # See the specification for details. - # For some strange reasons, the specification does not allow '_' in - # tag handles. I have allowed it anyway. - ch = self.peek() - if ch != u'!': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch.encode('utf-8'), - self.get_mark()) - length = 1 - ch = self.peek(length) - if ch != u' ': - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if ch != u'!': - self.forward(length) - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch.encode('utf-8'), - self.get_mark()) - length += 1 - value = self.prefix(length) - self.forward(length) - return value - - def scan_tag_uri(self, name, start_mark): - # See the specification for details. - # Note: we do not check if URI is well-formed. 
- chunks = [] - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?:@&=+$,_.!~*\'()[]%': - if ch == u'%': - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - chunks.append(self.scan_uri_escapes(name, start_mark)) - else: - length += 1 - ch = self.peek(length) - if length: - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - if not chunks: - raise ScannerError("while parsing a %s" % name, start_mark, - "expected URI, but found %r" % ch.encode('utf-8'), - self.get_mark()) - return u''.join(chunks) - - def scan_uri_escapes(self, name, start_mark): - # See the specification for details. - bytes = [] - mark = self.get_mark() - while self.peek() == u'%': - self.forward() - for k in range(2): - if self.peek(k) not in u'0123456789ABCDEFabcdef': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected URI escape sequence of 2 hexdecimal numbers, but found %r" % - (self.peek(k).encode('utf-8')), self.get_mark()) - bytes.append(chr(int(self.prefix(2), 16))) - self.forward(2) - try: - value = unicode(''.join(bytes), 'utf-8') - except UnicodeDecodeError, exc: - raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) - return value - - def scan_line_break(self): - # Transforms: - # '\r\n' : '\n' - # '\r' : '\n' - # '\n' : '\n' - # '\x85' : '\n' - # '\u2028' : '\u2028' - # '\u2029 : '\u2029' - # default : '' - ch = self.peek() - if ch in u'\r\n\x85': - if self.prefix(2) == u'\r\n': - self.forward(2) - else: - self.forward() - return u'\n' - elif ch in u'\u2028\u2029': - self.forward() - return ch - return u'' - -#try: -# import psyco -# psyco.bind(Scanner) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/serializer.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/serializer.py deleted file mode 100644 index 0bf1e96..0000000 --- 
a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/serializer.py +++ /dev/null @@ -1,111 +0,0 @@ - -__all__ = ['Serializer', 'SerializerError'] - -from error import YAMLError -from events import * -from nodes import * - -class SerializerError(YAMLError): - pass - -class Serializer(object): - - ANCHOR_TEMPLATE = u'id%03d' - - def __init__(self, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - self.use_encoding = encoding - self.use_explicit_start = explicit_start - self.use_explicit_end = explicit_end - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - self.closed = None - - def open(self): - if self.closed is None: - self.emit(StreamStartEvent(encoding=self.use_encoding)) - self.closed = False - elif self.closed: - raise SerializerError("serializer is closed") - else: - raise SerializerError("serializer is already opened") - - def close(self): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif not self.closed: - self.emit(StreamEndEvent()) - self.closed = True - - #def __del__(self): - # self.close() - - def serialize(self, node): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif self.closed: - raise SerializerError("serializer is closed") - self.emit(DocumentStartEvent(explicit=self.use_explicit_start, - version=self.use_version, tags=self.use_tags)) - self.anchor_node(node) - self.serialize_node(node, None, None) - self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - - def anchor_node(self, node): - if node in self.anchors: - if self.anchors[node] is None: - self.anchors[node] = self.generate_anchor(node) - else: - self.anchors[node] = None - if isinstance(node, SequenceNode): - for item in node.value: - self.anchor_node(item) - elif isinstance(node, MappingNode): - for key, value in node.value: - 
self.anchor_node(key) - self.anchor_node(value) - - def generate_anchor(self, node): - self.last_anchor_id += 1 - return self.ANCHOR_TEMPLATE % self.last_anchor_id - - def serialize_node(self, node, parent, index): - alias = self.anchors[node] - if node in self.serialized_nodes: - self.emit(AliasEvent(alias)) - else: - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if isinstance(node, ScalarNode): - detected_tag = self.resolve(ScalarNode, node.value, (True, False)) - default_tag = self.resolve(ScalarNode, node.value, (False, True)) - implicit = (node.tag == detected_tag), (node.tag == default_tag) - self.emit(ScalarEvent(alias, node.tag, implicit, node.value, - style=node.style)) - elif isinstance(node, SequenceNode): - implicit = (node.tag - == self.resolve(SequenceNode, node.value, True)) - self.emit(SequenceStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - index = 0 - for item in node.value: - self.serialize_node(item, node, index) - index += 1 - self.emit(SequenceEndEvent()) - elif isinstance(node, MappingNode): - implicit = (node.tag - == self.resolve(MappingNode, node.value, True)) - self.emit(MappingStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - for key, value in node.value: - self.serialize_node(key, node, None) - self.serialize_node(value, node, key) - self.emit(MappingEndEvent()) - self.ascend_resolver() - diff --git a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/tokens.py b/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/tokens.py deleted file mode 100644 index 4d0b48a..0000000 --- a/libs/PyYAML-3.10/build/lib.linux-x86_64-2.7/yaml/tokens.py +++ /dev/null @@ -1,104 +0,0 @@ - -class Token(object): - def __init__(self, start_mark, end_mark): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] - attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - 
for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -#class BOMToken(Token): -# id = '' - -class DirectiveToken(Token): - id = '' - def __init__(self, name, value, start_mark, end_mark): - self.name = name - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class DocumentStartToken(Token): - id = '' - -class DocumentEndToken(Token): - id = '' - -class StreamStartToken(Token): - id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndToken(Token): - id = '' - -class BlockSequenceStartToken(Token): - id = '' - -class BlockMappingStartToken(Token): - id = '' - -class BlockEndToken(Token): - id = '' - -class FlowSequenceStartToken(Token): - id = '[' - -class FlowMappingStartToken(Token): - id = '{' - -class FlowSequenceEndToken(Token): - id = ']' - -class FlowMappingEndToken(Token): - id = '}' - -class KeyToken(Token): - id = '?' 
- -class ValueToken(Token): - id = ':' - -class BlockEntryToken(Token): - id = '-' - -class FlowEntryToken(Token): - id = ',' - -class AliasToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class AnchorToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class TagToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class ScalarToken(Token): - id = '' - def __init__(self, value, plain, start_mark, end_mark, style=None): - self.value = value - self.plain = plain - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/__init__.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/__init__.py deleted file mode 100644 index 0033d9c..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/__init__.py +++ /dev/null @@ -1,312 +0,0 @@ - -from .error import * - -from .tokens import * -from .events import * -from .nodes import * - -from .loader import * -from .dumper import * - -__version__ = '3.10' -try: - from .cyaml import * - __with_libyaml__ = True -except ImportError: - __with_libyaml__ = False - -import io - -def scan(stream, Loader=Loader): - """ - Scan a YAML stream and produce scanning tokens. - """ - loader = Loader(stream) - try: - while loader.check_token(): - yield loader.get_token() - finally: - loader.dispose() - -def parse(stream, Loader=Loader): - """ - Parse a YAML stream and produce parsing events. - """ - loader = Loader(stream) - try: - while loader.check_event(): - yield loader.get_event() - finally: - loader.dispose() - -def compose(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding representation tree. 
- """ - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() - -def compose_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding representation trees. - """ - loader = Loader(stream) - try: - while loader.check_node(): - yield loader.get_node() - finally: - loader.dispose() - -def load(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - """ - loader = Loader(stream) - try: - return loader.get_single_data() - finally: - loader.dispose() - -def load_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - """ - loader = Loader(stream) - try: - while loader.check_data(): - yield loader.get_data() - finally: - loader.dispose() - -def safe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - Resolve only basic YAML tags. - """ - return load(stream, SafeLoader) - -def safe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - Resolve only basic YAML tags. - """ - return load_all(stream, SafeLoader) - -def emit(events, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - """ - Emit YAML parsing events into a stream. - If stream is None, return the produced string instead. 
- """ - getvalue = None - if stream is None: - stream = io.StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - try: - for event in events: - dumper.emit(event) - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize_all(nodes, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of representation trees into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for node in nodes: - dumper.serialize(node) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize(node, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a representation tree into a YAML stream. - If stream is None, return the produced string instead. - """ - return serialize_all([node], stream, Dumper=Dumper, **kwds) - -def dump_all(documents, stream=None, Dumper=Dumper, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of Python objects into a YAML stream. - If stream is None, return the produced string instead. 
- """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for data in documents: - dumper.represent(data) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def dump(data, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a Python object into a YAML stream. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=Dumper, **kwds) - -def safe_dump_all(documents, stream=None, **kwds): - """ - Serialize a sequence of Python objects into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all(documents, stream, Dumper=SafeDumper, **kwds) - -def safe_dump(data, stream=None, **kwds): - """ - Serialize a Python object into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=SafeDumper, **kwds) - -def add_implicit_resolver(tag, regexp, first=None, - Loader=Loader, Dumper=Dumper): - """ - Add an implicit scalar detector. - If an implicit scalar value matches the given regexp, - the corresponding tag is assigned to the scalar. - first is a sequence of possible initial characters or None. - """ - Loader.add_implicit_resolver(tag, regexp, first) - Dumper.add_implicit_resolver(tag, regexp, first) - -def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): - """ - Add a path based resolver for the given tag. 
- A path is a list of keys that forms a path - to a node in the representation tree. - Keys can be string values, integers, or None. - """ - Loader.add_path_resolver(tag, path, kind) - Dumper.add_path_resolver(tag, path, kind) - -def add_constructor(tag, constructor, Loader=Loader): - """ - Add a constructor for the given tag. - Constructor is a function that accepts a Loader instance - and a node object and produces the corresponding Python object. - """ - Loader.add_constructor(tag, constructor) - -def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): - """ - Add a multi-constructor for the given tag prefix. - Multi-constructor is called for a node if its tag starts with tag_prefix. - Multi-constructor accepts a Loader instance, a tag suffix, - and a node object and produces the corresponding Python object. - """ - Loader.add_multi_constructor(tag_prefix, multi_constructor) - -def add_representer(data_type, representer, Dumper=Dumper): - """ - Add a representer for the given type. - Representer is a function accepting a Dumper instance - and an instance of the given data type - and producing the corresponding representation node. - """ - Dumper.add_representer(data_type, representer) - -def add_multi_representer(data_type, multi_representer, Dumper=Dumper): - """ - Add a representer for the given type. - Multi-representer is a function accepting a Dumper instance - and an instance of the given data type or subtype - and producing the corresponding representation node. - """ - Dumper.add_multi_representer(data_type, multi_representer) - -class YAMLObjectMetaclass(type): - """ - The metaclass for YAMLObject. 
- """ - def __init__(cls, name, bases, kwds): - super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) - if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: - cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) - cls.yaml_dumper.add_representer(cls, cls.to_yaml) - -class YAMLObject(metaclass=YAMLObjectMetaclass): - """ - An object that can dump itself to a YAML stream - and load itself from a YAML stream. - """ - - __slots__ = () # no direct instantiation, so allow immutable subclasses - - yaml_loader = Loader - yaml_dumper = Dumper - - yaml_tag = None - yaml_flow_style = None - - @classmethod - def from_yaml(cls, loader, node): - """ - Convert a representation node to a Python object. - """ - return loader.construct_yaml_object(node, cls) - - @classmethod - def to_yaml(cls, dumper, data): - """ - Convert a Python object to a representation node. - """ - return dumper.represent_yaml_object(cls.yaml_tag, data, cls, - flow_style=cls.yaml_flow_style) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/composer.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/composer.py deleted file mode 100644 index d5c6a7a..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/composer.py +++ /dev/null @@ -1,139 +0,0 @@ - -__all__ = ['Composer', 'ComposerError'] - -from .error import MarkedYAMLError -from .events import * -from .nodes import * - -class ComposerError(MarkedYAMLError): - pass - -class Composer: - - def __init__(self): - self.anchors = {} - - def check_node(self): - # Drop the STREAM-START event. - if self.check_event(StreamStartEvent): - self.get_event() - - # If there are more documents available? - return not self.check_event(StreamEndEvent) - - def get_node(self): - # Get the root node of the next document. - if not self.check_event(StreamEndEvent): - return self.compose_document() - - def get_single_node(self): - # Drop the STREAM-START event. - self.get_event() - - # Compose a document if the stream is not empty. 
- document = None - if not self.check_event(StreamEndEvent): - document = self.compose_document() - - # Ensure that the stream contains no more documents. - if not self.check_event(StreamEndEvent): - event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) - - # Drop the STREAM-END event. - self.get_event() - - return document - - def compose_document(self): - # Drop the DOCUMENT-START event. - self.get_event() - - # Compose the root node. - node = self.compose_node(None, None) - - # Drop the DOCUMENT-END event. - self.get_event() - - self.anchors = {} - return node - - def compose_node(self, parent, index): - if self.check_event(AliasEvent): - event = self.get_event() - anchor = event.anchor - if anchor not in self.anchors: - raise ComposerError(None, None, "found undefined alias %r" - % anchor, event.start_mark) - return self.anchors[anchor] - event = self.peek_event() - anchor = event.anchor - if anchor is not None: - if anchor in self.anchors: - raise ComposerError("found duplicate anchor %r; first occurence" - % anchor, self.anchors[anchor].start_mark, - "second occurence", event.start_mark) - self.descend_resolver(parent, index) - if self.check_event(ScalarEvent): - node = self.compose_scalar_node(anchor) - elif self.check_event(SequenceStartEvent): - node = self.compose_sequence_node(anchor) - elif self.check_event(MappingStartEvent): - node = self.compose_mapping_node(anchor) - self.ascend_resolver() - return node - - def compose_scalar_node(self, anchor): - event = self.get_event() - tag = event.tag - if tag is None or tag == '!': - tag = self.resolve(ScalarNode, event.value, event.implicit) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) - if anchor is not None: - self.anchors[anchor] = node - return node - - def compose_sequence_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag 
- if tag is None or tag == '!': - tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - index = 0 - while not self.check_event(SequenceEndEvent): - node.value.append(self.compose_node(node, index)) - index += 1 - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - - def compose_mapping_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == '!': - tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() - item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) - item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value - node.value.append((item_key, item_value)) - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/constructor.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/constructor.py deleted file mode 100644 index 981543a..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/constructor.py +++ /dev/null @@ -1,686 +0,0 @@ - -__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', - 'ConstructorError'] - -from .error import * -from .nodes import * - -import collections, datetime, base64, binascii, re, sys, types - -class ConstructorError(MarkedYAMLError): - pass - -class BaseConstructor: - - yaml_constructors = {} - yaml_multi_constructors = {} - - def __init__(self): - self.constructed_objects = {} - self.recursive_objects 
= {} - self.state_generators = [] - self.deep_construct = False - - def check_data(self): - # If there are more documents available? - return self.check_node() - - def get_data(self): - # Construct and return the next document. - if self.check_node(): - return self.construct_document(self.get_node()) - - def get_single_data(self): - # Ensure that the stream contains a single document and construct it. - node = self.get_single_node() - if node is not None: - return self.construct_document(node) - return None - - def construct_document(self, node): - data = self.construct_object(node) - while self.state_generators: - state_generators = self.state_generators - self.state_generators = [] - for generator in state_generators: - for dummy in generator: - pass - self.constructed_objects = {} - self.recursive_objects = {} - self.deep_construct = False - return data - - def construct_object(self, node, deep=False): - if node in self.constructed_objects: - return self.constructed_objects[node] - if deep: - old_deep = self.deep_construct - self.deep_construct = True - if node in self.recursive_objects: - raise ConstructorError(None, None, - "found unconstructable recursive node", node.start_mark) - self.recursive_objects[node] = None - constructor = None - tag_suffix = None - if node.tag in self.yaml_constructors: - constructor = self.yaml_constructors[node.tag] - else: - for tag_prefix in self.yaml_multi_constructors: - if node.tag.startswith(tag_prefix): - tag_suffix = node.tag[len(tag_prefix):] - constructor = self.yaml_multi_constructors[tag_prefix] - break - else: - if None in self.yaml_multi_constructors: - tag_suffix = node.tag - constructor = self.yaml_multi_constructors[None] - elif None in self.yaml_constructors: - constructor = self.yaml_constructors[None] - elif isinstance(node, ScalarNode): - constructor = self.__class__.construct_scalar - elif isinstance(node, SequenceNode): - constructor = self.__class__.construct_sequence - elif isinstance(node, MappingNode): - 
constructor = self.__class__.construct_mapping - if tag_suffix is None: - data = constructor(self, node) - else: - data = constructor(self, tag_suffix, node) - if isinstance(data, types.GeneratorType): - generator = data - data = next(generator) - if self.deep_construct: - for dummy in generator: - pass - else: - self.state_generators.append(generator) - self.constructed_objects[node] = data - del self.recursive_objects[node] - if deep: - self.deep_construct = old_deep - return data - - def construct_scalar(self, node): - if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) - return node.value - - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] - - def construct_mapping(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - mapping = {} - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - if not isinstance(key, collections.Hashable): - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unhashable key", key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - mapping[key] = value - return mapping - - def construct_pairs(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - pairs = [] - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - value = self.construct_object(value_node, deep=deep) - pairs.append((key, value)) - return pairs - - @classmethod - def add_constructor(cls, tag, 
constructor): - if not 'yaml_constructors' in cls.__dict__: - cls.yaml_constructors = cls.yaml_constructors.copy() - cls.yaml_constructors[tag] = constructor - - @classmethod - def add_multi_constructor(cls, tag_prefix, multi_constructor): - if not 'yaml_multi_constructors' in cls.__dict__: - cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() - cls.yaml_multi_constructors[tag_prefix] = multi_constructor - -class SafeConstructor(BaseConstructor): - - def construct_scalar(self, node): - if isinstance(node, MappingNode): - for key_node, value_node in node.value: - if key_node.tag == 'tag:yaml.org,2002:value': - return self.construct_scalar(value_node) - return super().construct_scalar(node) - - def flatten_mapping(self, node): - merge = [] - index = 0 - while index < len(node.value): - key_node, value_node = node.value[index] - if key_node.tag == 'tag:yaml.org,2002:merge': - del node.value[index] - if isinstance(value_node, MappingNode): - self.flatten_mapping(value_node) - merge.extend(value_node.value) - elif isinstance(value_node, SequenceNode): - submerge = [] - for subnode in value_node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) - self.flatten_mapping(subnode) - submerge.append(subnode.value) - submerge.reverse() - for value in submerge: - merge.extend(value) - else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) - elif key_node.tag == 'tag:yaml.org,2002:value': - key_node.tag = 'tag:yaml.org,2002:str' - index += 1 - else: - index += 1 - if merge: - node.value = merge + node.value - - def construct_mapping(self, node, deep=False): - if isinstance(node, MappingNode): - self.flatten_mapping(node) - return super().construct_mapping(node, deep=deep) - - 
def construct_yaml_null(self, node): - self.construct_scalar(node) - return None - - bool_values = { - 'yes': True, - 'no': False, - 'true': True, - 'false': False, - 'on': True, - 'off': False, - } - - def construct_yaml_bool(self, node): - value = self.construct_scalar(node) - return self.bool_values[value.lower()] - - def construct_yaml_int(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '') - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '0': - return 0 - elif value.startswith('0b'): - return sign*int(value[2:], 2) - elif value.startswith('0x'): - return sign*int(value[2:], 16) - elif value[0] == '0': - return sign*int(value, 8) - elif ':' in value: - digits = [int(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*int(value) - - inf_value = 1e300 - while inf_value != inf_value*inf_value: - inf_value *= inf_value - nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). 
- - def construct_yaml_float(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '').lower() - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '.inf': - return sign*self.inf_value - elif value == '.nan': - return self.nan_value - elif ':' in value: - digits = [float(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0.0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*float(value) - - def construct_yaml_binary(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - timestamp_regexp = re.compile( - r'''^(?P[0-9][0-9][0-9][0-9]) - -(?P[0-9][0-9]?) - -(?P[0-9][0-9]?) - (?:(?:[Tt]|[ \t]+) - (?P[0-9][0-9]?) - :(?P[0-9][0-9]) - :(?P[0-9][0-9]) - (?:\.(?P[0-9]*))? - (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) 
- (?::(?P[0-9][0-9]))?))?)?$''', re.X) - - def construct_yaml_timestamp(self, node): - value = self.construct_scalar(node) - match = self.timestamp_regexp.match(node.value) - values = match.groupdict() - year = int(values['year']) - month = int(values['month']) - day = int(values['day']) - if not values['hour']: - return datetime.date(year, month, day) - hour = int(values['hour']) - minute = int(values['minute']) - second = int(values['second']) - fraction = 0 - if values['fraction']: - fraction = values['fraction'][:6] - while len(fraction) < 6: - fraction += '0' - fraction = int(fraction) - delta = None - if values['tz_sign']: - tz_hour = int(values['tz_hour']) - tz_minute = int(values['tz_minute'] or 0) - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) - if values['tz_sign'] == '-': - delta = -delta - data = datetime.datetime(year, month, day, hour, minute, second, fraction) - if delta: - data -= delta - return data - - def construct_yaml_omap(self, node): - # Note: we do not check for duplicate keys, because it's too - # CPU-expensive. - omap = [] - yield omap - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - omap.append((key, value)) - - def construct_yaml_pairs(self, node): - # Note: the same code as `construct_yaml_omap`. 
- pairs = [] - yield pairs - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - pairs.append((key, value)) - - def construct_yaml_set(self, node): - data = set() - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_str(self, node): - return self.construct_scalar(node) - - def construct_yaml_seq(self, node): - data = [] - yield data - data.extend(self.construct_sequence(node)) - - def construct_yaml_map(self, node): - data = {} - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_object(self, node, cls): - data = cls.__new__(cls) - yield data - if hasattr(data, '__setstate__'): - state = self.construct_mapping(node, deep=True) - data.__setstate__(state) - else: - state = self.construct_mapping(node) - data.__dict__.update(state) - - def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag, - node.start_mark) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:null', - SafeConstructor.construct_yaml_null) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:bool', - SafeConstructor.construct_yaml_bool) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:int', - SafeConstructor.construct_yaml_int) - 
-SafeConstructor.add_constructor( - 'tag:yaml.org,2002:float', - SafeConstructor.construct_yaml_float) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:binary', - SafeConstructor.construct_yaml_binary) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:timestamp', - SafeConstructor.construct_yaml_timestamp) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:omap', - SafeConstructor.construct_yaml_omap) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:pairs', - SafeConstructor.construct_yaml_pairs) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:set', - SafeConstructor.construct_yaml_set) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:str', - SafeConstructor.construct_yaml_str) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:seq', - SafeConstructor.construct_yaml_seq) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:map', - SafeConstructor.construct_yaml_map) - -SafeConstructor.add_constructor(None, - SafeConstructor.construct_undefined) - -class Constructor(SafeConstructor): - - def construct_python_str(self, node): - return self.construct_scalar(node) - - def construct_python_unicode(self, node): - return self.construct_scalar(node) - - def construct_python_bytes(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - def construct_python_long(self, node): - return self.construct_yaml_int(node) - - def construct_python_complex(self, node): - return complex(self.construct_scalar(node)) - - def construct_python_tuple(self, node): - return tuple(self.construct_sequence(node)) - - def 
find_python_module(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python module", mark, - "expected non-empty name appended to the tag", mark) - try: - __import__(name) - except ImportError as exc: - raise ConstructorError("while constructing a Python module", mark, - "cannot find module %r (%s)" % (name, exc), mark) - return sys.modules[name] - - def find_python_name(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python object", mark, - "expected non-empty name appended to the tag", mark) - if '.' in name: - module_name, object_name = name.rsplit('.', 1) - else: - module_name = 'builtins' - object_name = name - try: - __import__(module_name) - except ImportError as exc: - raise ConstructorError("while constructing a Python object", mark, - "cannot find module %r (%s)" % (module_name, exc), mark) - module = sys.modules[module_name] - if not hasattr(module, object_name): - raise ConstructorError("while constructing a Python object", mark, - "cannot find %r in the module %r" - % (object_name, module.__name__), mark) - return getattr(module, object_name) - - def construct_python_name(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python name", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_name(suffix, node.start_mark) - - def construct_python_module(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python module", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_module(suffix, node.start_mark) - - def make_python_instance(self, suffix, node, - args=None, kwds=None, newobj=False): - if not args: - args = [] - if not kwds: - kwds = {} - cls = self.find_python_name(suffix, node.start_mark) - if newobj and isinstance(cls, type): - return 
cls.__new__(cls, *args, **kwds) - else: - return cls(*args, **kwds) - - def set_python_instance_state(self, instance, state): - if hasattr(instance, '__setstate__'): - instance.__setstate__(state) - else: - slotstate = {} - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if hasattr(instance, '__dict__'): - instance.__dict__.update(state) - elif state: - slotstate.update(state) - for key, value in slotstate.items(): - setattr(object, key, value) - - def construct_python_object(self, suffix, node): - # Format: - # !!python/object:module.name { ... state ... } - instance = self.make_python_instance(suffix, node, newobj=True) - yield instance - deep = hasattr(instance, '__setstate__') - state = self.construct_mapping(node, deep=deep) - self.set_python_instance_state(instance, state) - - def construct_python_object_apply(self, suffix, node, newobj=False): - # Format: - # !!python/object/apply # (or !!python/object/new) - # args: [ ... arguments ... ] - # kwds: { ... keywords ... } - # state: ... state ... - # listitems: [ ... listitems ... ] - # dictitems: { ... dictitems ... } - # or short format: - # !!python/object/apply [ ... arguments ... ] - # The difference between !!python/object/apply and !!python/object/new - # is how an object is created, check make_python_instance for details. 
- if isinstance(node, SequenceNode): - args = self.construct_sequence(node, deep=True) - kwds = {} - state = {} - listitems = [] - dictitems = {} - else: - value = self.construct_mapping(node, deep=True) - args = value.get('args', []) - kwds = value.get('kwds', {}) - state = value.get('state', {}) - listitems = value.get('listitems', []) - dictitems = value.get('dictitems', {}) - instance = self.make_python_instance(suffix, node, args, kwds, newobj) - if state: - self.set_python_instance_state(instance, state) - if listitems: - instance.extend(listitems) - if dictitems: - for key in dictitems: - instance[key] = dictitems[key] - return instance - - def construct_python_object_new(self, suffix, node): - return self.construct_python_object_apply(suffix, node, newobj=True) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/none', - Constructor.construct_yaml_null) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/bool', - Constructor.construct_yaml_bool) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/str', - Constructor.construct_python_str) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/unicode', - Constructor.construct_python_unicode) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/bytes', - Constructor.construct_python_bytes) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/int', - Constructor.construct_yaml_int) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/long', - Constructor.construct_python_long) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/float', - Constructor.construct_yaml_float) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/complex', - Constructor.construct_python_complex) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/list', - Constructor.construct_yaml_seq) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/tuple', - Constructor.construct_python_tuple) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/dict', - 
Constructor.construct_yaml_map) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/name:', - Constructor.construct_python_name) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/module:', - Constructor.construct_python_module) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object:', - Constructor.construct_python_object) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/apply:', - Constructor.construct_python_object_apply) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/new:', - Constructor.construct_python_object_new) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/cyaml.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/cyaml.py deleted file mode 100644 index d5cb87e..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/cyaml.py +++ /dev/null @@ -1,85 +0,0 @@ - -__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', - 'CBaseDumper', 'CSafeDumper', 'CDumper'] - -from _yaml import CParser, CEmitter - -from .constructor import * - -from .serializer import * -from .representer import * - -from .resolver import * - -class CBaseLoader(CParser, BaseConstructor, BaseResolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class CSafeLoader(CParser, SafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class CLoader(CParser, Constructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - Constructor.__init__(self) - Resolver.__init__(self) - -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - 
CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CSafeDumper(CEmitter, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CDumper(CEmitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/dumper.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/dumper.py deleted file mode 100644 index 0b69128..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/dumper.py +++ /dev/null @@ -1,62 +0,0 
@@ - -__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] - -from .emitter import * -from .serializer import * -from .representer import * -from .resolver import * - -class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class Dumper(Emitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - 
allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/emitter.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/emitter.py deleted file mode 100644 index 34cb145..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/emitter.py +++ /dev/null @@ -1,1137 +0,0 @@ - -# Emitter expects events obeying the following grammar: -# stream ::= STREAM-START document* STREAM-END -# document ::= DOCUMENT-START node DOCUMENT-END -# node ::= SCALAR | sequence | mapping -# sequence ::= SEQUENCE-START node* SEQUENCE-END -# mapping ::= MAPPING-START (node node)* MAPPING-END - -__all__ = ['Emitter', 'EmitterError'] - -from .error import YAMLError -from .events import * - -class EmitterError(YAMLError): - pass - -class ScalarAnalysis: - def __init__(self, scalar, empty, multiline, - allow_flow_plain, allow_block_plain, - allow_single_quoted, allow_double_quoted, - allow_block): - self.scalar = scalar - self.empty = empty - self.multiline = multiline - self.allow_flow_plain = allow_flow_plain - self.allow_block_plain = allow_block_plain - self.allow_single_quoted = allow_single_quoted - self.allow_double_quoted = allow_double_quoted - self.allow_block = allow_block - -class Emitter: - - DEFAULT_TAG_PREFIXES = { - '!' : '!', - 'tag:yaml.org,2002:' : '!!', - } - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - - # The stream should have the methods `write` and possibly `flush`. - self.stream = stream - - # Encoding can be overriden by STREAM-START. - self.encoding = None - - # Emitter is a state machine with a stack of states to handle nested - # structures. 
- self.states = [] - self.state = self.expect_stream_start - - # Current event and the event queue. - self.events = [] - self.event = None - - # The current indentation level and the stack of previous indents. - self.indents = [] - self.indent = None - - # Flow level. - self.flow_level = 0 - - # Contexts. - self.root_context = False - self.sequence_context = False - self.mapping_context = False - self.simple_key_context = False - - # Characteristics of the last emitted character: - # - current position. - # - is it a whitespace? - # - is it an indention character - # (indentation space, '-', '?', or ':')? - self.line = 0 - self.column = 0 - self.whitespace = True - self.indention = True - - # Whether the document requires an explicit document indicator - self.open_ended = False - - # Formatting details. - self.canonical = canonical - self.allow_unicode = allow_unicode - self.best_indent = 2 - if indent and 1 < indent < 10: - self.best_indent = indent - self.best_width = 80 - if width and width > self.best_indent*2: - self.best_width = width - self.best_line_break = '\n' - if line_break in ['\r', '\n', '\r\n']: - self.best_line_break = line_break - - # Tag prefixes. - self.tag_prefixes = None - - # Prepared anchor and tag. - self.prepared_anchor = None - self.prepared_tag = None - - # Scalar analysis and style. - self.analysis = None - self.style = None - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def emit(self, event): - self.events.append(event) - while not self.need_more_events(): - self.event = self.events.pop(0) - self.state() - self.event = None - - # In some cases, we wait for a few next events before emitting. 
- - def need_more_events(self): - if not self.events: - return True - event = self.events[0] - if isinstance(event, DocumentStartEvent): - return self.need_events(1) - elif isinstance(event, SequenceStartEvent): - return self.need_events(2) - elif isinstance(event, MappingStartEvent): - return self.need_events(3) - else: - return False - - def need_events(self, count): - level = 0 - for event in self.events[1:]: - if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): - level += 1 - elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): - level -= 1 - elif isinstance(event, StreamEndEvent): - level = -1 - if level < 0: - return False - return (len(self.events) < count+1) - - def increase_indent(self, flow=False, indentless=False): - self.indents.append(self.indent) - if self.indent is None: - if flow: - self.indent = self.best_indent - else: - self.indent = 0 - elif not indentless: - self.indent += self.best_indent - - # States. - - # Stream handlers. - - def expect_stream_start(self): - if isinstance(self.event, StreamStartEvent): - if self.event.encoding and not hasattr(self.stream, 'encoding'): - self.encoding = self.event.encoding - self.write_stream_start() - self.state = self.expect_first_document_start - else: - raise EmitterError("expected StreamStartEvent, but got %s" - % self.event) - - def expect_nothing(self): - raise EmitterError("expected nothing, but got %s" % self.event) - - # Document handlers. 
- - def expect_first_document_start(self): - return self.expect_document_start(first=True) - - def expect_document_start(self, first=False): - if isinstance(self.event, DocumentStartEvent): - if (self.event.version or self.event.tags) and self.open_ended: - self.write_indicator('...', True) - self.write_indent() - if self.event.version: - version_text = self.prepare_version(self.event.version) - self.write_version_directive(version_text) - self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() - if self.event.tags: - handles = sorted(self.event.tags.keys()) - for handle in handles: - prefix = self.event.tags[handle] - self.tag_prefixes[prefix] = handle - handle_text = self.prepare_tag_handle(handle) - prefix_text = self.prepare_tag_prefix(prefix) - self.write_tag_directive(handle_text, prefix_text) - implicit = (first and not self.event.explicit and not self.canonical - and not self.event.version and not self.event.tags - and not self.check_empty_document()) - if not implicit: - self.write_indent() - self.write_indicator('---', True) - if self.canonical: - self.write_indent() - self.state = self.expect_document_root - elif isinstance(self.event, StreamEndEvent): - if self.open_ended: - self.write_indicator('...', True) - self.write_indent() - self.write_stream_end() - self.state = self.expect_nothing - else: - raise EmitterError("expected DocumentStartEvent, but got %s" - % self.event) - - def expect_document_end(self): - if isinstance(self.event, DocumentEndEvent): - self.write_indent() - if self.event.explicit: - self.write_indicator('...', True) - self.write_indent() - self.flush_stream() - self.state = self.expect_document_start - else: - raise EmitterError("expected DocumentEndEvent, but got %s" - % self.event) - - def expect_document_root(self): - self.states.append(self.expect_document_end) - self.expect_node(root=True) - - # Node handlers. 
- - def expect_node(self, root=False, sequence=False, mapping=False, - simple_key=False): - self.root_context = root - self.sequence_context = sequence - self.mapping_context = mapping - self.simple_key_context = simple_key - if isinstance(self.event, AliasEvent): - self.expect_alias() - elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): - self.process_anchor('&') - self.process_tag() - if isinstance(self.event, ScalarEvent): - self.expect_scalar() - elif isinstance(self.event, SequenceStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_sequence(): - self.expect_flow_sequence() - else: - self.expect_block_sequence() - elif isinstance(self.event, MappingStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_mapping(): - self.expect_flow_mapping() - else: - self.expect_block_mapping() - else: - raise EmitterError("expected NodeEvent, but got %s" % self.event) - - def expect_alias(self): - if self.event.anchor is None: - raise EmitterError("anchor is not specified for alias") - self.process_anchor('*') - self.state = self.states.pop() - - def expect_scalar(self): - self.increase_indent(flow=True) - self.process_scalar() - self.indent = self.indents.pop() - self.state = self.states.pop() - - # Flow sequence handlers. 
- - def expect_flow_sequence(self): - self.write_indicator('[', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_sequence_item - - def expect_first_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(']', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - def expect_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator(']', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - # Flow mapping handlers. 
- - def expect_flow_mapping(self): - self.write_indicator('{', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_mapping_key - - def expect_first_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator('}', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator('}', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - def expect_flow_mapping_value(self): - if self.canonical or self.column > self.best_width: - self.write_indent() - self.write_indicator(':', True) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - # Block sequence handlers. 
- - def expect_block_sequence(self): - indentless = (self.mapping_context and not self.indention) - self.increase_indent(flow=False, indentless=indentless) - self.state = self.expect_first_block_sequence_item - - def expect_first_block_sequence_item(self): - return self.expect_block_sequence_item(first=True) - - def expect_block_sequence_item(self, first=False): - if not first and isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - self.write_indicator('-', True, indention=True) - self.states.append(self.expect_block_sequence_item) - self.expect_node(sequence=True) - - # Block mapping handlers. - - def expect_block_mapping(self): - self.increase_indent(flow=False) - self.state = self.expect_first_block_mapping_key - - def expect_first_block_mapping_key(self): - return self.expect_block_mapping_key(first=True) - - def expect_block_mapping_key(self, first=False): - if not first and isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - if self.check_simple_key(): - self.states.append(self.expect_block_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True, indention=True) - self.states.append(self.expect_block_mapping_value) - self.expect_node(mapping=True) - - def expect_block_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - def expect_block_mapping_value(self): - self.write_indent() - self.write_indicator(':', True, indention=True) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - # Checkers. 
- - def check_empty_sequence(self): - return (isinstance(self.event, SequenceStartEvent) and self.events - and isinstance(self.events[0], SequenceEndEvent)) - - def check_empty_mapping(self): - return (isinstance(self.event, MappingStartEvent) and self.events - and isinstance(self.events[0], MappingEndEvent)) - - def check_empty_document(self): - if not isinstance(self.event, DocumentStartEvent) or not self.events: - return False - event = self.events[0] - return (isinstance(event, ScalarEvent) and event.anchor is None - and event.tag is None and event.implicit and event.value == '') - - def check_simple_key(self): - length = 0 - if isinstance(self.event, NodeEvent) and self.event.anchor is not None: - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - length += len(self.prepared_anchor) - if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ - and self.event.tag is not None: - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(self.event.tag) - length += len(self.prepared_tag) - if isinstance(self.event, ScalarEvent): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - length += len(self.analysis.scalar) - return (length < 128 and (isinstance(self.event, AliasEvent) - or (isinstance(self.event, ScalarEvent) - and not self.analysis.empty and not self.analysis.multiline) - or self.check_empty_sequence() or self.check_empty_mapping())) - - # Anchor, Tag, and Scalar processors. 
- - def process_anchor(self, indicator): - if self.event.anchor is None: - self.prepared_anchor = None - return - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - if self.prepared_anchor: - self.write_indicator(indicator+self.prepared_anchor, True) - self.prepared_anchor = None - - def process_tag(self): - tag = self.event.tag - if isinstance(self.event, ScalarEvent): - if self.style is None: - self.style = self.choose_scalar_style() - if ((not self.canonical or tag is None) and - ((self.style == '' and self.event.implicit[0]) - or (self.style != '' and self.event.implicit[1]))): - self.prepared_tag = None - return - if self.event.implicit[0] and tag is None: - tag = '!' - self.prepared_tag = None - else: - if (not self.canonical or tag is None) and self.event.implicit: - self.prepared_tag = None - return - if tag is None: - raise EmitterError("tag is not specified") - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(tag) - if self.prepared_tag: - self.write_indicator(self.prepared_tag, True) - self.prepared_tag = None - - def choose_scalar_style(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.event.style == '"' or self.canonical: - return '"' - if not self.event.style and self.event.implicit[0]: - if (not (self.simple_key_context and - (self.analysis.empty or self.analysis.multiline)) - and (self.flow_level and self.analysis.allow_flow_plain - or (not self.flow_level and self.analysis.allow_block_plain))): - return '' - if self.event.style and self.event.style in '|>': - if (not self.flow_level and not self.simple_key_context - and self.analysis.allow_block): - return self.event.style - if not self.event.style or self.event.style == '\'': - if (self.analysis.allow_single_quoted and - not (self.simple_key_context and self.analysis.multiline)): - return '\'' - return '"' - - def process_scalar(self): - if self.analysis is None: - self.analysis = 
self.analyze_scalar(self.event.value) - if self.style is None: - self.style = self.choose_scalar_style() - split = (not self.simple_key_context) - #if self.analysis.multiline and split \ - # and (not self.style or self.style in '\'\"'): - # self.write_indent() - if self.style == '"': - self.write_double_quoted(self.analysis.scalar, split) - elif self.style == '\'': - self.write_single_quoted(self.analysis.scalar, split) - elif self.style == '>': - self.write_folded(self.analysis.scalar) - elif self.style == '|': - self.write_literal(self.analysis.scalar) - else: - self.write_plain(self.analysis.scalar, split) - self.analysis = None - self.style = None - - # Analyzers. - - def prepare_version(self, version): - major, minor = version - if major != 1: - raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) - return '%d.%d' % (major, minor) - - def prepare_tag_handle(self, handle): - if not handle: - raise EmitterError("tag handle must not be empty") - if handle[0] != '!' or handle[-1] != '!': - raise EmitterError("tag handle must start and end with '!': %r" % handle) - for ch in handle[1:-1]: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the tag handle: %r" - % (ch, handle)) - return handle - - def prepare_tag_prefix(self, prefix): - if not prefix: - raise EmitterError("tag prefix must not be empty") - chunks = [] - start = end = 0 - if prefix[0] == '!': - end = 1 - while end < len(prefix): - ch = prefix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?!:@&=+$,_.~*\'()[]': - end += 1 - else: - if start < end: - chunks.append(prefix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ord(ch)) - if start < end: - chunks.append(prefix[start:end]) - return ''.join(chunks) - - def prepare_tag(self, tag): - if not tag: - raise EmitterError("tag must not be empty") - if tag == '!': - 
return tag - handle = None - suffix = tag - prefixes = sorted(self.tag_prefixes.keys()) - for prefix in prefixes: - if tag.startswith(prefix) \ - and (prefix == '!' or len(prefix) < len(tag)): - handle = self.tag_prefixes[prefix] - suffix = tag[len(prefix):] - chunks = [] - start = end = 0 - while end < len(suffix): - ch = suffix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.~*\'()[]' \ - or (ch == '!' and handle != '!'): - end += 1 - else: - if start < end: - chunks.append(suffix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ord(ch)) - if start < end: - chunks.append(suffix[start:end]) - suffix_text = ''.join(chunks) - if handle: - return '%s%s' % (handle, suffix_text) - else: - return '!<%s>' % suffix_text - - def prepare_anchor(self, anchor): - if not anchor: - raise EmitterError("anchor must not be empty") - for ch in anchor: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the anchor: %r" - % (ch, anchor)) - return anchor - - def analyze_scalar(self, scalar): - - # Empty scalar is a special case. - if not scalar: - return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, - allow_flow_plain=False, allow_block_plain=True, - allow_single_quoted=True, allow_double_quoted=True, - allow_block=False) - - # Indicators and special characters. - block_indicators = False - flow_indicators = False - line_breaks = False - special_characters = False - - # Important whitespace combinations. - leading_space = False - leading_break = False - trailing_space = False - trailing_break = False - break_space = False - space_break = False - - # Check document indicators. - if scalar.startswith('---') or scalar.startswith('...'): - block_indicators = True - flow_indicators = True - - # First character or preceded by a whitespace. 
- preceeded_by_whitespace = True - - # Last character or followed by a whitespace. - followed_by_whitespace = (len(scalar) == 1 or - scalar[1] in '\0 \t\r\n\x85\u2028\u2029') - - # The previous character is a space. - previous_space = False - - # The previous character is a break. - previous_break = False - - index = 0 - while index < len(scalar): - ch = scalar[index] - - # Check for indicators. - if index == 0: - # Leading indicators are special characters. - if ch in '#,[]{}&*!|>\'\"%@`': - flow_indicators = True - block_indicators = True - if ch in '?:': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '-' and followed_by_whitespace: - flow_indicators = True - block_indicators = True - else: - # Some indicators cannot appear within a scalar as well. - if ch in ',?[]{}': - flow_indicators = True - if ch == ':': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '#' and preceeded_by_whitespace: - flow_indicators = True - block_indicators = True - - # Check for line breaks, special, and unicode characters. - if ch in '\n\x85\u2028\u2029': - line_breaks = True - if not (ch == '\n' or '\x20' <= ch <= '\x7E'): - if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' - or '\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': - unicode_characters = True - if not self.allow_unicode: - special_characters = True - else: - special_characters = True - - # Detect important whitespace combinations. - if ch == ' ': - if index == 0: - leading_space = True - if index == len(scalar)-1: - trailing_space = True - if previous_break: - break_space = True - previous_space = True - previous_break = False - elif ch in '\n\x85\u2028\u2029': - if index == 0: - leading_break = True - if index == len(scalar)-1: - trailing_break = True - if previous_space: - space_break = True - previous_space = False - previous_break = True - else: - previous_space = False - previous_break = False - - # Prepare for the next character. 
- index += 1 - preceeded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') - followed_by_whitespace = (index+1 >= len(scalar) or - scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') - - # Let's decide what styles are allowed. - allow_flow_plain = True - allow_block_plain = True - allow_single_quoted = True - allow_double_quoted = True - allow_block = True - - # Leading and trailing whitespaces are bad for plain scalars. - if (leading_space or leading_break - or trailing_space or trailing_break): - allow_flow_plain = allow_block_plain = False - - # We do not permit trailing spaces for block scalars. - if trailing_space: - allow_block = False - - # Spaces at the beginning of a new line are only acceptable for block - # scalars. - if break_space: - allow_flow_plain = allow_block_plain = allow_single_quoted = False - - # Spaces followed by breaks, as well as special character are only - # allowed for double quoted scalars. - if space_break or special_characters: - allow_flow_plain = allow_block_plain = \ - allow_single_quoted = allow_block = False - - # Although the plain scalar writer supports breaks, we never emit - # multiline plain scalars. - if line_breaks: - allow_flow_plain = allow_block_plain = False - - # Flow indicators are forbidden for flow plain scalars. - if flow_indicators: - allow_flow_plain = False - - # Block indicators are forbidden for block plain scalars. - if block_indicators: - allow_block_plain = False - - return ScalarAnalysis(scalar=scalar, - empty=False, multiline=line_breaks, - allow_flow_plain=allow_flow_plain, - allow_block_plain=allow_block_plain, - allow_single_quoted=allow_single_quoted, - allow_double_quoted=allow_double_quoted, - allow_block=allow_block) - - # Writers. - - def flush_stream(self): - if hasattr(self.stream, 'flush'): - self.stream.flush() - - def write_stream_start(self): - # Write BOM if needed. 
- if self.encoding and self.encoding.startswith('utf-16'): - self.stream.write('\uFEFF'.encode(self.encoding)) - - def write_stream_end(self): - self.flush_stream() - - def write_indicator(self, indicator, need_whitespace, - whitespace=False, indention=False): - if self.whitespace or not need_whitespace: - data = indicator - else: - data = ' '+indicator - self.whitespace = whitespace - self.indention = self.indention and indention - self.column += len(data) - self.open_ended = False - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_indent(self): - indent = self.indent or 0 - if not self.indention or self.column > indent \ - or (self.column == indent and not self.whitespace): - self.write_line_break() - if self.column < indent: - self.whitespace = True - data = ' '*(indent-self.column) - self.column = indent - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_line_break(self, data=None): - if data is None: - data = self.best_line_break - self.whitespace = True - self.indention = True - self.line += 1 - self.column = 0 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_version_directive(self, version_text): - data = '%%YAML %s' % version_text - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - def write_tag_directive(self, handle_text, prefix_text): - data = '%%TAG %s %s' % (handle_text, prefix_text) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - # Scalar streams. 
- - def write_single_quoted(self, text, split=True): - self.write_indicator('\'', True) - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch is None or ch != ' ': - if start+1 == end and self.column > self.best_width and split \ - and start != 0 and end != len(text): - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch == '\'': - data = '\'\'' - self.column += 2 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end + 1 - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - self.write_indicator('\'', False) - - ESCAPE_REPLACEMENTS = { - '\0': '0', - '\x07': 'a', - '\x08': 'b', - '\x09': 't', - '\x0A': 'n', - '\x0B': 'v', - '\x0C': 'f', - '\x0D': 'r', - '\x1B': 'e', - '\"': '\"', - '\\': '\\', - '\x85': 'N', - '\xA0': '_', - '\u2028': 'L', - '\u2029': 'P', - } - - def write_double_quoted(self, text, split=True): - self.write_indicator('"', True) - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ - or not ('\x20' <= ch <= '\x7E' - or (self.allow_unicode - and ('\xA0' <= ch <= '\uD7FF' - or '\uE000' <= ch <= '\uFFFD'))): - if start < end: - data = text[start:end] - 
self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - if ch in self.ESCAPE_REPLACEMENTS: - data = '\\'+self.ESCAPE_REPLACEMENTS[ch] - elif ch <= '\xFF': - data = '\\x%02X' % ord(ch) - elif ch <= '\uFFFF': - data = '\\u%04X' % ord(ch) - else: - data = '\\U%08X' % ord(ch) - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end+1 - if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ - and self.column+(end-start) > self.best_width and split: - data = text[start:end]+'\\' - if start < end: - start = end - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_indent() - self.whitespace = False - self.indention = False - if text[start] == ' ': - data = '\\' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - end += 1 - self.write_indicator('"', False) - - def determine_block_hints(self, text): - hints = '' - if text: - if text[0] in ' \n\x85\u2028\u2029': - hints += str(self.best_indent) - if text[-1] not in '\n\x85\u2028\u2029': - hints += '-' - elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': - hints += '+' - return hints - - def write_folded(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('>'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - leading_space = True - spaces = False - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if not leading_space and ch is not None and ch != ' ' \ - and text[start] == '\n': - self.write_line_break() - leading_space = (ch == ' ') - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is 
not None: - self.write_indent() - start = end - elif spaces: - if ch != ' ': - if start+1 == end and self.column > self.best_width: - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - spaces = (ch == ' ') - end += 1 - - def write_literal(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('|'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - else: - if ch is None or ch in '\n\x85\u2028\u2029': - data = text[start:end] - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - - def write_plain(self, text, split=True): - if self.root_context: - self.open_ended = True - if not text: - return - if not self.whitespace: - data = ' ' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.whitespace = False - self.indention = False - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch != ' ': - if start+1 == end and self.column > 
self.best_width and split: - self.write_indent() - self.whitespace = False - self.indention = False - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - self.whitespace = False - self.indention = False - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/error.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/error.py deleted file mode 100644 index b796b4d..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/error.py +++ /dev/null @@ -1,75 +0,0 @@ - -__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] - -class Mark: - - def __init__(self, name, index, line, column, buffer, pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self, indent=4, max_length=75): - if self.buffer is None: - return None - head = '' - start = self.pointer - while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': - start -= 1 - if self.pointer-start > max_length/2-1: - head = ' ... ' - start += 5 - break - tail = '' - end = self.pointer - while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': - end += 1 - if end-self.pointer > max_length/2-1: - tail = ' ... 
' - end -= 5 - break - snippet = self.buffer[start:end] - return ' '*indent + head + snippet + tail + '\n' \ - + ' '*(indent+self.pointer-start+len(head)) + '^' - - def __str__(self): - snippet = self.get_snippet() - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - if snippet is not None: - where += ":\n"+snippet - return where - -class YAMLError(Exception): - pass - -class MarkedYAMLError(YAMLError): - - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - self.context = context - self.context_mark = context_mark - self.problem = problem - self.problem_mark = problem_mark - self.note = note - - def __str__(self): - lines = [] - if self.context is not None: - lines.append(self.context) - if self.context_mark is not None \ - and (self.problem is None or self.problem_mark is None - or self.context_mark.name != self.problem_mark.name - or self.context_mark.line != self.problem_mark.line - or self.context_mark.column != self.problem_mark.column): - lines.append(str(self.context_mark)) - if self.problem is not None: - lines.append(self.problem) - if self.problem_mark is not None: - lines.append(str(self.problem_mark)) - if self.note is not None: - lines.append(self.note) - return '\n'.join(lines) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/events.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/events.py deleted file mode 100644 index f79ad38..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/events.py +++ /dev/null @@ -1,86 +0,0 @@ - -# Abstract classes. 
- -class Event(object): - def __init__(self, start_mark=None, end_mark=None): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -class NodeEvent(Event): - def __init__(self, anchor, start_mark=None, end_mark=None): - self.anchor = anchor - self.start_mark = start_mark - self.end_mark = end_mark - -class CollectionStartEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, - flow_style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class CollectionEndEvent(Event): - pass - -# Implementations. - -class StreamStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndEvent(Event): - pass - -class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - self.version = version - self.tags = tags - -class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - -class AliasEvent(NodeEvent): - pass - -class ScalarEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, value, - start_mark=None, end_mark=None, style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class SequenceStartEvent(CollectionStartEvent): - pass 
- -class SequenceEndEvent(CollectionEndEvent): - pass - -class MappingStartEvent(CollectionStartEvent): - pass - -class MappingEndEvent(CollectionEndEvent): - pass - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/loader.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/loader.py deleted file mode 100644 index 08c8f01..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/loader.py +++ /dev/null @@ -1,40 +0,0 @@ - -__all__ = ['BaseLoader', 'SafeLoader', 'Loader'] - -from .reader import * -from .scanner import * -from .parser import * -from .composer import * -from .constructor import * -from .resolver import * - -class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/nodes.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/nodes.py deleted file mode 100644 index c4f070c..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/nodes.py +++ /dev/null @@ -1,49 +0,0 @@ - -class Node(object): - def __init__(self, tag, value, start_mark, end_mark): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - value = self.value - #if isinstance(value, list): - # if 
len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' % len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) - value = repr(value) - return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) - -class ScalarNode(Node): - id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class SequenceNode(CollectionNode): - id = 'sequence' - -class MappingNode(CollectionNode): - id = 'mapping' - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/parser.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/parser.py deleted file mode 100644 index 13a5995..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/parser.py +++ /dev/null @@ -1,589 +0,0 @@ - -# The following YAML grammar is LL(1) and is parsed by a recursive descent -# parser. -# -# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -# implicit_document ::= block_node DOCUMENT-END* -# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -# block_node_or_indentless_sequence ::= -# ALIAS -# | properties (block_content | indentless_block_sequence)? -# | block_content -# | indentless_block_sequence -# block_node ::= ALIAS -# | properties block_content? -# | block_content -# flow_node ::= ALIAS -# | properties flow_content? -# | flow_content -# properties ::= TAG ANCHOR? | ANCHOR TAG? 
-# block_content ::= block_collection | flow_collection | SCALAR -# flow_content ::= flow_collection | SCALAR -# block_collection ::= block_sequence | block_mapping -# flow_collection ::= flow_sequence | flow_mapping -# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -# block_mapping ::= BLOCK-MAPPING_START -# ((KEY block_node_or_indentless_sequence?)? -# (VALUE block_node_or_indentless_sequence?)?)* -# BLOCK-END -# flow_sequence ::= FLOW-SEQUENCE-START -# (flow_sequence_entry FLOW-ENTRY)* -# flow_sequence_entry? -# FLOW-SEQUENCE-END -# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# flow_mapping ::= FLOW-MAPPING-START -# (flow_mapping_entry FLOW-ENTRY)* -# flow_mapping_entry? -# FLOW-MAPPING-END -# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# -# FIRST sets: -# -# stream: { STREAM-START } -# explicit_document: { DIRECTIVE DOCUMENT-START } -# implicit_document: FIRST(block_node) -# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_sequence: { BLOCK-SEQUENCE-START } -# block_mapping: { BLOCK-MAPPING-START } -# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } -# indentless_sequence: { ENTRY } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_sequence: { FLOW-SEQUENCE-START } -# flow_mapping: { FLOW-MAPPING-START } -# flow_sequence_entry: { ALIAS ANCHOR 
TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } -# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } - -__all__ = ['Parser', 'ParserError'] - -from .error import MarkedYAMLError -from .tokens import * -from .events import * -from .scanner import * - -class ParserError(MarkedYAMLError): - pass - -class Parser: - # Since writing a recursive-descendant parser is a straightforward task, we - # do not give many comments here. - - DEFAULT_TAGS = { - '!': '!', - '!!': 'tag:yaml.org,2002:', - } - - def __init__(self): - self.current_event = None - self.yaml_version = None - self.tag_handles = {} - self.states = [] - self.marks = [] - self.state = self.parse_stream_start - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def check_event(self, *choices): - # Check the type of the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - if self.current_event is not None: - if not choices: - return True - for choice in choices: - if isinstance(self.current_event, choice): - return True - return False - - def peek_event(self): - # Get the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - return self.current_event - - def get_event(self): - # Get the next event and proceed further. - if self.current_event is None: - if self.state: - self.current_event = self.state() - value = self.current_event - self.current_event = None - return value - - # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END - # implicit_document ::= block_node DOCUMENT-END* - # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* - - def parse_stream_start(self): - - # Parse the stream start. - token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) - - # Prepare the next state. 
- self.state = self.parse_implicit_document_start - - return event - - def parse_implicit_document_start(self): - - # Parse an implicit document. - if not self.check_token(DirectiveToken, DocumentStartToken, - StreamEndToken): - self.tag_handles = self.DEFAULT_TAGS - token = self.peek_token() - start_mark = end_mark = token.start_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=False) - - # Prepare the next state. - self.states.append(self.parse_document_end) - self.state = self.parse_block_node - - return event - - else: - return self.parse_document_start() - - def parse_document_start(self): - - # Parse any extra document end indicators. - while self.check_token(DocumentEndToken): - self.get_token() - - # Parse an explicit document. - if not self.check_token(StreamEndToken): - token = self.peek_token() - start_mark = token.start_mark - version, tags = self.process_directives() - if not self.check_token(DocumentStartToken): - raise ParserError(None, None, - "expected '', but found %r" - % self.peek_token().id, - self.peek_token().start_mark) - token = self.get_token() - end_mark = token.end_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=True, version=version, tags=tags) - self.states.append(self.parse_document_end) - self.state = self.parse_document_content - else: - # Parse the end of the stream. - token = self.get_token() - event = StreamEndEvent(token.start_mark, token.end_mark) - assert not self.states - assert not self.marks - self.state = None - return event - - def parse_document_end(self): - - # Parse the document end. - token = self.peek_token() - start_mark = end_mark = token.start_mark - explicit = False - if self.check_token(DocumentEndToken): - token = self.get_token() - end_mark = token.end_mark - explicit = True - event = DocumentEndEvent(start_mark, end_mark, - explicit=explicit) - - # Prepare the next state. 
- self.state = self.parse_document_start - - return event - - def parse_document_content(self): - if self.check_token(DirectiveToken, - DocumentStartToken, DocumentEndToken, StreamEndToken): - event = self.process_empty_scalar(self.peek_token().start_mark) - self.state = self.states.pop() - return event - else: - return self.parse_block_node() - - def process_directives(self): - self.yaml_version = None - self.tag_handles = {} - while self.check_token(DirectiveToken): - token = self.get_token() - if token.name == 'YAML': - if self.yaml_version is not None: - raise ParserError(None, None, - "found duplicate YAML directive", token.start_mark) - major, minor = token.value - if major != 1: - raise ParserError(None, None, - "found incompatible YAML document (version 1.* is required)", - token.start_mark) - self.yaml_version = token.value - elif token.name == 'TAG': - handle, prefix = token.value - if handle in self.tag_handles: - raise ParserError(None, None, - "duplicate tag handle %r" % handle, - token.start_mark) - self.tag_handles[handle] = prefix - if self.tag_handles: - value = self.yaml_version, self.tag_handles.copy() - else: - value = self.yaml_version, None - for key in self.DEFAULT_TAGS: - if key not in self.tag_handles: - self.tag_handles[key] = self.DEFAULT_TAGS[key] - return value - - # block_node_or_indentless_sequence ::= ALIAS - # | properties (block_content | indentless_block_sequence)? - # | block_content - # | indentless_block_sequence - # block_node ::= ALIAS - # | properties block_content? - # | block_content - # flow_node ::= ALIAS - # | properties flow_content? - # | flow_content - # properties ::= TAG ANCHOR? | ANCHOR TAG? 
- # block_content ::= block_collection | flow_collection | SCALAR - # flow_content ::= flow_collection | SCALAR - # block_collection ::= block_sequence | block_mapping - # flow_collection ::= flow_sequence | flow_mapping - - def parse_block_node(self): - return self.parse_node(block=True) - - def parse_flow_node(self): - return self.parse_node() - - def parse_block_node_or_indentless_sequence(self): - return self.parse_node(block=True, indentless_sequence=True) - - def parse_node(self, block=False, indentless_sequence=False): - if self.check_token(AliasToken): - token = self.get_token() - event = AliasEvent(token.value, token.start_mark, token.end_mark) - self.state = self.states.pop() - else: - anchor = None - tag = None - start_mark = end_mark = tag_mark = None - if self.check_token(AnchorToken): - token = self.get_token() - start_mark = token.start_mark - end_mark = token.end_mark - anchor = token.value - if self.check_token(TagToken): - token = self.get_token() - tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - elif self.check_token(TagToken): - token = self.get_token() - start_mark = tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - if self.check_token(AnchorToken): - token = self.get_token() - end_mark = token.end_mark - anchor = token.value - if tag is not None: - handle, suffix = tag - if handle is not None: - if handle not in self.tag_handles: - raise ParserError("while parsing a node", start_mark, - "found undefined tag handle %r" % handle, - tag_mark) - tag = self.tag_handles[handle]+suffix - else: - tag = suffix - #if tag == '!': - # raise ParserError("while parsing a node", start_mark, - # "found non-specific tag '!'", tag_mark, - # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") - if start_mark is None: - start_mark = end_mark = self.peek_token().start_mark - event = None - implicit = (tag is None or tag == '!') - if indentless_sequence and 
self.check_token(BlockEntryToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark) - self.state = self.parse_indentless_sequence_entry - else: - if self.check_token(ScalarToken): - token = self.get_token() - end_mark = token.end_mark - if (token.plain and tag is None) or tag == '!': - implicit = (True, False) - elif tag is None: - implicit = (False, True) - else: - implicit = (False, False) - event = ScalarEvent(anchor, tag, implicit, token.value, - start_mark, end_mark, style=token.style) - self.state = self.states.pop() - elif self.check_token(FlowSequenceStartToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_sequence_first_entry - elif self.check_token(FlowMappingStartToken): - end_mark = self.peek_token().end_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_mapping_first_key - elif block and self.check_token(BlockSequenceStartToken): - end_mark = self.peek_token().start_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_sequence_first_entry - elif block and self.check_token(BlockMappingStartToken): - end_mark = self.peek_token().start_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_mapping_first_key - elif anchor is not None or tag is not None: - # Empty scalars are allowed even if a tag or an anchor is - # specified. 
- event = ScalarEvent(anchor, tag, (implicit, False), '', - start_mark, end_mark) - self.state = self.states.pop() - else: - if block: - node = 'block' - else: - node = 'flow' - token = self.peek_token() - raise ParserError("while parsing a %s node" % node, start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) - return event - - # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END - - def parse_block_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_sequence_entry() - - def parse_block_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, BlockEndToken): - self.states.append(self.parse_block_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_block_sequence_entry - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block collection", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - # indentless_sequence ::= (BLOCK-ENTRY block_node?)+ - - def parse_indentless_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, - KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_indentless_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_indentless_sequence_entry - return self.process_empty_scalar(token.end_mark) - token = self.peek_token() - event = SequenceEndEvent(token.start_mark, token.start_mark) - self.state = self.states.pop() - return event - - # block_mapping ::= BLOCK-MAPPING_START - # ((KEY 
block_node_or_indentless_sequence?)? - # (VALUE block_node_or_indentless_sequence?)?)* - # BLOCK-END - - def parse_block_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_mapping_key() - - def parse_block_mapping_key(self): - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_value) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_value - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block mapping", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_block_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_key) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_block_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - # flow_sequence ::= FLOW-SEQUENCE-START - # (flow_sequence_entry FLOW-ENTRY)* - # flow_sequence_entry? - # FLOW-SEQUENCE-END - # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - # - # Note that while production rules for both flow_sequence_entry and - # flow_mapping_entry are equal, their interpretations are different. - # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` - # generate an inline mapping (set syntax). 
- - def parse_flow_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_sequence_entry(first=True) - - def parse_flow_sequence_entry(self, first=False): - if not self.check_token(FlowSequenceEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) - - if self.check_token(KeyToken): - token = self.peek_token() - event = MappingStartEvent(None, None, True, - token.start_mark, token.end_mark, - flow_style=True) - self.state = self.parse_flow_sequence_entry_mapping_key - return event - elif not self.check_token(FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry) - return self.parse_flow_node() - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_sequence_entry_mapping_key(self): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_value - return self.process_empty_scalar(token.end_mark) - - def parse_flow_sequence_entry_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_end) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_end - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_sequence_entry_mapping_end - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_sequence_entry_mapping_end(self): - self.state = self.parse_flow_sequence_entry - token = self.peek_token() - return MappingEndEvent(token.start_mark, token.start_mark) - - # flow_mapping ::= FLOW-MAPPING-START - # (flow_mapping_entry FLOW-ENTRY)* - # flow_mapping_entry? - # FLOW-MAPPING-END - # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - - def parse_flow_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_mapping_key(first=True) - - def parse_flow_mapping_key(self, first=False): - if not self.check_token(FlowMappingEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_value - return self.process_empty_scalar(token.end_mark) - elif not self.check_token(FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_empty_value) - return self.parse_flow_node() - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_key) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_mapping_empty_value(self): - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(self.peek_token().start_mark) - - def process_empty_scalar(self, mark): - return ScalarEvent(None, None, (True, False), '', mark, mark) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/reader.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/reader.py deleted file mode 100644 index f70e920..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/reader.py +++ /dev/null @@ -1,192 +0,0 @@ -# This module contains abstractions for the input stream. You don't have to -# looks further, there are no pretty code. -# -# We define two classes here. -# -# Mark(source, line, column) -# It's just a record and its only use is producing nice error messages. -# Parser does not use it for any other purposes. -# -# Reader(source, data) -# Reader determines the encoding of `data` and converts it to unicode. -# Reader provides the following methods and attributes: -# reader.peek(length=1) - return the next `length` characters -# reader.forward(length=1) - move the current position to `length` characters. -# reader.index - the number of the current character. -# reader.line, stream.column - the line and the column of the current character. 
- -__all__ = ['Reader', 'ReaderError'] - -from .error import YAMLError, Mark - -import codecs, re - -class ReaderError(YAMLError): - - def __init__(self, name, position, character, encoding, reason): - self.name = name - self.character = character - self.position = position - self.encoding = encoding - self.reason = reason - - def __str__(self): - if isinstance(self.character, bytes): - return "'%s' codec can't decode byte #x%02x: %s\n" \ - " in \"%s\", position %d" \ - % (self.encoding, ord(self.character), self.reason, - self.name, self.position) - else: - return "unacceptable character #x%04x: %s\n" \ - " in \"%s\", position %d" \ - % (self.character, self.reason, - self.name, self.position) - -class Reader(object): - # Reader: - # - determines the data encoding and converts it to a unicode string, - # - checks if characters are in allowed range, - # - adds '\0' to the end. - - # Reader accepts - # - a `bytes` object, - # - a `str` object, - # - a file-like object with its `read` method returning `str`, - # - a file-like object with its `read` method returning `unicode`. - - # Yeah, it's ugly and slow. 
- - def __init__(self, stream): - self.name = None - self.stream = None - self.stream_pointer = 0 - self.eof = True - self.buffer = '' - self.pointer = 0 - self.raw_buffer = None - self.raw_decode = None - self.encoding = None - self.index = 0 - self.line = 0 - self.column = 0 - if isinstance(stream, str): - self.name = "" - self.check_printable(stream) - self.buffer = stream+'\0' - elif isinstance(stream, bytes): - self.name = "" - self.raw_buffer = stream - self.determine_encoding() - else: - self.stream = stream - self.name = getattr(stream, 'name', "") - self.eof = False - self.raw_buffer = None - self.determine_encoding() - - def peek(self, index=0): - try: - return self.buffer[self.pointer+index] - except IndexError: - self.update(index+1) - return self.buffer[self.pointer+index] - - def prefix(self, length=1): - if self.pointer+length >= len(self.buffer): - self.update(length) - return self.buffer[self.pointer:self.pointer+length] - - def forward(self, length=1): - if self.pointer+length+1 >= len(self.buffer): - self.update(length+1) - while length: - ch = self.buffer[self.pointer] - self.pointer += 1 - self.index += 1 - if ch in '\n\x85\u2028\u2029' \ - or (ch == '\r' and self.buffer[self.pointer] != '\n'): - self.line += 1 - self.column = 0 - elif ch != '\uFEFF': - self.column += 1 - length -= 1 - - def get_mark(self): - if self.stream is None: - return Mark(self.name, self.index, self.line, self.column, - self.buffer, self.pointer) - else: - return Mark(self.name, self.index, self.line, self.column, - None, None) - - def determine_encoding(self): - while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2): - self.update_raw() - if isinstance(self.raw_buffer, bytes): - if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): - self.raw_decode = codecs.utf_16_le_decode - self.encoding = 'utf-16-le' - elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): - self.raw_decode = codecs.utf_16_be_decode - self.encoding = 'utf-16-be' - else: - 
self.raw_decode = codecs.utf_8_decode - self.encoding = 'utf-8' - self.update(1) - - NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') - def check_printable(self, data): - match = self.NON_PRINTABLE.search(data) - if match: - character = match.group() - position = self.index+(len(self.buffer)-self.pointer)+match.start() - raise ReaderError(self.name, position, ord(character), - 'unicode', "special characters are not allowed") - - def update(self, length): - if self.raw_buffer is None: - return - self.buffer = self.buffer[self.pointer:] - self.pointer = 0 - while len(self.buffer) < length: - if not self.eof: - self.update_raw() - if self.raw_decode is not None: - try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) - except UnicodeDecodeError as exc: - character = self.raw_buffer[exc.start] - if self.stream is not None: - position = self.stream_pointer-len(self.raw_buffer)+exc.start - else: - position = exc.start - raise ReaderError(self.name, position, character, - exc.encoding, exc.reason) - else: - data = self.raw_buffer - converted = len(data) - self.check_printable(data) - self.buffer += data - self.raw_buffer = self.raw_buffer[converted:] - if self.eof: - self.buffer += '\0' - self.raw_buffer = None - break - - def update_raw(self, size=4096): - data = self.stream.read(size) - if self.raw_buffer is None: - self.raw_buffer = data - else: - self.raw_buffer += data - self.stream_pointer += len(data) - if not data: - self.eof = True - -#try: -# import psyco -# psyco.bind(Reader) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/representer.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/representer.py deleted file mode 100644 index 67cd6fd..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/representer.py +++ /dev/null @@ -1,374 +0,0 @@ - -__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', - 'RepresenterError'] - -from .error import * -from 
.nodes import * - -import datetime, sys, copyreg, types, base64 - -class RepresenterError(YAMLError): - pass - -class BaseRepresenter: - - yaml_representers = {} - yaml_multi_representers = {} - - def __init__(self, default_style=None, default_flow_style=None): - self.default_style = default_style - self.default_flow_style = default_flow_style - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent(self, data): - node = self.represent_data(data) - self.serialize(node) - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent_data(self, data): - if self.ignore_aliases(data): - self.alias_key = None - else: - self.alias_key = id(data) - if self.alias_key is not None: - if self.alias_key in self.represented_objects: - node = self.represented_objects[self.alias_key] - #if node is None: - # raise RepresenterError("recursive objects are not allowed: %r" % data) - return node - #self.represented_objects[alias_key] = None - self.object_keeper.append(data) - data_types = type(data).__mro__ - if data_types[0] in self.yaml_representers: - node = self.yaml_representers[data_types[0]](self, data) - else: - for data_type in data_types: - if data_type in self.yaml_multi_representers: - node = self.yaml_multi_representers[data_type](self, data) - break - else: - if None in self.yaml_multi_representers: - node = self.yaml_multi_representers[None](self, data) - elif None in self.yaml_representers: - node = self.yaml_representers[None](self, data) - else: - node = ScalarNode(None, str(data)) - #if alias_key is not None: - # self.represented_objects[alias_key] = node - return node - - @classmethod - def add_representer(cls, data_type, representer): - if not 'yaml_representers' in cls.__dict__: - cls.yaml_representers = cls.yaml_representers.copy() - cls.yaml_representers[data_type] = representer - - @classmethod - def add_multi_representer(cls, data_type, representer): - if not 
'yaml_multi_representers' in cls.__dict__: - cls.yaml_multi_representers = cls.yaml_multi_representers.copy() - cls.yaml_multi_representers[data_type] = representer - - def represent_scalar(self, tag, value, style=None): - if style is None: - style = self.default_style - node = ScalarNode(tag, value, style=style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - return node - - def represent_sequence(self, tag, sequence, flow_style=None): - value = [] - node = SequenceNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - for item in sequence: - node_item = self.represent_data(item) - if not (isinstance(node_item, ScalarNode) and not node_item.style): - best_style = False - value.append(node_item) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - mapping = list(mapping.items()) - try: - mapping = sorted(mapping) - except TypeError: - pass - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def ignore_aliases(self, data): - return False - -class SafeRepresenter(BaseRepresenter): - - def ignore_aliases(self, 
data): - if data in [None, ()]: - return True - if isinstance(data, (str, bytes, bool, int, float)): - return True - - def represent_none(self, data): - return self.represent_scalar('tag:yaml.org,2002:null', 'null') - - def represent_str(self, data): - return self.represent_scalar('tag:yaml.org,2002:str', data) - - def represent_binary(self, data): - if hasattr(base64, 'encodebytes'): - data = base64.encodebytes(data).decode('ascii') - else: - data = base64.encodestring(data).decode('ascii') - return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') - - def represent_bool(self, data): - if data: - value = 'true' - else: - value = 'false' - return self.represent_scalar('tag:yaml.org,2002:bool', value) - - def represent_int(self, data): - return self.represent_scalar('tag:yaml.org,2002:int', str(data)) - - inf_value = 1e300 - while repr(inf_value) != repr(inf_value*inf_value): - inf_value *= inf_value - - def represent_float(self, data): - if data != data or (data == 0.0 and data == 1.0): - value = '.nan' - elif data == self.inf_value: - value = '.inf' - elif data == -self.inf_value: - value = '-.inf' - else: - value = repr(data).lower() - # Note that in some cases `repr(data)` represents a float number - # without the decimal parts. For instance: - # >>> repr(1e17) - # '1e17' - # Unfortunately, this is not a valid float representation according - # to the definition of the `!!float` tag. We fix this by adding - # '.0' before the 'e' symbol. - if '.' 
not in value and 'e' in value: - value = value.replace('e', '.0e', 1) - return self.represent_scalar('tag:yaml.org,2002:float', value) - - def represent_list(self, data): - #pairs = (len(data) > 0 and isinstance(data, list)) - #if pairs: - # for item in data: - # if not isinstance(item, tuple) or len(item) != 2: - # pairs = False - # break - #if not pairs: - return self.represent_sequence('tag:yaml.org,2002:seq', data) - #value = [] - #for item_key, item_value in data: - # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', - # [(item_key, item_value)])) - #return SequenceNode(u'tag:yaml.org,2002:pairs', value) - - def represent_dict(self, data): - return self.represent_mapping('tag:yaml.org,2002:map', data) - - def represent_set(self, data): - value = {} - for key in data: - value[key] = None - return self.represent_mapping('tag:yaml.org,2002:set', value) - - def represent_date(self, data): - value = data.isoformat() - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_datetime(self, data): - value = data.isoformat(' ') - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_yaml_object(self, tag, data, cls, flow_style=None): - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__.copy() - return self.represent_mapping(tag, state, flow_style=flow_style) - - def represent_undefined(self, data): - raise RepresenterError("cannot represent an object: %s" % data) - -SafeRepresenter.add_representer(type(None), - SafeRepresenter.represent_none) - -SafeRepresenter.add_representer(str, - SafeRepresenter.represent_str) - -SafeRepresenter.add_representer(bytes, - SafeRepresenter.represent_binary) - -SafeRepresenter.add_representer(bool, - SafeRepresenter.represent_bool) - -SafeRepresenter.add_representer(int, - SafeRepresenter.represent_int) - -SafeRepresenter.add_representer(float, - SafeRepresenter.represent_float) - -SafeRepresenter.add_representer(list, - 
SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(tuple, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(dict, - SafeRepresenter.represent_dict) - -SafeRepresenter.add_representer(set, - SafeRepresenter.represent_set) - -SafeRepresenter.add_representer(datetime.date, - SafeRepresenter.represent_date) - -SafeRepresenter.add_representer(datetime.datetime, - SafeRepresenter.represent_datetime) - -SafeRepresenter.add_representer(None, - SafeRepresenter.represent_undefined) - -class Representer(SafeRepresenter): - - def represent_complex(self, data): - if data.imag == 0.0: - data = '%r' % data.real - elif data.real == 0.0: - data = '%rj' % data.imag - elif data.imag > 0: - data = '%r+%rj' % (data.real, data.imag) - else: - data = '%r%rj' % (data.real, data.imag) - return self.represent_scalar('tag:yaml.org,2002:python/complex', data) - - def represent_tuple(self, data): - return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) - - def represent_name(self, data): - name = '%s.%s' % (data.__module__, data.__name__) - return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') - - def represent_module(self, data): - return self.represent_scalar( - 'tag:yaml.org,2002:python/module:'+data.__name__, '') - - def represent_object(self, data): - # We use __reduce__ API to save the data. data.__reduce__ returns - # a tuple of length 2-5: - # (function, args, state, listitems, dictitems) - - # For reconstructing, we calls function(*args), then set its state, - # listitems, and dictitems if they are not None. - - # A special case is when function.__name__ == '__newobj__'. In this - # case we create the object with args[0].__new__(*args). - - # Another special case is when __reduce__ returns a string - we don't - # support it. - - # We produce a !!python/object, !!python/object/new or - # !!python/object/apply node. 
- - cls = type(data) - if cls in copyreg.dispatch_table: - reduce = copyreg.dispatch_table[cls](data) - elif hasattr(data, '__reduce_ex__'): - reduce = data.__reduce_ex__(2) - elif hasattr(data, '__reduce__'): - reduce = data.__reduce__() - else: - raise RepresenterError("cannot represent object: %r" % data) - reduce = (list(reduce)+[None]*5)[:5] - function, args, state, listitems, dictitems = reduce - args = list(args) - if state is None: - state = {} - if listitems is not None: - listitems = list(listitems) - if dictitems is not None: - dictitems = dict(dictitems) - if function.__name__ == '__newobj__': - function = args[0] - args = args[1:] - tag = 'tag:yaml.org,2002:python/object/new:' - newobj = True - else: - tag = 'tag:yaml.org,2002:python/object/apply:' - newobj = False - function_name = '%s.%s' % (function.__module__, function.__name__) - if not args and not listitems and not dictitems \ - and isinstance(state, dict) and newobj: - return self.represent_mapping( - 'tag:yaml.org,2002:python/object:'+function_name, state) - if not listitems and not dictitems \ - and isinstance(state, dict) and not state: - return self.represent_sequence(tag+function_name, args) - value = {} - if args: - value['args'] = args - if state or not isinstance(state, dict): - value['state'] = state - if listitems: - value['listitems'] = listitems - if dictitems: - value['dictitems'] = dictitems - return self.represent_mapping(tag+function_name, value) - -Representer.add_representer(complex, - Representer.represent_complex) - -Representer.add_representer(tuple, - Representer.represent_tuple) - -Representer.add_representer(type, - Representer.represent_name) - -Representer.add_representer(types.FunctionType, - Representer.represent_name) - -Representer.add_representer(types.BuiltinFunctionType, - Representer.represent_name) - -Representer.add_representer(types.ModuleType, - Representer.represent_module) - -Representer.add_multi_representer(object, - Representer.represent_object) - diff 
--git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/resolver.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/resolver.py deleted file mode 100644 index 0eece25..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/resolver.py +++ /dev/null @@ -1,224 +0,0 @@ - -__all__ = ['BaseResolver', 'Resolver'] - -from .error import * -from .nodes import * - -import re - -class ResolverError(YAMLError): - pass - -class BaseResolver: - - DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' - DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' - DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' - - yaml_implicit_resolvers = {} - yaml_path_resolvers = {} - - def __init__(self): - self.resolver_exact_paths = [] - self.resolver_prefix_paths = [] - - @classmethod - def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: - cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() - if first is None: - first = [None] - for ch in first: - cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) - - @classmethod - def add_path_resolver(cls, tag, path, kind=None): - # Note: `add_path_resolver` is experimental. The API could be changed. - # `new_path` is a pattern that is matched against the path from the - # root to the node that is being considered. `node_path` elements are - # tuples `(node_check, index_check)`. `node_check` is a node class: - # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` - # matches any kind of a node. `index_check` could be `None`, a boolean - # value, a string value, or a number. `None` and `False` match against - # any _value_ of sequence and mapping nodes. `True` matches against - # any _key_ of a mapping node. A string `index_check` matches against - # a mapping value that corresponds to a scalar key which content is - # equal to the `index_check` value. An integer `index_check` matches - # against a sequence value with the index equal to `index_check`. 
- if not 'yaml_path_resolvers' in cls.__dict__: - cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() - new_path = [] - for element in path: - if isinstance(element, (list, tuple)): - if len(element) == 2: - node_check, index_check = element - elif len(element) == 1: - node_check = element[0] - index_check = True - else: - raise ResolverError("Invalid path element: %s" % element) - else: - node_check = None - index_check = element - if node_check is str: - node_check = ScalarNode - elif node_check is list: - node_check = SequenceNode - elif node_check is dict: - node_check = MappingNode - elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ - and not isinstance(node_check, str) \ - and node_check is not None: - raise ResolverError("Invalid node checker: %s" % node_check) - if not isinstance(index_check, (str, int)) \ - and index_check is not None: - raise ResolverError("Invalid index checker: %s" % index_check) - new_path.append((node_check, index_check)) - if kind is str: - kind = ScalarNode - elif kind is list: - kind = SequenceNode - elif kind is dict: - kind = MappingNode - elif kind not in [ScalarNode, SequenceNode, MappingNode] \ - and kind is not None: - raise ResolverError("Invalid node kind: %s" % kind) - cls.yaml_path_resolvers[tuple(new_path), kind] = tag - - def descend_resolver(self, current_node, current_index): - if not self.yaml_path_resolvers: - return - exact_paths = {} - prefix_paths = [] - if current_node: - depth = len(self.resolver_prefix_paths) - for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): - if len(path) > depth: - prefix_paths.append((path, kind)) - else: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - for path, kind in self.yaml_path_resolvers: - if not path: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - prefix_paths.append((path, kind)) - self.resolver_exact_paths.append(exact_paths) - 
self.resolver_prefix_paths.append(prefix_paths) - - def ascend_resolver(self): - if not self.yaml_path_resolvers: - return - self.resolver_exact_paths.pop() - self.resolver_prefix_paths.pop() - - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): - node_check, index_check = path[depth-1] - if isinstance(node_check, str): - if current_node.tag != node_check: - return - elif node_check is not None: - if not isinstance(current_node, node_check): - return - if index_check is True and current_index is not None: - return - if (index_check is False or index_check is None) \ - and current_index is None: - return - if isinstance(index_check, str): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): - return - elif isinstance(index_check, int) and not isinstance(index_check, bool): - if index_check != current_index: - return - return True - - def resolve(self, kind, value, implicit): - if kind is ScalarNode and implicit[0]: - if value == '': - resolvers = self.yaml_implicit_resolvers.get('', []) - else: - resolvers = self.yaml_implicit_resolvers.get(value[0], []) - resolvers += self.yaml_implicit_resolvers.get(None, []) - for tag, regexp in resolvers: - if regexp.match(value): - return tag - implicit = implicit[1] - if self.yaml_path_resolvers: - exact_paths = self.resolver_exact_paths[-1] - if kind in exact_paths: - return exact_paths[kind] - if None in exact_paths: - return exact_paths[None] - if kind is ScalarNode: - return self.DEFAULT_SCALAR_TAG - elif kind is SequenceNode: - return self.DEFAULT_SEQUENCE_TAG - elif kind is MappingNode: - return self.DEFAULT_MAPPING_TAG - -class Resolver(BaseResolver): - pass - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:bool', - re.compile(r'''^(?:yes|Yes|YES|no|No|NO - |true|True|TRUE|false|False|FALSE - |on|On|ON|off|Off|OFF)$''', re.X), - list('yYnNtTfFoO')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:float', - 
re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? - |\.[0-9_]+(?:[eE][-+][0-9]+)? - |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* - |[-+]?\.(?:inf|Inf|INF) - |\.(?:nan|NaN|NAN))$''', re.X), - list('-+0123456789.')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:int', - re.compile(r'''^(?:[-+]?0b[0-1_]+ - |[-+]?0[0-7_]+ - |[-+]?(?:0|[1-9][0-9_]*) - |[-+]?0x[0-9a-fA-F_]+ - |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), - list('-+0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:merge', - re.compile(r'^(?:<<)$'), - ['<']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:null', - re.compile(r'''^(?: ~ - |null|Null|NULL - | )$''', re.X), - ['~', 'n', 'N', '']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:timestamp', - re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] - |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? - (?:[Tt]|[ \t]+)[0-9][0-9]? - :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? - (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), - list('0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:value', - re.compile(r'^(?:=)$'), - ['=']) - -# The following resolver is only for documentation purposes. It cannot work -# because plain scalars cannot start with '!', '&', or '*'. 
-Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:yaml', - re.compile(r'^(?:!|&|\*)$'), - list('!&*')) - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/scanner.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/scanner.py deleted file mode 100644 index 494d975..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/scanner.py +++ /dev/null @@ -1,1448 +0,0 @@ - -# Scanner produces tokens of the following types: -# STREAM-START -# STREAM-END -# DIRECTIVE(name, value) -# DOCUMENT-START -# DOCUMENT-END -# BLOCK-SEQUENCE-START -# BLOCK-MAPPING-START -# BLOCK-END -# FLOW-SEQUENCE-START -# FLOW-MAPPING-START -# FLOW-SEQUENCE-END -# FLOW-MAPPING-END -# BLOCK-ENTRY -# FLOW-ENTRY -# KEY -# VALUE -# ALIAS(value) -# ANCHOR(value) -# TAG(value) -# SCALAR(value, plain, style) -# -# Read comments in the Scanner code for more details. -# - -__all__ = ['Scanner', 'ScannerError'] - -from .error import MarkedYAMLError -from .tokens import * - -class ScannerError(MarkedYAMLError): - pass - -class SimpleKey: - # See below simple keys treatment. - - def __init__(self, token_number, required, index, line, column, mark): - self.token_number = token_number - self.required = required - self.index = index - self.line = line - self.column = column - self.mark = mark - -class Scanner: - - def __init__(self): - """Initialize the scanner.""" - # It is assumed that Scanner and Reader will have a common descendant. - # Reader do the dirty work of checking for BOM and converting the - # input data to Unicode. It also adds NUL to the end. - # - # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. - - # Had we reached the end of the stream? - self.done = False - - # The number of unclosed '{' and '['. `flow_level == 0` means block - # context. - self.flow_level = 0 - - # List of processed tokens that are not yet emitted. 
- self.tokens = [] - - # Add the STREAM-START token. - self.fetch_stream_start() - - # Number of tokens that were emitted through the `get_token` method. - self.tokens_taken = 0 - - # The current indentation level. - self.indent = -1 - - # Past indentation levels. - self.indents = [] - - # Variables related to simple keys treatment. - - # A simple key is a key that is not denoted by the '?' indicator. - # Example of simple keys: - # --- - # block simple key: value - # ? not a simple key: - # : { flow simple key: value } - # We emit the KEY token before all keys, so when we find a potential - # simple key, we try to locate the corresponding ':' indicator. - # Simple keys should be limited to a single line and 1024 characters. - - # Can a simple key start at the current position? A simple key may - # start: - # - at the beginning of the line, not counting indentation spaces - # (in block context), - # - after '{', '[', ',' (in the flow context), - # - after '?', ':', '-' (in the block context). - # In the block context, this flag also signifies if a block collection - # may start at the current position. - self.allow_simple_key = True - - # Keep track of possible simple keys. This is a dictionary. The key - # is `flow_level`; there can be no more that one possible simple key - # for each level. The value is a SimpleKey record: - # (token_number, required, index, line, column, mark) - # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), - # '[', or '{' tokens. - self.possible_simple_keys = {} - - # Public methods. - - def check_token(self, *choices): - # Check if the next token is one of the given types. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - # Return the next token, but do not delete if from the queue. 
- while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - return self.tokens[0] - - def get_token(self): - # Return the next token. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - self.tokens_taken += 1 - return self.tokens.pop(0) - - # Private methods. - - def need_more_tokens(self): - if self.done: - return False - if not self.tokens: - return True - # The current token may be a potential simple key, so we - # need to look further. - self.stale_possible_simple_keys() - if self.next_possible_simple_key() == self.tokens_taken: - return True - - def fetch_more_tokens(self): - - # Eat whitespaces and comments until we reach the next token. - self.scan_to_next_token() - - # Remove obsolete possible simple keys. - self.stale_possible_simple_keys() - - # Compare the current indentation and column. It may add some tokens - # and decrease the current indentation level. - self.unwind_indent(self.column) - - # Peek the next character. - ch = self.peek() - - # Is it the end of stream? - if ch == '\0': - return self.fetch_stream_end() - - # Is it a directive? - if ch == '%' and self.check_directive(): - return self.fetch_directive() - - # Is it the document start? - if ch == '-' and self.check_document_start(): - return self.fetch_document_start() - - # Is it the document end? - if ch == '.' and self.check_document_end(): - return self.fetch_document_end() - - # TODO: support for BOM within a stream. - #if ch == '\uFEFF': - # return self.fetch_bom() <-- issue BOMToken - - # Note: the order of the following checks is NOT significant. - - # Is it the flow sequence start indicator? - if ch == '[': - return self.fetch_flow_sequence_start() - - # Is it the flow mapping start indicator? - if ch == '{': - return self.fetch_flow_mapping_start() - - # Is it the flow sequence end indicator? - if ch == ']': - return self.fetch_flow_sequence_end() - - # Is it the flow mapping end indicator? 
- if ch == '}': - return self.fetch_flow_mapping_end() - - # Is it the flow entry indicator? - if ch == ',': - return self.fetch_flow_entry() - - # Is it the block entry indicator? - if ch == '-' and self.check_block_entry(): - return self.fetch_block_entry() - - # Is it the key indicator? - if ch == '?' and self.check_key(): - return self.fetch_key() - - # Is it the value indicator? - if ch == ':' and self.check_value(): - return self.fetch_value() - - # Is it an alias? - if ch == '*': - return self.fetch_alias() - - # Is it an anchor? - if ch == '&': - return self.fetch_anchor() - - # Is it a tag? - if ch == '!': - return self.fetch_tag() - - # Is it a literal scalar? - if ch == '|' and not self.flow_level: - return self.fetch_literal() - - # Is it a folded scalar? - if ch == '>' and not self.flow_level: - return self.fetch_folded() - - # Is it a single quoted scalar? - if ch == '\'': - return self.fetch_single() - - # Is it a double quoted scalar? - if ch == '\"': - return self.fetch_double() - - # It must be a plain scalar then. - if self.check_plain(): - return self.fetch_plain() - - # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" % ch, - self.get_mark()) - - # Simple keys treatment. - - def next_possible_simple_key(self): - # Return the number of the nearest possible simple key. Actually we - # don't need to loop through the whole dictionary. 
We may replace it - # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number - min_token_number = None - for level in self.possible_simple_keys: - key = self.possible_simple_keys[level] - if min_token_number is None or key.token_number < min_token_number: - min_token_number = key.token_number - return min_token_number - - def stale_possible_simple_keys(self): - # Remove entries that are no longer possible simple keys. According to - # the YAML specification, simple keys - # - should be limited to a single line, - # - should be no longer than 1024 characters. - # Disabling this procedure will allow simple keys of any length and - # height (may cause problems if indentation is broken though). - for level in list(self.possible_simple_keys): - key = self.possible_simple_keys[level] - if key.line != self.line \ - or self.index-key.index > 1024: - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - del self.possible_simple_keys[level] - - def save_possible_simple_key(self): - # The next token may start a simple key. We check if it's possible - # and save its position. This function is called for - # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. - - # Check if a simple key is required at the current position. - required = not self.flow_level and self.indent == self.column - - # A simple key is required only if it is the first token in the current - # line. Therefore it is always allowed. - assert self.allow_simple_key or not required - - # The next token might be a simple key. Let's save it's number and - # position. 
- if self.allow_simple_key: - self.remove_possible_simple_key() - token_number = self.tokens_taken+len(self.tokens) - key = SimpleKey(token_number, required, - self.index, self.line, self.column, self.get_mark()) - self.possible_simple_keys[self.flow_level] = key - - def remove_possible_simple_key(self): - # Remove the saved possible key position at the current flow level. - if self.flow_level in self.possible_simple_keys: - key = self.possible_simple_keys[self.flow_level] - - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - - del self.possible_simple_keys[self.flow_level] - - # Indentation functions. - - def unwind_indent(self, column): - - ## In flow context, tokens should respect indentation. - ## Actually the condition should be `self.indent >= column` according to - ## the spec. But this condition will prohibit intuitively correct - ## constructions such as - ## key : { - ## } - #if self.flow_level and self.indent > column: - # raise ScannerError(None, None, - # "invalid intendation or unclosed '[' or '{'", - # self.get_mark()) - - # In the flow context, indentation is ignored. We make the scanner less - # restrictive then specification requires. - if self.flow_level: - return - - # In block context, we may need to issue the BLOCK-END tokens. - while self.indent > column: - mark = self.get_mark() - self.indent = self.indents.pop() - self.tokens.append(BlockEndToken(mark, mark)) - - def add_indent(self, column): - # Check if we need to increase indentation. - if self.indent < column: - self.indents.append(self.indent) - self.indent = column - return True - return False - - # Fetchers. - - def fetch_stream_start(self): - # We always add STREAM-START as the first token and STREAM-END as the - # last token. - - # Read the token. - mark = self.get_mark() - - # Add STREAM-START. 
- self.tokens.append(StreamStartToken(mark, mark, - encoding=self.encoding)) - - - def fetch_stream_end(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - self.possible_simple_keys = {} - - # Read the token. - mark = self.get_mark() - - # Add STREAM-END. - self.tokens.append(StreamEndToken(mark, mark)) - - # The steam is finished. - self.done = True - - def fetch_directive(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Scan and add DIRECTIVE. - self.tokens.append(self.scan_directive()) - - def fetch_document_start(self): - self.fetch_document_indicator(DocumentStartToken) - - def fetch_document_end(self): - self.fetch_document_indicator(DocumentEndToken) - - def fetch_document_indicator(self, TokenClass): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. Note that there could not be a block collection - # after '---'. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Add DOCUMENT-START or DOCUMENT-END. - start_mark = self.get_mark() - self.forward(3) - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_start(self): - self.fetch_flow_collection_start(FlowSequenceStartToken) - - def fetch_flow_mapping_start(self): - self.fetch_flow_collection_start(FlowMappingStartToken) - - def fetch_flow_collection_start(self, TokenClass): - - # '[' and '{' may start a simple key. - self.save_possible_simple_key() - - # Increase the flow level. - self.flow_level += 1 - - # Simple keys are allowed after '[' and '{'. - self.allow_simple_key = True - - # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_end(self): - self.fetch_flow_collection_end(FlowSequenceEndToken) - - def fetch_flow_mapping_end(self): - self.fetch_flow_collection_end(FlowMappingEndToken) - - def fetch_flow_collection_end(self, TokenClass): - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Decrease the flow level. - self.flow_level -= 1 - - # No simple keys after ']' or '}'. - self.allow_simple_key = False - - # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_entry(self): - - # Simple keys are allowed after ','. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add FLOW-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(FlowEntryToken(start_mark, end_mark)) - - def fetch_block_entry(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a new entry? - if not self.allow_simple_key: - raise ScannerError(None, None, - "sequence entries are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-SEQUENCE-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockSequenceStartToken(mark, mark)) - - # It's an error for the block entry to occur in the flow context, - # but we let the parser detect this. - else: - pass - - # Simple keys are allowed after '-'. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add BLOCK-ENTRY. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(BlockEntryToken(start_mark, end_mark)) - - def fetch_key(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a key (not nessesary a simple)? - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping keys are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-MAPPING-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after '?' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add KEY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(KeyToken(start_mark, end_mark)) - - def fetch_value(self): - - # Do we determine a simple key? - if self.flow_level in self.possible_simple_keys: - - # Add KEY. - key = self.possible_simple_keys[self.flow_level] - del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number-self.tokens_taken, - KeyToken(key.mark, key.mark)) - - # If this key starts a new block mapping, we need to add - # BLOCK-MAPPING-START. - if not self.flow_level: - if self.add_indent(key.column): - self.tokens.insert(key.token_number-self.tokens_taken, - BlockMappingStartToken(key.mark, key.mark)) - - # There cannot be two simple keys one after another. - self.allow_simple_key = False - - # It must be a part of a complex key. - else: - - # Block context needs additional checks. - # (Do we really need them? They will be catched by the parser - # anyway.) - if not self.flow_level: - - # We are allowed to start a complex value if and only if - # we can start a simple key. 
- if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping values are not allowed here", - self.get_mark()) - - # If this value starts a new block mapping, we need to add - # BLOCK-MAPPING-START. It will be detected as an error later by - # the parser. - if not self.flow_level: - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after ':' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add VALUE. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(ValueToken(start_mark, end_mark)) - - def fetch_alias(self): - - # ALIAS could be a simple key. - self.save_possible_simple_key() - - # No simple keys after ALIAS. - self.allow_simple_key = False - - # Scan and add ALIAS. - self.tokens.append(self.scan_anchor(AliasToken)) - - def fetch_anchor(self): - - # ANCHOR could start a simple key. - self.save_possible_simple_key() - - # No simple keys after ANCHOR. - self.allow_simple_key = False - - # Scan and add ANCHOR. - self.tokens.append(self.scan_anchor(AnchorToken)) - - def fetch_tag(self): - - # TAG could start a simple key. - self.save_possible_simple_key() - - # No simple keys after TAG. - self.allow_simple_key = False - - # Scan and add TAG. - self.tokens.append(self.scan_tag()) - - def fetch_literal(self): - self.fetch_block_scalar(style='|') - - def fetch_folded(self): - self.fetch_block_scalar(style='>') - - def fetch_block_scalar(self, style): - - # A simple key may follow a block scalar. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Scan and add SCALAR. 
- self.tokens.append(self.scan_block_scalar(style)) - - def fetch_single(self): - self.fetch_flow_scalar(style='\'') - - def fetch_double(self): - self.fetch_flow_scalar(style='"') - - def fetch_flow_scalar(self, style): - - # A flow scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after flow scalars. - self.allow_simple_key = False - - # Scan and add SCALAR. - self.tokens.append(self.scan_flow_scalar(style)) - - def fetch_plain(self): - - # A plain scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after plain scalars. But note that `scan_plain` will - # change this flag if the scan is finished at the beginning of the - # line. - self.allow_simple_key = False - - # Scan and add SCALAR. May change `allow_simple_key`. - self.tokens.append(self.scan_plain()) - - # Checkers. - - def check_directive(self): - - # DIRECTIVE: ^ '%' ... - # The '%' indicator is already checked. - if self.column == 0: - return True - - def check_document_start(self): - - # DOCUMENT-START: ^ '---' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '---' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_document_end(self): - - # DOCUMENT-END: ^ '...' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '...' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_block_entry(self): - - # BLOCK-ENTRY: '-' (' '|'\n') - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_key(self): - - # KEY(flow context): '?' - if self.flow_level: - return True - - # KEY(block context): '?' 
(' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_value(self): - - # VALUE(flow context): ':' - if self.flow_level: - return True - - # VALUE(block context): ':' (' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_plain(self): - - # A plain scalar may start with any non-space character except: - # '-', '?', ':', ',', '[', ']', '{', '}', - # '#', '&', '*', '!', '|', '>', '\'', '\"', - # '%', '@', '`'. - # - # It may also start with - # '-', '?', ':' - # if it is followed by a non-space character. - # - # Note that we limit the last rule to the block context (except the - # '-' character) because we want the flow context to be space - # independent. - ch = self.peek() - return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ - or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029' - and (ch == '-' or (not self.flow_level and ch in '?:'))) - - # Scanners. - - def scan_to_next_token(self): - # We ignore spaces, line breaks and comments. - # If we find a line break in the block context, we set the flag - # `allow_simple_key` on. - # The byte order mark is stripped if it's the first character in the - # stream. We do not yet support BOM inside the stream as the - # specification requires. Any such mark will be considered as a part - # of the document. - # - # TODO: We need to make tab handling rules more sane. A good rule is - # Tabs cannot precede tokens - # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, - # KEY(block), VALUE(block), BLOCK-ENTRY - # So the checking code is - # if : - # self.allow_simple_keys = False - # We also need to add the check for `allow_simple_keys == True` to - # `unwind_indent` before issuing BLOCK-END. - # Scanners for block, flow, and plain scalars need to be modified. 
- - if self.index == 0 and self.peek() == '\uFEFF': - self.forward() - found = False - while not found: - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - if self.scan_line_break(): - if not self.flow_level: - self.allow_simple_key = True - else: - found = True - - def scan_directive(self): - # See the specification for details. - start_mark = self.get_mark() - self.forward() - name = self.scan_directive_name(start_mark) - value = None - if name == 'YAML': - value = self.scan_yaml_directive_value(start_mark) - end_mark = self.get_mark() - elif name == 'TAG': - value = self.scan_tag_directive_value(start_mark) - end_mark = self.get_mark() - else: - end_mark = self.get_mark() - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - self.scan_directive_ignored_line(start_mark) - return DirectiveToken(name, value, start_mark, end_mark) - - def scan_directive_name(self, start_mark): - # See the specification for details. - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - return value - - def scan_yaml_directive_value(self, start_mark): - # See the specification for details. 
- while self.peek() == ' ': - self.forward() - major = self.scan_yaml_directive_number(start_mark) - if self.peek() != '.': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or '.', but found %r" % self.peek(), - self.get_mark()) - self.forward() - minor = self.scan_yaml_directive_number(start_mark) - if self.peek() not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or ' ', but found %r" % self.peek(), - self.get_mark()) - return (major, minor) - - def scan_yaml_directive_number(self, start_mark): - # See the specification for details. - ch = self.peek() - if not ('0' <= ch <= '9'): - raise ScannerError("while scanning a directive", start_mark, - "expected a digit, but found %r" % ch, self.get_mark()) - length = 0 - while '0' <= self.peek(length) <= '9': - length += 1 - value = int(self.prefix(length)) - self.forward(length) - return value - - def scan_tag_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - handle = self.scan_tag_directive_handle(start_mark) - while self.peek() == ' ': - self.forward() - prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) - - def scan_tag_directive_handle(self, start_mark): - # See the specification for details. - value = self.scan_tag_handle('directive', start_mark) - ch = self.peek() - if ch != ' ': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_tag_directive_prefix(self, start_mark): - # See the specification for details. 
- value = self.scan_tag_uri('directive', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_directive_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a comment or a line break, but found %r" - % ch, self.get_mark()) - self.scan_line_break() - - def scan_anchor(self, TokenClass): - # The specification does not restrict characters for anchors and - # aliases. This may lead to problems, for instance, the document: - # [ *alias, value ] - # can be interpteted in two ways, as - # [ "value" ] - # and - # [ *alias , "value" ] - # Therefore we restrict aliases to numbers and ASCII letters. - start_mark = self.get_mark() - indicator = self.peek() - if indicator == '*': - name = 'alias' - else: - name = 'anchor' - self.forward() - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`': - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - end_mark = self.get_mark() - return TokenClass(value, start_mark, end_mark) - - def scan_tag(self): - # See the specification for details. 
- start_mark = self.get_mark() - ch = self.peek(1) - if ch == '<': - handle = None - self.forward(2) - suffix = self.scan_tag_uri('tag', start_mark) - if self.peek() != '>': - raise ScannerError("while parsing a tag", start_mark, - "expected '>', but found %r" % self.peek(), - self.get_mark()) - self.forward() - elif ch in '\0 \t\r\n\x85\u2028\u2029': - handle = None - suffix = '!' - self.forward() - else: - length = 1 - use_handle = False - while ch not in '\0 \r\n\x85\u2028\u2029': - if ch == '!': - use_handle = True - break - length += 1 - ch = self.peek(length) - handle = '!' - if use_handle: - handle = self.scan_tag_handle('tag', start_mark) - else: - handle = '!' - self.forward() - suffix = self.scan_tag_uri('tag', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a tag", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - value = (handle, suffix) - end_mark = self.get_mark() - return TagToken(value, start_mark, end_mark) - - def scan_block_scalar(self, style): - # See the specification for details. - - if style == '>': - folded = True - else: - folded = False - - chunks = [] - start_mark = self.get_mark() - - # Scan the header. - self.forward() - chomping, increment = self.scan_block_scalar_indicators(start_mark) - self.scan_block_scalar_ignored_line(start_mark) - - # Determine the indentation level and go to the first non-empty line. - min_indent = self.indent+1 - if min_indent < 1: - min_indent = 1 - if increment is None: - breaks, max_indent, end_mark = self.scan_block_scalar_indentation() - indent = max(min_indent, max_indent) - else: - indent = min_indent+increment-1 - breaks, end_mark = self.scan_block_scalar_breaks(indent) - line_break = '' - - # Scan the inner part of the block scalar. 
- while self.column == indent and self.peek() != '\0': - chunks.extend(breaks) - leading_non_space = self.peek() not in ' \t' - length = 0 - while self.peek(length) not in '\0\r\n\x85\u2028\u2029': - length += 1 - chunks.append(self.prefix(length)) - self.forward(length) - line_break = self.scan_line_break() - breaks, end_mark = self.scan_block_scalar_breaks(indent) - if self.column == indent and self.peek() != '\0': - - # Unfortunately, folding rules are ambiguous. - # - # This is the folding according to the specification: - - if folded and line_break == '\n' \ - and leading_non_space and self.peek() not in ' \t': - if not breaks: - chunks.append(' ') - else: - chunks.append(line_break) - - # This is Clark Evans's interpretation (also in the spec - # examples): - # - #if folded and line_break == '\n': - # if not breaks: - # if self.peek() not in ' \t': - # chunks.append(' ') - # else: - # chunks.append(line_break) - #else: - # chunks.append(line_break) - else: - break - - # Chomp the tail. - if chomping is not False: - chunks.append(line_break) - if chomping is True: - chunks.extend(breaks) - - # We are done. - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - def scan_block_scalar_indicators(self, start_mark): - # See the specification for details. 
- chomping = None - increment = None - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - elif ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected chomping or indentation indicators, but found %r" - % ch, self.get_mark()) - return chomping, increment - - def scan_block_scalar_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected a comment or a line break, but found %r" % ch, - self.get_mark()) - self.scan_line_break() - - def scan_block_scalar_indentation(self): - # See the specification for details. - chunks = [] - max_indent = 0 - end_mark = self.get_mark() - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() != ' ': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - else: - self.forward() - if self.column > max_indent: - max_indent = self.column - return chunks, max_indent, end_mark - - def scan_block_scalar_breaks(self, indent): - # See the specification for details. 
- chunks = [] - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - while self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - return chunks, end_mark - - def scan_flow_scalar(self, style): - # See the specification for details. - # Note that we loose indentation rules for quoted scalars. Quoted - # scalars don't need to adhere indentation because " and ' clearly - # mark the beginning and the end of them. Therefore we are less - # restrictive then the specification requires. We only need to check - # that document separators are not included in scalars. - if style == '"': - double = True - else: - double = False - chunks = [] - start_mark = self.get_mark() - quote = self.peek() - self.forward() - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - while self.peek() != quote: - chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - self.forward() - end_mark = self.get_mark() - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - ESCAPE_REPLACEMENTS = { - '0': '\0', - 'a': '\x07', - 'b': '\x08', - 't': '\x09', - '\t': '\x09', - 'n': '\x0A', - 'v': '\x0B', - 'f': '\x0C', - 'r': '\x0D', - 'e': '\x1B', - ' ': '\x20', - '\"': '\"', - '\\': '\\', - 'N': '\x85', - '_': '\xA0', - 'L': '\u2028', - 'P': '\u2029', - } - - ESCAPE_CODES = { - 'x': 2, - 'u': 4, - 'U': 8, - } - - def scan_flow_scalar_non_spaces(self, double, start_mark): - # See the specification for details. 
- chunks = [] - while True: - length = 0 - while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029': - length += 1 - if length: - chunks.append(self.prefix(length)) - self.forward(length) - ch = self.peek() - if not double and ch == '\'' and self.peek(1) == '\'': - chunks.append('\'') - self.forward(2) - elif (double and ch == '\'') or (not double and ch in '\"\\'): - chunks.append(ch) - self.forward() - elif double and ch == '\\': - self.forward() - ch = self.peek() - if ch in self.ESCAPE_REPLACEMENTS: - chunks.append(self.ESCAPE_REPLACEMENTS[ch]) - self.forward() - elif ch in self.ESCAPE_CODES: - length = self.ESCAPE_CODES[ch] - self.forward() - for k in range(length): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k)), self.get_mark()) - code = int(self.prefix(length), 16) - chunks.append(chr(code)) - self.forward(length) - elif ch in '\r\n\x85\u2028\u2029': - self.scan_line_break() - chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) - else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch, self.get_mark()) - else: - return chunks - - def scan_flow_scalar_spaces(self, double, start_mark): - # See the specification for details. 
- chunks = [] - length = 0 - while self.peek(length) in ' \t': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch == '\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) - elif ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - breaks = self.scan_flow_scalar_breaks(double, start_mark) - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - else: - chunks.append(whitespaces) - return chunks - - def scan_flow_scalar_breaks(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - # Instead of checking indentation, we check for document - # separators. - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected document separator", self.get_mark()) - while self.peek() in ' \t': - self.forward() - if self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - else: - return chunks - - def scan_plain(self): - # See the specification for details. - # We add an additional restriction for the flow context: - # plain scalars in the flow context cannot contain ',', ':' and '?'. - # We also keep track of the `allow_simple_key` flag here. - # Indentation rules are loosed for the flow context. - chunks = [] - start_mark = self.get_mark() - end_mark = start_mark - indent = self.indent+1 - # We allow zero indentation for scalars, but then we need to check for - # document separators at the beginning of the line. 
- #if indent == 0: - # indent = 1 - spaces = [] - while True: - length = 0 - if self.peek() == '#': - break - while True: - ch = self.peek(length) - if ch in '\0 \t\r\n\x85\u2028\u2029' \ - or (not self.flow_level and ch == ':' and - self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \ - or (self.flow_level and ch in ',:?[]{}'): - break - length += 1 - # It's not clear what we should do with ':' in the flow context. - if (self.flow_level and ch == ':' - and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'): - self.forward(length) - raise ScannerError("while scanning a plain scalar", start_mark, - "found unexpected ':'", self.get_mark(), - "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.") - if length == 0: - break - self.allow_simple_key = False - chunks.extend(spaces) - chunks.append(self.prefix(length)) - self.forward(length) - end_mark = self.get_mark() - spaces = self.scan_plain_spaces(indent, start_mark) - if not spaces or self.peek() == '#' \ - or (not self.flow_level and self.column < indent): - break - return ScalarToken(''.join(chunks), True, start_mark, end_mark) - - def scan_plain_spaces(self, indent, start_mark): - # See the specification for details. - # The specification is really confusing about tabs in plain scalars. - # We just forbid them completely. Do not use tabs in YAML! 
- chunks = [] - length = 0 - while self.peek(length) in ' ': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - self.allow_simple_key = True - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - breaks = [] - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() == ' ': - self.forward() - else: - breaks.append(self.scan_line_break()) - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - elif whitespaces: - chunks.append(whitespaces) - return chunks - - def scan_tag_handle(self, name, start_mark): - # See the specification for details. - # For some strange reasons, the specification does not allow '_' in - # tag handles. I have allowed it anyway. - ch = self.peek() - if ch != '!': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length = 1 - ch = self.peek(length) - if ch != ' ': - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if ch != '!': - self.forward(length) - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length += 1 - value = self.prefix(length) - self.forward(length) - return value - - def scan_tag_uri(self, name, start_mark): - # See the specification for details. - # Note: we do not check if URI is well-formed. 
- chunks = [] - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.!~*\'()[]%': - if ch == '%': - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - chunks.append(self.scan_uri_escapes(name, start_mark)) - else: - length += 1 - ch = self.peek(length) - if length: - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - if not chunks: - raise ScannerError("while parsing a %s" % name, start_mark, - "expected URI, but found %r" % ch, self.get_mark()) - return ''.join(chunks) - - def scan_uri_escapes(self, name, start_mark): - # See the specification for details. - codes = [] - mark = self.get_mark() - while self.peek() == '%': - self.forward() - for k in range(2): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected URI escape sequence of 2 hexdecimal numbers, but found %r" - % self.peek(k), self.get_mark()) - codes.append(int(self.prefix(2), 16)) - self.forward(2) - try: - value = bytes(codes).decode('utf-8') - except UnicodeDecodeError as exc: - raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) - return value - - def scan_line_break(self): - # Transforms: - # '\r\n' : '\n' - # '\r' : '\n' - # '\n' : '\n' - # '\x85' : '\n' - # '\u2028' : '\u2028' - # '\u2029 : '\u2029' - # default : '' - ch = self.peek() - if ch in '\r\n\x85': - if self.prefix(2) == '\r\n': - self.forward(2) - else: - self.forward() - return '\n' - elif ch in '\u2028\u2029': - self.forward() - return ch - return '' - -#try: -# import psyco -# psyco.bind(Scanner) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/serializer.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/serializer.py deleted file mode 100644 index fe911e6..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/serializer.py +++ /dev/null @@ -1,111 +0,0 @@ - -__all__ = 
['Serializer', 'SerializerError'] - -from .error import YAMLError -from .events import * -from .nodes import * - -class SerializerError(YAMLError): - pass - -class Serializer: - - ANCHOR_TEMPLATE = 'id%03d' - - def __init__(self, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - self.use_encoding = encoding - self.use_explicit_start = explicit_start - self.use_explicit_end = explicit_end - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - self.closed = None - - def open(self): - if self.closed is None: - self.emit(StreamStartEvent(encoding=self.use_encoding)) - self.closed = False - elif self.closed: - raise SerializerError("serializer is closed") - else: - raise SerializerError("serializer is already opened") - - def close(self): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif not self.closed: - self.emit(StreamEndEvent()) - self.closed = True - - #def __del__(self): - # self.close() - - def serialize(self, node): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif self.closed: - raise SerializerError("serializer is closed") - self.emit(DocumentStartEvent(explicit=self.use_explicit_start, - version=self.use_version, tags=self.use_tags)) - self.anchor_node(node) - self.serialize_node(node, None, None) - self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - - def anchor_node(self, node): - if node in self.anchors: - if self.anchors[node] is None: - self.anchors[node] = self.generate_anchor(node) - else: - self.anchors[node] = None - if isinstance(node, SequenceNode): - for item in node.value: - self.anchor_node(item) - elif isinstance(node, MappingNode): - for key, value in node.value: - self.anchor_node(key) - self.anchor_node(value) - - def generate_anchor(self, node): - self.last_anchor_id += 1 - return 
self.ANCHOR_TEMPLATE % self.last_anchor_id - - def serialize_node(self, node, parent, index): - alias = self.anchors[node] - if node in self.serialized_nodes: - self.emit(AliasEvent(alias)) - else: - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if isinstance(node, ScalarNode): - detected_tag = self.resolve(ScalarNode, node.value, (True, False)) - default_tag = self.resolve(ScalarNode, node.value, (False, True)) - implicit = (node.tag == detected_tag), (node.tag == default_tag) - self.emit(ScalarEvent(alias, node.tag, implicit, node.value, - style=node.style)) - elif isinstance(node, SequenceNode): - implicit = (node.tag - == self.resolve(SequenceNode, node.value, True)) - self.emit(SequenceStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - index = 0 - for item in node.value: - self.serialize_node(item, node, index) - index += 1 - self.emit(SequenceEndEvent()) - elif isinstance(node, MappingNode): - implicit = (node.tag - == self.resolve(MappingNode, node.value, True)) - self.emit(MappingStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - for key, value in node.value: - self.serialize_node(key, node, None) - self.serialize_node(value, node, key) - self.emit(MappingEndEvent()) - self.ascend_resolver() - diff --git a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/tokens.py b/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/tokens.py deleted file mode 100644 index 4d0b48a..0000000 --- a/libs/PyYAML-3.10/build/lib.win-amd64-3.3/yaml/tokens.py +++ /dev/null @@ -1,104 +0,0 @@ - -class Token(object): - def __init__(self, start_mark, end_mark): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] - attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -#class BOMToken(Token): -# id = '' - -class 
DirectiveToken(Token): - id = '' - def __init__(self, name, value, start_mark, end_mark): - self.name = name - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class DocumentStartToken(Token): - id = '' - -class DocumentEndToken(Token): - id = '' - -class StreamStartToken(Token): - id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndToken(Token): - id = '' - -class BlockSequenceStartToken(Token): - id = '' - -class BlockMappingStartToken(Token): - id = '' - -class BlockEndToken(Token): - id = '' - -class FlowSequenceStartToken(Token): - id = '[' - -class FlowMappingStartToken(Token): - id = '{' - -class FlowSequenceEndToken(Token): - id = ']' - -class FlowMappingEndToken(Token): - id = '}' - -class KeyToken(Token): - id = '?' - -class ValueToken(Token): - id = ':' - -class BlockEntryToken(Token): - id = '-' - -class FlowEntryToken(Token): - id = ',' - -class AliasToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class AnchorToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class TagToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class ScalarToken(Token): - id = '' - def __init__(self, value, plain, start_mark, end_mark, style=None): - self.value = value - self.plain = plain - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - diff --git a/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.c b/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.c deleted file mode 100644 index a81709c..0000000 --- 
a/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.c +++ /dev/null @@ -1,15 +0,0 @@ - -#include - -int main(void) { - yaml_parser_t parser; - yaml_emitter_t emitter; - - yaml_parser_initialize(&parser); - yaml_parser_delete(&parser); - - yaml_emitter_initialize(&emitter); - yaml_emitter_delete(&emitter); - - return 0; -} diff --git a/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.out b/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.out deleted file mode 100644 index 573541a..0000000 --- a/libs/PyYAML-3.10/build/temp.linux-x86_64-2.7/check_libyaml.out +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.c b/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.c deleted file mode 100644 index a81709c..0000000 --- a/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.c +++ /dev/null @@ -1,15 +0,0 @@ - -#include - -int main(void) { - yaml_parser_t parser; - yaml_emitter_t emitter; - - yaml_parser_initialize(&parser); - yaml_parser_delete(&parser); - - yaml_emitter_initialize(&emitter); - yaml_emitter_delete(&emitter); - - return 0; -} diff --git a/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.out b/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.out deleted file mode 100644 index 573541a..0000000 --- a/libs/PyYAML-3.10/build/temp.win-amd64-3.3/Release/check_libyaml.out +++ /dev/null @@ -1 +0,0 @@ -0 diff --git a/libs/PyYAML-3.10/examples/pygments-lexer/example.yaml b/libs/PyYAML-3.10/examples/pygments-lexer/example.yaml deleted file mode 100644 index 9c0ed9d..0000000 --- a/libs/PyYAML-3.10/examples/pygments-lexer/example.yaml +++ /dev/null @@ -1,302 +0,0 @@ - -# -# Examples from the Preview section of the YAML specification -# (http://yaml.org/spec/1.2/#Preview) -# - -# Sequence of scalars ---- -- Mark McGwire -- Sammy Sosa -- Ken Griffey - -# Mapping scalars to scalars ---- -hr: 65 # Home runs -avg: 0.278 # Batting average 
-rbi: 147 # Runs Batted In - -# Mapping scalars to sequences ---- -american: - - Boston Red Sox - - Detroit Tigers - - New York Yankees -national: - - New York Mets - - Chicago Cubs - - Atlanta Braves - -# Sequence of mappings ---- -- - name: Mark McGwire - hr: 65 - avg: 0.278 -- - name: Sammy Sosa - hr: 63 - avg: 0.288 - -# Sequence of sequences ---- -- [name , hr, avg ] -- [Mark McGwire, 65, 0.278] -- [Sammy Sosa , 63, 0.288] - -# Mapping of mappings ---- -Mark McGwire: {hr: 65, avg: 0.278} -Sammy Sosa: { - hr: 63, - avg: 0.288 - } - -# Two documents in a stream ---- # Ranking of 1998 home runs -- Mark McGwire -- Sammy Sosa -- Ken Griffey ---- # Team ranking -- Chicago Cubs -- St Louis Cardinals - -# Documents with the end indicator ---- -time: 20:03:20 -player: Sammy Sosa -action: strike (miss) -... ---- -time: 20:03:47 -player: Sammy Sosa -action: grand slam -... - -# Comments ---- -hr: # 1998 hr ranking - - Mark McGwire - - Sammy Sosa -rbi: - # 1998 rbi ranking - - Sammy Sosa - - Ken Griffey - -# Anchors and aliases ---- -hr: - - Mark McGwire - # Following node labeled SS - - &SS Sammy Sosa -rbi: - - *SS # Subsequent occurrence - - Ken Griffey - -# Mapping between sequences ---- -? - Detroit Tigers - - Chicago cubs -: - - 2001-07-23 -? [ New York Yankees, - Atlanta Braves ] -: [ 2001-07-02, 2001-08-12, - 2001-08-14 ] - -# Inline nested mapping ---- -# products purchased -- item : Super Hoop - quantity: 1 -- item : Basketball - quantity: 4 -- item : Big Shoes - quantity: 1 - -# Literal scalars ---- | # ASCII art - \//||\/|| - // || ||__ - -# Folded scalars ---- > - Mark McGwire's - year was crippled - by a knee injury. - -# Preserved indented block in a folded scalar ---- -> - Sammy Sosa completed another - fine season with great stats. - - 63 Home Runs - 0.288 Batting Average - - What a year! - -# Indentation determines scope ---- -name: Mark McGwire -accomplishment: > - Mark set a major league - home run record in 1998. 
-stats: | - 65 Home Runs - 0.278 Batting Average - -# Quoted scalars ---- -unicode: "Sosa did fine.\u263A" -control: "\b1998\t1999\t2000\n" -hex esc: "\x0d\x0a is \r\n" -single: '"Howdy!" he cried.' -quoted: ' # not a ''comment''.' -tie-fighter: '|\-*-/|' - -# Multi-line flow scalars ---- -plain: - This unquoted scalar - spans many lines. -quoted: "So does this - quoted scalar.\n" - -# Integers ---- -canonical: 12345 -decimal: +12_345 -sexagesimal: 3:25:45 -octal: 014 -hexadecimal: 0xC - -# Floating point ---- -canonical: 1.23015e+3 -exponential: 12.3015e+02 -sexagesimal: 20:30.15 -fixed: 1_230.15 -negative infinity: -.inf -not a number: .NaN - -# Miscellaneous ---- -null: ~ -true: boolean -false: boolean -string: '12345' - -# Timestamps ---- -canonical: 2001-12-15T02:59:43.1Z -iso8601: 2001-12-14t21:59:43.10-05:00 -spaced: 2001-12-14 21:59:43.10 -5 -date: 2002-12-14 - -# Various explicit tags ---- -not-date: !!str 2002-04-28 -picture: !!binary | - R0lGODlhDAAMAIQAAP//9/X - 17unp5WZmZgAAAOfn515eXv - Pz7Y6OjuDg4J+fn5OTk6enp - 56enmleECcgggoBADs= -application specific tag: !something | - The semantics of the tag - above may be different for - different documents. - -# Global tags -%TAG ! tag:clarkevans.com,2002: ---- !shape - # Use the ! handle for presenting - # tag:clarkevans.com,2002:circle -- !circle - center: &ORIGIN {x: 73, y: 129} - radius: 7 -- !line - start: *ORIGIN - finish: { x: 89, y: 102 } -- !label - start: *ORIGIN - color: 0xFFEEBB - text: Pretty vector drawing. - -# Unordered sets ---- !!set -# sets are represented as a -# mapping where each key is -# associated with the empty string -? Mark McGwire -? Sammy Sosa -? Ken Griff - -# Ordered mappings ---- !!omap -# ordered maps are represented as -# a sequence of mappings, with -# each mapping having one key -- Mark McGwire: 65 -- Sammy Sosa: 63 -- Ken Griffy: 58 - -# Full length example ---- ! 
-invoice: 34843 -date : 2001-01-23 -bill-to: &id001 - given : Chris - family : Dumars - address: - lines: | - 458 Walkman Dr. - Suite #292 - city : Royal Oak - state : MI - postal : 48046 -ship-to: *id001 -product: - - sku : BL394D - quantity : 4 - description : Basketball - price : 450.00 - - sku : BL4438H - quantity : 1 - description : Super Hoop - price : 2392.00 -tax : 251.42 -total: 4443.52 -comments: - Late afternoon is best. - Backup contact is Nancy - Billsmer @ 338-4338. - -# Another full-length example ---- -Time: 2001-11-23 15:01:42 -5 -User: ed -Warning: - This is an error message - for the log file ---- -Time: 2001-11-23 15:02:31 -5 -User: ed -Warning: - A slightly different error - message. ---- -Date: 2001-11-23 15:03:17 -5 -User: ed -Fatal: - Unknown variable "bar" -Stack: - - file: TopClass.py - line: 23 - code: | - x = MoreObject("345\n") - - file: MoreClass.py - line: 58 - code: |- - foo = bar - diff --git a/libs/PyYAML-3.10/examples/pygments-lexer/yaml.py b/libs/PyYAML-3.10/examples/pygments-lexer/yaml.py deleted file mode 100644 index 1ce9dac..0000000 --- a/libs/PyYAML-3.10/examples/pygments-lexer/yaml.py +++ /dev/null @@ -1,431 +0,0 @@ - -""" -yaml.py - -Lexer for YAML, a human-friendly data serialization language -(http://yaml.org/). - -Written by Kirill Simonov . - -License: Whatever suitable for inclusion into the Pygments package. 
-""" - -from pygments.lexer import \ - ExtendedRegexLexer, LexerContext, include, bygroups -from pygments.token import \ - Text, Comment, Punctuation, Name, Literal - -__all__ = ['YAMLLexer'] - - -class YAMLLexerContext(LexerContext): - """Indentation context for the YAML lexer.""" - - def __init__(self, *args, **kwds): - super(YAMLLexerContext, self).__init__(*args, **kwds) - self.indent_stack = [] - self.indent = -1 - self.next_indent = 0 - self.block_scalar_indent = None - - -def something(TokenClass): - """Do not produce empty tokens.""" - def callback(lexer, match, context): - text = match.group() - if not text: - return - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - -def reset_indent(TokenClass): - """Reset the indentation levels.""" - def callback(lexer, match, context): - text = match.group() - context.indent_stack = [] - context.indent = -1 - context.next_indent = 0 - context.block_scalar_indent = None - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - -def save_indent(TokenClass, start=False): - """Save a possible indentation level.""" - def callback(lexer, match, context): - text = match.group() - extra = '' - if start: - context.next_indent = len(text) - if context.next_indent < context.indent: - while context.next_indent < context.indent: - context.indent = context.indent_stack.pop() - if context.next_indent > context.indent: - extra = text[context.indent:] - text = text[:context.indent] - else: - context.next_indent += len(text) - if text: - yield match.start(), TokenClass, text - if extra: - yield match.start()+len(text), TokenClass.Error, extra - context.pos = match.end() - return callback - -def set_indent(TokenClass, implicit=False): - """Set the previously saved indentation level.""" - def callback(lexer, match, context): - text = match.group() - if context.indent < context.next_indent: - context.indent_stack.append(context.indent) - context.indent = context.next_indent 
- if not implicit: - context.next_indent += len(text) - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - -def set_block_scalar_indent(TokenClass): - """Set an explicit indentation level for a block scalar.""" - def callback(lexer, match, context): - text = match.group() - context.block_scalar_indent = None - if not text: - return - increment = match.group(1) - if increment: - current_indent = max(context.indent, 0) - increment = int(increment) - context.block_scalar_indent = current_indent + increment - if text: - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - -def parse_block_scalar_empty_line(IndentTokenClass, ContentTokenClass): - """Process an empty line in a block scalar.""" - def callback(lexer, match, context): - text = match.group() - if (context.block_scalar_indent is None or - len(text) <= context.block_scalar_indent): - if text: - yield match.start(), IndentTokenClass, text - else: - indentation = text[:context.block_scalar_indent] - content = text[context.block_scalar_indent:] - yield match.start(), IndentTokenClass, indentation - yield (match.start()+context.block_scalar_indent, - ContentTokenClass, content) - context.pos = match.end() - return callback - -def parse_block_scalar_indent(TokenClass): - """Process indentation spaces in a block scalar.""" - def callback(lexer, match, context): - text = match.group() - if context.block_scalar_indent is None: - if len(text) <= max(context.indent, 0): - context.stack.pop() - context.stack.pop() - return - context.block_scalar_indent = len(text) - else: - if len(text) < context.block_scalar_indent: - context.stack.pop() - context.stack.pop() - return - if text: - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - -def parse_plain_scalar_indent(TokenClass): - """Process indentation spaces in a plain scalar.""" - def callback(lexer, match, context): - text = match.group() - if len(text) <= 
context.indent: - context.stack.pop() - context.stack.pop() - return - if text: - yield match.start(), TokenClass, text - context.pos = match.end() - return callback - - -class YAMLLexer(ExtendedRegexLexer): - """Lexer for the YAML language.""" - - name = 'YAML' - aliases = ['yaml'] - filenames = ['*.yaml', '*.yml'] - mimetypes = ['text/x-yaml'] - - tokens = { - - # the root rules - 'root': [ - # ignored whitespaces - (r'[ ]+(?=#|$)', Text.Blank), - # line breaks - (r'\n+', Text.Break), - # a comment - (r'#[^\n]*', Comment.Single), - # the '%YAML' directive - (r'^%YAML(?=[ ]|$)', reset_indent(Name.Directive), - 'yaml-directive'), - # the %TAG directive - (r'^%TAG(?=[ ]|$)', reset_indent(Name.Directive), - 'tag-directive'), - # document start and document end indicators - (r'^(?:---|\.\.\.)(?=[ ]|$)', - reset_indent(Punctuation.Document), 'block-line'), - # indentation spaces - (r'[ ]*(?![ \t\n\r\f\v]|$)', - save_indent(Text.Indent, start=True), - ('block-line', 'indentation')), - ], - - # trailing whitespaces after directives or a block scalar indicator - 'ignored-line': [ - # ignored whitespaces - (r'[ ]+(?=#|$)', Text.Blank), - # a comment - (r'#[^\n]*', Comment.Single), - # line break - (r'\n', Text.Break, '#pop:2'), - ], - - # the %YAML directive - 'yaml-directive': [ - # the version number - (r'([ ]+)([0-9]+\.[0-9]+)', - bygroups(Text.Blank, Literal.Version), 'ignored-line'), - ], - - # the %YAG directive - 'tag-directive': [ - # a tag handle and the corresponding prefix - (r'([ ]+)(!|![0-9A-Za-z_-]*!)' - r'([ ]+)(!|!?[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)', - bygroups(Text.Blank, Name.Type, Text.Blank, Name.Type), - 'ignored-line'), - ], - - # block scalar indicators and indentation spaces - 'indentation': [ - # trailing whitespaces are ignored - (r'[ ]*$', something(Text.Blank), '#pop:2'), - # whitespaces preceeding block collection indicators - (r'[ ]+(?=[?:-](?:[ ]|$))', save_indent(Text.Indent)), - # block collection indicators - (r'[?:-](?=[ ]|$)', 
set_indent(Punctuation.Indicator)), - # the beginning a block line - (r'[ ]*', save_indent(Text.Indent), '#pop'), - ], - - # an indented line in the block context - 'block-line': [ - # the line end - (r'[ ]*(?=#|$)', something(Text.Blank), '#pop'), - # whitespaces separating tokens - (r'[ ]+', Text.Blank), - # tags, anchors and aliases, - include('descriptors'), - # block collections and scalars - include('block-nodes'), - # flow collections and quoted scalars - include('flow-nodes'), - # a plain scalar - (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`-]|[?:-][^ \t\n\r\f\v])', - something(Literal.Scalar.Plain), - 'plain-scalar-in-block-context'), - ], - - # tags, anchors, aliases - 'descriptors' : [ - # a full-form tag - (r'!<[0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+>', Name.Type), - # a tag in the form '!', '!suffix' or '!handle!suffix' - (r'!(?:[0-9A-Za-z_-]+)?' - r'(?:![0-9A-Za-z;/?:@&=+$,_.!~*\'()\[\]%-]+)?', Name.Type), - # an anchor - (r'&[0-9A-Za-z_-]+', Name.Anchor), - # an alias - (r'\*[0-9A-Za-z_-]+', Name.Alias), - ], - - # block collections and scalars - 'block-nodes': [ - # implicit key - (r':(?=[ ]|$)', set_indent(Punctuation.Indicator, implicit=True)), - # literal and folded scalars - (r'[|>]', Punctuation.Indicator, - ('block-scalar-content', 'block-scalar-header')), - ], - - # flow collections and quoted scalars - 'flow-nodes': [ - # a flow sequence - (r'\[', Punctuation.Indicator, 'flow-sequence'), - # a flow mapping - (r'\{', Punctuation.Indicator, 'flow-mapping'), - # a single-quoted scalar - (r'\'', Literal.Scalar.Flow.Quote, 'single-quoted-scalar'), - # a double-quoted scalar - (r'\"', Literal.Scalar.Flow.Quote, 'double-quoted-scalar'), - ], - - # the content of a flow collection - 'flow-collection': [ - # whitespaces - (r'[ ]+', Text.Blank), - # line breaks - (r'\n+', Text.Break), - # a comment - (r'#[^\n]*', Comment.Single), - # simple indicators - (r'[?:,]', Punctuation.Indicator), - # tags, anchors and aliases - include('descriptors'), - # nested 
collections and quoted scalars - include('flow-nodes'), - # a plain scalar - (r'(?=[^ \t\n\r\f\v?:,\[\]{}#&*!|>\'"%@`])', - something(Literal.Scalar.Plain), - 'plain-scalar-in-flow-context'), - ], - - # a flow sequence indicated by '[' and ']' - 'flow-sequence': [ - # include flow collection rules - include('flow-collection'), - # the closing indicator - (r'\]', Punctuation.Indicator, '#pop'), - ], - - # a flow mapping indicated by '{' and '}' - 'flow-mapping': [ - # include flow collection rules - include('flow-collection'), - # the closing indicator - (r'\}', Punctuation.Indicator, '#pop'), - ], - - # block scalar lines - 'block-scalar-content': [ - # line break - (r'\n', Text.Break), - # empty line - (r'^[ ]+$', - parse_block_scalar_empty_line(Text.Indent, - Literal.Scalar.Block)), - # indentation spaces (we may leave the state here) - (r'^[ ]*', parse_block_scalar_indent(Text.Indent)), - # line content - (r'[^\n\r\f\v]+', Literal.Scalar.Block), - ], - - # the content of a literal or folded scalar - 'block-scalar-header': [ - # indentation indicator followed by chomping flag - (r'([1-9])?[+-]?(?=[ ]|$)', - set_block_scalar_indent(Punctuation.Indicator), - 'ignored-line'), - # chomping flag followed by indentation indicator - (r'[+-]?([1-9])?(?=[ ]|$)', - set_block_scalar_indent(Punctuation.Indicator), - 'ignored-line'), - ], - - # ignored and regular whitespaces in quoted scalars - 'quoted-scalar-whitespaces': [ - # leading and trailing whitespaces are ignored - (r'^[ ]+|[ ]+$', Text.Blank), - # line breaks are ignored - (r'\n+', Text.Break), - # other whitespaces are a part of the value - (r'[ ]+', Literal.Scalar.Flow), - ], - - # single-quoted scalars - 'single-quoted-scalar': [ - # include whitespace and line break rules - include('quoted-scalar-whitespaces'), - # escaping of the quote character - (r'\'\'', Literal.Scalar.Flow.Escape), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v\']+', Literal.Scalar.Flow), - # the closing quote - (r'\'', 
Literal.Scalar.Flow.Quote, '#pop'), - ], - - # double-quoted scalars - 'double-quoted-scalar': [ - # include whitespace and line break rules - include('quoted-scalar-whitespaces'), - # escaping of special characters - (r'\\[0abt\tn\nvfre "\\N_LP]', Literal.Scalar.Flow.Escape), - # escape codes - (r'\\(?:x[0-9A-Fa-f]{2}|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})', - Literal.Scalar.Flow.Escape), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v\"\\]+', Literal.Scalar.Flow), - # the closing quote - (r'"', Literal.Scalar.Flow.Quote, '#pop'), - ], - - # the beginning of a new line while scanning a plain scalar - 'plain-scalar-in-block-context-new-line': [ - # empty lines - (r'^[ ]+$', Text.Blank), - # line breaks - (r'\n+', Text.Break), - # document start and document end indicators - (r'^(?=---|\.\.\.)', something(Punctuation.Document), '#pop:3'), - # indentation spaces (we may leave the block line state here) - (r'^[ ]*', parse_plain_scalar_indent(Text.Indent), '#pop'), - ], - - # a plain scalar in the block context - 'plain-scalar-in-block-context': [ - # the scalar ends with the ':' indicator - (r'[ ]*(?=:[ ]|:$)', something(Text.Blank), '#pop'), - # the scalar ends with whitespaces followed by a comment - (r'[ ]+(?=#)', Text.Blank, '#pop'), - # trailing whitespaces are ignored - (r'[ ]+$', Text.Blank), - # line breaks are ignored - (r'\n+', Text.Break, 'plain-scalar-in-block-context-new-line'), - # other whitespaces are a part of the value - (r'[ ]+', Literal.Scalar.Plain), - # regular non-whitespace characters - (r'(?::(?![ \t\n\r\f\v])|[^ \t\n\r\f\v:])+', - Literal.Scalar.Plain), - ], - - # a plain scalar is the flow context - 'plain-scalar-in-flow-context': [ - # the scalar ends with an indicator character - (r'[ ]*(?=[,:?\[\]{}])', something(Text.Blank), '#pop'), - # the scalar ends with a comment - (r'[ ]+(?=#)', Text.Blank, '#pop'), - # leading and trailing whitespaces are ignored - (r'^[ ]+|[ ]+$', Text.Blank), - # line breaks are ignored - (r'\n+', 
Text.Break), - # other whitespaces are a part of the value - (r'[ ]+', Literal.Scalar.Plain), - # regular non-whitespace characters - (r'[^ \t\n\r\f\v,:?\[\]{}]+', Literal.Scalar.Plain), - ], - - } - - def get_tokens_unprocessed(self, text=None, context=None): - if context is None: - context = YAMLLexerContext(text, 0) - return super(YAMLLexer, self).get_tokens_unprocessed(text, context) - - diff --git a/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.cfg b/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.cfg deleted file mode 100644 index 69bb847..0000000 --- a/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.cfg +++ /dev/null @@ -1,115 +0,0 @@ -%YAML 1.1 ---- - -ascii: - - header: "\e[0;1;30;40m" - - footer: "\e[0m" - - tokens: - stream-start: - stream-end: - directive: { start: "\e[35m", end: "\e[0;1;30;40m" } - document-start: { start: "\e[35m", end: "\e[0;1;30;40m" } - document-end: { start: "\e[35m", end: "\e[0;1;30;40m" } - block-sequence-start: - block-mapping-start: - block-end: - flow-sequence-start: { start: "\e[33m", end: "\e[0;1;30;40m" } - flow-mapping-start: { start: "\e[33m", end: "\e[0;1;30;40m" } - flow-sequence-end: { start: "\e[33m", end: "\e[0;1;30;40m" } - flow-mapping-end: { start: "\e[33m", end: "\e[0;1;30;40m" } - key: { start: "\e[33m", end: "\e[0;1;30;40m" } - value: { start: "\e[33m", end: "\e[0;1;30;40m" } - block-entry: { start: "\e[33m", end: "\e[0;1;30;40m" } - flow-entry: { start: "\e[33m", end: "\e[0;1;30;40m" } - alias: { start: "\e[32m", end: "\e[0;1;30;40m" } - anchor: { start: "\e[32m", end: "\e[0;1;30;40m" } - tag: { start: "\e[32m", end: "\e[0;1;30;40m" } - scalar: { start: "\e[36m", end: "\e[0;1;30;40m" } - - replaces: - - "\r\n": "\n" - - "\r": "\n" - - "\n": "\n" - - "\x85": "\n" - - "\u2028": "\n" - - "\u2029": "\n" - -html: &html - - tokens: - stream-start: - stream-end: - directive: { start: , end: } - document-start: { start: , end: } - document-end: { start: , end: } - block-sequence-start: - block-mapping-start: - 
block-end: - flow-sequence-start: { start: , end: } - flow-mapping-start: { start: , end: } - flow-sequence-end: { start: , end: } - flow-mapping-end: { start: , end: } - key: { start: , end: } - value: { start: , end: } - block-entry: { start: , end: } - flow-entry: { start: , end: } - alias: { start: , end: } - anchor: { start: , end: } - tag: { start: , end: } - scalar: { start: , end: } - - events: - stream-start: { start:
 }
-        stream-end:     { end: 
} - document-start: { start: } - document-end: { end: } - sequence-start: { start: } - sequence-end: { end: } - mapping-start: { start: } - mapping-end: { end: } - scalar: { start: , end: } - - replaces: - - "\r\n": "\n" - - "\r": "\n" - - "\n": "\n" - - "\x85": "\n" - - "\u2028": "\n" - - "\u2029": "\n" - - "&": "&" - - "<": "<" - - ">": ">" - -html-page: - - header: | - - - A YAML stream - - - - footer: | - - - - <<: *html - - -# vim: ft=yaml diff --git a/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.py b/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.py deleted file mode 100644 index d6f7bf4..0000000 --- a/libs/PyYAML-3.10/examples/yaml-highlight/yaml_hl.py +++ /dev/null @@ -1,114 +0,0 @@ -#!/usr/bin/python - -import yaml, codecs, sys, os.path, optparse - -class Style: - - def __init__(self, header=None, footer=None, - tokens=None, events=None, replaces=None): - self.header = header - self.footer = footer - self.replaces = replaces - self.substitutions = {} - for domain, Class in [(tokens, 'Token'), (events, 'Event')]: - if not domain: - continue - for key in domain: - name = ''.join([part.capitalize() for part in key.split('-')]) - cls = getattr(yaml, '%s%s' % (name, Class)) - value = domain[key] - if not value: - continue - start = value.get('start') - end = value.get('end') - if start: - self.substitutions[cls, -1] = start - if end: - self.substitutions[cls, +1] = end - - def __setstate__(self, state): - self.__init__(**state) - -yaml.add_path_resolver(u'tag:yaml.org,2002:python/object:__main__.Style', - [None], dict) -yaml.add_path_resolver(u'tag:yaml.org,2002:pairs', - [None, u'replaces'], list) - -class YAMLHighlight: - - def __init__(self, options): - config = yaml.load(file(options.config, 'rb').read()) - self.style = config[options.style] - if options.input: - self.input = file(options.input, 'rb') - else: - self.input = sys.stdin - if options.output: - self.output = file(options.output, 'wb') - else: - self.output = sys.stdout - - def 
highlight(self): - input = self.input.read() - if input.startswith(codecs.BOM_UTF16_LE): - input = unicode(input, 'utf-16-le') - elif input.startswith(codecs.BOM_UTF16_BE): - input = unicode(input, 'utf-16-be') - else: - input = unicode(input, 'utf-8') - substitutions = self.style.substitutions - tokens = yaml.scan(input) - events = yaml.parse(input) - markers = [] - number = 0 - for token in tokens: - number += 1 - if token.start_mark.index != token.end_mark.index: - cls = token.__class__ - if (cls, -1) in substitutions: - markers.append([token.start_mark.index, +2, number, substitutions[cls, -1]]) - if (cls, +1) in substitutions: - markers.append([token.end_mark.index, -2, number, substitutions[cls, +1]]) - number = 0 - for event in events: - number += 1 - cls = event.__class__ - if (cls, -1) in substitutions: - markers.append([event.start_mark.index, +1, number, substitutions[cls, -1]]) - if (cls, +1) in substitutions: - markers.append([event.end_mark.index, -1, number, substitutions[cls, +1]]) - markers.sort() - markers.reverse() - chunks = [] - position = len(input) - for index, weight1, weight2, substitution in markers: - if index < position: - chunk = input[index:position] - for substring, replacement in self.style.replaces: - chunk = chunk.replace(substring, replacement) - chunks.append(chunk) - position = index - chunks.append(substitution) - chunks.reverse() - result = u''.join(chunks) - if self.style.header: - self.output.write(self.style.header) - self.output.write(result.encode('utf-8')) - if self.style.footer: - self.output.write(self.style.footer) - -if __name__ == '__main__': - parser = optparse.OptionParser() - parser.add_option('-s', '--style', dest='style', default='ascii', - help="specify the highlighting style", metavar='STYLE') - parser.add_option('-c', '--config', dest='config', - default=os.path.join(os.path.dirname(sys.argv[0]), 'yaml_hl.cfg'), - help="set an alternative configuration file", metavar='CONFIG') - parser.add_option('-i', 
'--input', dest='input', default=None, - help="set the input file (default: stdin)", metavar='FILE') - parser.add_option('-o', '--output', dest='output', default=None, - help="set the output file (default: stdout)", metavar='FILE') - (options, args) = parser.parse_args() - hl = YAMLHighlight(options) - hl.highlight() - diff --git a/libs/PyYAML-3.10/ext/_yaml.c b/libs/PyYAML-3.10/ext/_yaml.c deleted file mode 100644 index 71d5069..0000000 --- a/libs/PyYAML-3.10/ext/_yaml.c +++ /dev/null @@ -1,21417 +0,0 @@ -/* Generated by Cython 0.14.1 on Mon May 30 00:24:10 2011 */ - -#define PY_SSIZE_T_CLEAN -#include "Python.h" -#ifndef Py_PYTHON_H - #error Python headers needed to compile C extensions, please install development version of Python. -#else - -#include /* For offsetof */ -#ifndef offsetof -#define offsetof(type, member) ( (size_t) & ((type*)0) -> member ) -#endif - -#if !defined(WIN32) && !defined(MS_WINDOWS) - #ifndef __stdcall - #define __stdcall - #endif - #ifndef __cdecl - #define __cdecl - #endif - #ifndef __fastcall - #define __fastcall - #endif -#endif - -#ifndef DL_IMPORT - #define DL_IMPORT(t) t -#endif -#ifndef DL_EXPORT - #define DL_EXPORT(t) t -#endif - -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif - -#if PY_VERSION_HEX < 0x02040000 - #define METH_COEXIST 0 - #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type) - #define PyDict_Contains(d,o) PySequence_Contains(d,o) -#endif - -#if PY_VERSION_HEX < 0x02050000 - typedef int Py_ssize_t; - #define PY_SSIZE_T_MAX INT_MAX - #define PY_SSIZE_T_MIN INT_MIN - #define PY_FORMAT_SIZE_T "" - #define PyInt_FromSsize_t(z) PyInt_FromLong(z) - #define PyInt_AsSsize_t(o) PyInt_AsLong(o) - #define PyNumber_Index(o) PyNumber_Int(o) - #define PyIndex_Check(o) PyNumber_Check(o) - #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message) -#endif - -#if PY_VERSION_HEX < 0x02060000 - #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt) - #define Py_TYPE(ob) 
(((PyObject*)(ob))->ob_type) - #define Py_SIZE(ob) (((PyVarObject*)(ob))->ob_size) - #define PyVarObject_HEAD_INIT(type, size) \ - PyObject_HEAD_INIT(type) size, - #define PyType_Modified(t) - - typedef struct { - void *buf; - PyObject *obj; - Py_ssize_t len; - Py_ssize_t itemsize; - int readonly; - int ndim; - char *format; - Py_ssize_t *shape; - Py_ssize_t *strides; - Py_ssize_t *suboffsets; - void *internal; - } Py_buffer; - - #define PyBUF_SIMPLE 0 - #define PyBUF_WRITABLE 0x0001 - #define PyBUF_FORMAT 0x0004 - #define PyBUF_ND 0x0008 - #define PyBUF_STRIDES (0x0010 | PyBUF_ND) - #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES) - #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES) - #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES) - #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES) - -#endif - -#if PY_MAJOR_VERSION < 3 - #define __Pyx_BUILTIN_MODULE_NAME "__builtin__" -#else - #define __Pyx_BUILTIN_MODULE_NAME "builtins" -#endif - -#if PY_MAJOR_VERSION >= 3 - #define Py_TPFLAGS_CHECKTYPES 0 - #define Py_TPFLAGS_HAVE_INDEX 0 -#endif - -#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3) - #define Py_TPFLAGS_HAVE_NEWBUFFER 0 -#endif - -#if PY_MAJOR_VERSION >= 3 - #define PyBaseString_Type PyUnicode_Type - #define PyStringObject PyUnicodeObject - #define PyString_Type PyUnicode_Type - #define PyString_Check PyUnicode_Check - #define PyString_CheckExact PyUnicode_CheckExact -#endif - -#if PY_VERSION_HEX < 0x02060000 - #define PyBytesObject PyStringObject - #define PyBytes_Type PyString_Type - #define PyBytes_Check PyString_Check - #define PyBytes_CheckExact PyString_CheckExact - #define PyBytes_FromString PyString_FromString - #define PyBytes_FromStringAndSize PyString_FromStringAndSize - #define PyBytes_FromFormat PyString_FromFormat - #define PyBytes_DecodeEscape PyString_DecodeEscape - #define PyBytes_AsString PyString_AsString - #define PyBytes_AsStringAndSize PyString_AsStringAndSize - #define PyBytes_Size PyString_Size - #define 
PyBytes_AS_STRING PyString_AS_STRING - #define PyBytes_GET_SIZE PyString_GET_SIZE - #define PyBytes_Repr PyString_Repr - #define PyBytes_Concat PyString_Concat - #define PyBytes_ConcatAndDel PyString_ConcatAndDel -#endif - -#if PY_VERSION_HEX < 0x02060000 - #define PySet_Check(obj) PyObject_TypeCheck(obj, &PySet_Type) - #define PyFrozenSet_Check(obj) PyObject_TypeCheck(obj, &PyFrozenSet_Type) -#endif -#ifndef PySet_CheckExact - #define PySet_CheckExact(obj) (Py_TYPE(obj) == &PySet_Type) -#endif - -#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type) - -#if PY_MAJOR_VERSION >= 3 - #define PyIntObject PyLongObject - #define PyInt_Type PyLong_Type - #define PyInt_Check(op) PyLong_Check(op) - #define PyInt_CheckExact(op) PyLong_CheckExact(op) - #define PyInt_FromString PyLong_FromString - #define PyInt_FromUnicode PyLong_FromUnicode - #define PyInt_FromLong PyLong_FromLong - #define PyInt_FromSize_t PyLong_FromSize_t - #define PyInt_FromSsize_t PyLong_FromSsize_t - #define PyInt_AsLong PyLong_AsLong - #define PyInt_AS_LONG PyLong_AS_LONG - #define PyInt_AsSsize_t PyLong_AsSsize_t - #define PyInt_AsUnsignedLongMask PyLong_AsUnsignedLongMask - #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask -#endif - -#if PY_MAJOR_VERSION >= 3 - #define PyBoolObject PyLongObject -#endif - - -#if PY_MAJOR_VERSION >= 3 - #define __Pyx_PyNumber_Divide(x,y) PyNumber_TrueDivide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceTrueDivide(x,y) -#else - #define __Pyx_PyNumber_Divide(x,y) PyNumber_Divide(x,y) - #define __Pyx_PyNumber_InPlaceDivide(x,y) PyNumber_InPlaceDivide(x,y) -#endif - -#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300) - #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value) - #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b) -#else - #define 
__Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0))) - #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1))) - #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \ - (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \ - (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \ - (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1))) -#endif - -#if PY_MAJOR_VERSION >= 3 - #define PyMethod_New(func, self, klass) ((self) ? 
PyMethod_New(func, self) : PyInstanceMethod_New(func)) -#endif - -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),((char *)(n))) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),((char *)(n))) -#else - #define __Pyx_GetAttrString(o,n) PyObject_GetAttrString((o),(n)) - #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a)) - #define __Pyx_DelAttrString(o,n) PyObject_DelAttrString((o),(n)) -#endif - -#if PY_VERSION_HEX < 0x02050000 - #define __Pyx_NAMESTR(n) ((char *)(n)) - #define __Pyx_DOCSTR(n) ((char *)(n)) -#else - #define __Pyx_NAMESTR(n) (n) - #define __Pyx_DOCSTR(n) (n) -#endif - -#ifdef __cplusplus -#define __PYX_EXTERN_C extern "C" -#else -#define __PYX_EXTERN_C extern -#endif - -#if defined(WIN32) || defined(MS_WINDOWS) -#define _USE_MATH_DEFINES -#endif -#include -#define __PYX_HAVE_API___yaml -#include "_yaml.h" - -#ifdef PYREX_WITHOUT_ASSERTIONS -#define CYTHON_WITHOUT_ASSERTIONS -#endif - - -/* inline attribute */ -#ifndef CYTHON_INLINE - #if defined(__GNUC__) - #define CYTHON_INLINE __inline__ - #elif defined(_MSC_VER) - #define CYTHON_INLINE __inline - #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L - #define CYTHON_INLINE inline - #else - #define CYTHON_INLINE - #endif -#endif - -/* unused attribute */ -#ifndef CYTHON_UNUSED -# if defined(__GNUC__) -# if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4)) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -# elif defined(__ICC) || defined(__INTEL_COMPILER) -# define CYTHON_UNUSED __attribute__ ((__unused__)) -# else -# define CYTHON_UNUSED -# endif -#endif - -typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/ - - -/* Type Conversion 
Predeclarations */ - -#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s) -#define __Pyx_PyBytes_AsUString(s) ((unsigned char*) PyBytes_AsString(s)) - -#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False)) -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*); -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x); - -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*); -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t); -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*); - -#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x)) - - -#ifdef __GNUC__ -/* Test for GCC > 2.95 */ -#if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95)) -#define likely(x) __builtin_expect(!!(x), 1) -#define unlikely(x) __builtin_expect(!!(x), 0) -#else /* __GNUC__ > 2 ... */ -#define likely(x) (x) -#define unlikely(x) (x) -#endif /* __GNUC__ > 2 ... 
*/ -#else /* __GNUC__ */ -#define likely(x) (x) -#define unlikely(x) (x) -#endif /* __GNUC__ */ - -static PyObject *__pyx_m; -static PyObject *__pyx_b; -static PyObject *__pyx_empty_tuple; -static PyObject *__pyx_empty_bytes; -static int __pyx_lineno; -static int __pyx_clineno = 0; -static const char * __pyx_cfilenm= __FILE__; -static const char *__pyx_filename; - - -static const char *__pyx_f[] = { - "_yaml.pyx", -}; - -/* Type declarations */ - -/* "_yaml.pyx":64 - * MappingNode = yaml.nodes.MappingNode - * - * cdef class Mark: # <<<<<<<<<<<<<< - * cdef readonly object name - * cdef readonly int index - */ - -struct __pyx_obj_5_yaml_Mark { - PyObject_HEAD - PyObject *name; - int index; - int line; - int column; - PyObject *buffer; - PyObject *pointer; -}; - -/* "_yaml.pyx":247 - * # self.style = style - * - * cdef class CParser: # <<<<<<<<<<<<<< - * - * cdef yaml_parser_t parser - */ - -struct __pyx_obj_5_yaml_CParser { - PyObject_HEAD - struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtab; - yaml_parser_t parser; - yaml_event_t parsed_event; - PyObject *stream; - PyObject *stream_name; - PyObject *current_token; - PyObject *current_event; - PyObject *anchors; - PyObject *stream_cache; - int stream_cache_len; - int stream_cache_pos; - int unicode_source; -}; - -/* "_yaml.pyx":935 - * return 1 - * - * cdef class CEmitter: # <<<<<<<<<<<<<< - * - * cdef yaml_emitter_t emitter - */ - -struct __pyx_obj_5_yaml_CEmitter { - PyObject_HEAD - struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtab; - yaml_emitter_t emitter; - PyObject *stream; - int document_start_implicit; - int document_end_implicit; - PyObject *use_version; - PyObject *use_tags; - PyObject *serialized_nodes; - PyObject *anchors; - int last_alias_id; - int closed; - int dump_unicode; - PyObject *use_encoding; -}; - - -/* "_yaml.pyx":247 - * # self.style = style - * - * cdef class CParser: # <<<<<<<<<<<<<< - * - * cdef yaml_parser_t parser - */ - -struct __pyx_vtabstruct_5_yaml_CParser { - PyObject 
*(*_parser_error)(struct __pyx_obj_5_yaml_CParser *); - PyObject *(*_scan)(struct __pyx_obj_5_yaml_CParser *); - PyObject *(*_token_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *); - PyObject *(*_parse)(struct __pyx_obj_5_yaml_CParser *); - PyObject *(*_event_to_object)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *); - PyObject *(*_compose_document)(struct __pyx_obj_5_yaml_CParser *); - PyObject *(*_compose_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *); - PyObject *(*_compose_scalar_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *); - PyObject *(*_compose_sequence_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *); - PyObject *(*_compose_mapping_node)(struct __pyx_obj_5_yaml_CParser *, PyObject *); - int (*_parse_next_event)(struct __pyx_obj_5_yaml_CParser *); -}; -static struct __pyx_vtabstruct_5_yaml_CParser *__pyx_vtabptr_5_yaml_CParser; - - -/* "_yaml.pyx":935 - * return 1 - * - * cdef class CEmitter: # <<<<<<<<<<<<<< - * - * cdef yaml_emitter_t emitter - */ - -struct __pyx_vtabstruct_5_yaml_CEmitter { - PyObject *(*_emitter_error)(struct __pyx_obj_5_yaml_CEmitter *); - int (*_object_to_event)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *); - int (*_anchor_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *); - int (*_serialize_node)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *); -}; -static struct __pyx_vtabstruct_5_yaml_CEmitter *__pyx_vtabptr_5_yaml_CEmitter; - -#ifndef CYTHON_REFNANNY - #define CYTHON_REFNANNY 0 -#endif - -#if CYTHON_REFNANNY - typedef struct { - void (*INCREF)(void*, PyObject*, int); - void (*DECREF)(void*, PyObject*, int); - void (*GOTREF)(void*, PyObject*, int); - void (*GIVEREF)(void*, PyObject*, int); - void* (*SetupContext)(const char*, int, const char*); - void (*FinishContext)(void**); - } __Pyx_RefNannyAPIStruct; - static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL; - static __Pyx_RefNannyAPIStruct * __Pyx_RefNannyImportAPI(const char *modname) 
{ - PyObject *m = NULL, *p = NULL; - void *r = NULL; - m = PyImport_ImportModule((char *)modname); - if (!m) goto end; - p = PyObject_GetAttrString(m, (char *)"RefNannyAPI"); - if (!p) goto end; - r = PyLong_AsVoidPtr(p); - end: - Py_XDECREF(p); - Py_XDECREF(m); - return (__Pyx_RefNannyAPIStruct *)r; - } - #define __Pyx_RefNannySetupContext(name) void *__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__) - #define __Pyx_RefNannyFinishContext() __Pyx_RefNanny->FinishContext(&__pyx_refnanny) - #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__) - #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r);} } while(0) -#else - #define __Pyx_RefNannySetupContext(name) - #define __Pyx_RefNannyFinishContext() - #define __Pyx_INCREF(r) Py_INCREF(r) - #define __Pyx_DECREF(r) Py_DECREF(r) - #define __Pyx_GOTREF(r) - #define __Pyx_GIVEREF(r) - #define __Pyx_XDECREF(r) Py_XDECREF(r) -#endif /* CYTHON_REFNANNY */ -#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);} } while(0) -#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r);} } while(0) - -static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ - -static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact, - Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/ - -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, PyObject* kw_name); /*proto*/ - -static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[], PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args, const char* function_name); /*proto*/ - -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject 
**tb); /*proto*/ - -static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ -static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ - -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ - -static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict, - const char* function_name, int kw_allowed); /*proto*/ - -static CYTHON_INLINE long __Pyx_NegateNonNeg(long b) { return unlikely(b < 0) ? b : !b; } -static CYTHON_INLINE PyObject* __Pyx_PyBoolOrNull_FromLong(long b) { - return unlikely(b < 0) ? NULL : __Pyx_PyBool_FromLong(b); -} - - -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) { - PyObject *r; - if (!j) return NULL; - r = PyObject_GetItem(o, j); - Py_DECREF(j); - return r; -} - - -#define __Pyx_GetItemInt_List(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? \ - __Pyx_GetItemInt_List_Fast(o, i) : \ - __Pyx_GetItemInt_Generic(o, to_py_func(i))) - -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i) { - if (likely(o != Py_None)) { - if (likely((0 <= i) & (i < PyList_GET_SIZE(o)))) { - PyObject *r = PyList_GET_ITEM(o, i); - Py_INCREF(r); - return r; - } - else if ((-PyList_GET_SIZE(o) <= i) & (i < 0)) { - PyObject *r = PyList_GET_ITEM(o, PyList_GET_SIZE(o) + i); - Py_INCREF(r); - return r; - } - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - -#define __Pyx_GetItemInt_Tuple(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? 
\ - __Pyx_GetItemInt_Tuple_Fast(o, i) : \ - __Pyx_GetItemInt_Generic(o, to_py_func(i))) - -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i) { - if (likely(o != Py_None)) { - if (likely((0 <= i) & (i < PyTuple_GET_SIZE(o)))) { - PyObject *r = PyTuple_GET_ITEM(o, i); - Py_INCREF(r); - return r; - } - else if ((-PyTuple_GET_SIZE(o) <= i) & (i < 0)) { - PyObject *r = PyTuple_GET_ITEM(o, PyTuple_GET_SIZE(o) + i); - Py_INCREF(r); - return r; - } - } - return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); -} - - -#define __Pyx_GetItemInt(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? \ - __Pyx_GetItemInt_Fast(o, i) : \ - __Pyx_GetItemInt_Generic(o, to_py_func(i))) - -static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i) { - PyObject *r; - if (PyList_CheckExact(o) && ((0 <= i) & (i < PyList_GET_SIZE(o)))) { - r = PyList_GET_ITEM(o, i); - Py_INCREF(r); - } - else if (PyTuple_CheckExact(o) && ((0 <= i) & (i < PyTuple_GET_SIZE(o)))) { - r = PyTuple_GET_ITEM(o, i); - Py_INCREF(r); - } - else if (Py_TYPE(o)->tp_as_sequence && Py_TYPE(o)->tp_as_sequence->sq_item && (likely(i >= 0))) { - r = PySequence_GetItem(o, i); - } - else { - r = __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i)); - } - return r; -} - -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index); - -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected); - -static PyObject *__Pyx_UnpackItem(PyObject *, Py_ssize_t index); /*proto*/ -static int __Pyx_EndUnpack(PyObject *, Py_ssize_t expected); /*proto*/ - -static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb); /*proto*/ -static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ - -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/ - -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *, PyObject *, PyObject *); /*proto*/ - -static CYTHON_INLINE 
int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/ - -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *); - -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *); - -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *); - -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *); - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *); - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *); - -static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *); - -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *); - -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *); - -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *); - -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *); - -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *); - -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *); - -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *); - -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *); - -static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *); - -static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/ - -static void __Pyx_AddTraceback(const char *funcname); /*proto*/ - -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ -/* Module declarations from _yaml */ - -static PyTypeObject *__pyx_ptype_5_yaml_Mark = 0; -static PyTypeObject *__pyx_ptype_5_yaml_CParser = 0; -static PyTypeObject *__pyx_ptype_5_yaml_CEmitter = 0; -static int __pyx_f_5_yaml_input_handler(void *, char *, int, int *); /*proto*/ -static int __pyx_f_5_yaml_output_handler(void *, char *, int); /*proto*/ -#define __Pyx_MODULE_NAME "_yaml" -static int __pyx_module_is_main__yaml = 0; - -/* Implementation of _yaml */ -static PyObject *__pyx_builtin_MemoryError; -static PyObject 
*__pyx_builtin_AttributeError; -static PyObject *__pyx_builtin_TypeError; -static PyObject *__pyx_builtin_ValueError; -static char __pyx_k_1[] = " in \"%s\", line %d, column %d"; -static char __pyx_k_2[] = ""; -static char __pyx_k_3[] = ""; -static char __pyx_k_4[] = ""; -static char __pyx_k_5[] = "a string or stream input is required"; -static char __pyx_k_8[] = "?"; -static char __pyx_k_9[] = "no parser error"; -static char __pyx_k_12[] = "utf-8"; -static char __pyx_k_13[] = "utf-16-le"; -static char __pyx_k_14[] = "utf-16-be"; -static char __pyx_k_15[] = "BlockSequenceStartToken"; -static char __pyx_k_16[] = "BlockMappingStartToken"; -static char __pyx_k_17[] = "FlowSequenceStartToken"; -static char __pyx_k_18[] = "FlowSequenceEndToken"; -static char __pyx_k_19[] = "FlowMappingStartToken"; -static char __pyx_k_20[] = ""; -static char __pyx_k_21[] = "'"; -static char __pyx_k_22[] = "\""; -static char __pyx_k_23[] = "|"; -static char __pyx_k_24[] = ">"; -static char __pyx_k_25[] = "unknown token type"; -static char __pyx_k_28[] = "unknown event type"; -static char __pyx_k_31[] = "expected a single document in the stream"; -static char __pyx_k_32[] = "but found another document"; -static char __pyx_k_33[] = "found undefined alias"; -static char __pyx_k_34[] = "found duplicate anchor; first occurence"; -static char __pyx_k_35[] = "second occurence"; -static char __pyx_k_36[] = "_compose_scalar_node"; -static char __pyx_k_37[] = "_compose_sequence_node"; -static char __pyx_k_38[] = "_compose_mapping_node"; -static char __pyx_k_39[] = "a string value is expected"; -static char __pyx_k_42[] = "\r"; -static char __pyx_k_43[] = "\n"; -static char __pyx_k_44[] = "\r\n"; -static char __pyx_k_45[] = "document_start_implicit"; -static char __pyx_k_46[] = "document_end_implicit"; -static char __pyx_k_47[] = "no emitter error"; -static char __pyx_k_50[] = "too many tags"; -static char __pyx_k_53[] = "tag handle must be a string"; -static char __pyx_k_56[] = "tag prefix must be 
a string"; -static char __pyx_k_59[] = "anchor must be a string"; -static char __pyx_k_64[] = "tag must be a string"; -static char __pyx_k_67[] = "value must be a string"; -static char __pyx_k_78[] = "invalid event %s"; -static char __pyx_k_79[] = "serializer is closed"; -static char __pyx_k_82[] = "serializer is already opened"; -static char __pyx_k_85[] = "serializer is not opened"; -static char __pyx_k_98[] = "id%03d"; -static char __pyx_k__TAG[] = "TAG"; -static char __pyx_k__end[] = "end"; -static char __pyx_k__tag[] = "tag"; -static char __pyx_k__YAML[] = "YAML"; -static char __pyx_k__data[] = "data"; -static char __pyx_k__line[] = "line"; -static char __pyx_k__name[] = "name"; -static char __pyx_k__read[] = "read"; -static char __pyx_k__tags[] = "tags"; -static char __pyx_k__type[] = "type"; -static char __pyx_k__yaml[] = "yaml"; -static char __pyx_k___scan[] = "_scan"; -static char __pyx_k___yaml[] = "_yaml"; -static char __pyx_k__alias[] = "alias"; -static char __pyx_k__error[] = "error"; -static char __pyx_k__index[] = "index"; -static char __pyx_k__major[] = "major"; -static char __pyx_k__minor[] = "minor"; -static char __pyx_k__nodes[] = "nodes"; -static char __pyx_k__start[] = "start"; -static char __pyx_k__style[] = "style"; -static char __pyx_k__value[] = "value"; -static char __pyx_k__width[] = "width"; -static char __pyx_k__write[] = "write"; -static char __pyx_k___parse[] = "_parse"; -static char __pyx_k__anchor[] = "anchor"; -static char __pyx_k__append[] = "append"; -static char __pyx_k__buffer[] = "buffer"; -static char __pyx_k__closed[] = "closed"; -static char __pyx_k__column[] = "column"; -static char __pyx_k__events[] = "events"; -static char __pyx_k__handle[] = "handle"; -static char __pyx_k__indent[] = "indent"; -static char __pyx_k__length[] = "length"; -static char __pyx_k__parser[] = "parser"; -static char __pyx_k__prefix[] = "prefix"; -static char __pyx_k__reader[] = "reader"; -static char __pyx_k__scalar[] = "scalar"; -static char 
__pyx_k__stream[] = "stream"; -static char __pyx_k__strict[] = "strict"; -static char __pyx_k__suffix[] = "suffix"; -static char __pyx_k__tokens[] = "tokens"; -static char __pyx_k__anchors[] = "anchors"; -static char __pyx_k__context[] = "context"; -static char __pyx_k__emitter[] = "emitter"; -static char __pyx_k__pointer[] = "pointer"; -static char __pyx_k__problem[] = "problem"; -static char __pyx_k__resolve[] = "resolve"; -static char __pyx_k__scanner[] = "scanner"; -static char __pyx_k__version[] = "version"; -static char __pyx_k__KeyToken[] = "KeyToken"; -static char __pyx_k__TagToken[] = "TagToken"; -static char __pyx_k____main__[] = "__main__"; -static char __pyx_k____test__[] = "__test__"; -static char __pyx_k__composer[] = "composer"; -static char __pyx_k__encoding[] = "encoding"; -static char __pyx_k__end_mark[] = "end_mark"; -static char __pyx_k__explicit[] = "explicit"; -static char __pyx_k__implicit[] = "implicit"; -static char __pyx_k__use_tags[] = "use_tags"; -static char __pyx_k__TypeError[] = "TypeError"; -static char __pyx_k__YAMLError[] = "YAMLError"; -static char __pyx_k____class__[] = "__class__"; -static char __pyx_k__canonical[] = "canonical"; -static char __pyx_k__AliasEvent[] = "AliasEvent"; -static char __pyx_k__AliasToken[] = "AliasToken"; -static char __pyx_k__ScalarNode[] = "ScalarNode"; -static char __pyx_k__ValueError[] = "ValueError"; -static char __pyx_k__ValueToken[] = "ValueToken"; -static char __pyx_k__flow_style[] = "flow_style"; -static char __pyx_k__line_break[] = "line_break"; -static char __pyx_k__serializer[] = "serializer"; -static char __pyx_k__start_mark[] = "start_mark"; -static char __pyx_k__AnchorToken[] = "AnchorToken"; -static char __pyx_k__MappingNode[] = "MappingNode"; -static char __pyx_k__MemoryError[] = "MemoryError"; -static char __pyx_k__ParserError[] = "ParserError"; -static char __pyx_k__ReaderError[] = "ReaderError"; -static char __pyx_k__ScalarEvent[] = "ScalarEvent"; -static char __pyx_k__ScalarToken[] = 
"ScalarToken"; -static char __pyx_k__constructor[] = "constructor"; -static char __pyx_k__get_version[] = "get_version"; -static char __pyx_k__representer[] = "representer"; -static char __pyx_k__stream_name[] = "stream_name"; -static char __pyx_k__use_version[] = "use_version"; -static char __pyx_k__EmitterError[] = "EmitterError"; -static char __pyx_k__ScannerError[] = "ScannerError"; -static char __pyx_k__SequenceNode[] = "SequenceNode"; -static char __pyx_k___anchor_node[] = "_anchor_node"; -static char __pyx_k__context_mark[] = "context_mark"; -static char __pyx_k__document_end[] = "document_end"; -static char __pyx_k__dump_unicode[] = "dump_unicode"; -static char __pyx_k__explicit_end[] = "explicit_end"; -static char __pyx_k__parsed_event[] = "parsed_event"; -static char __pyx_k__problem_mark[] = "problem_mark"; -static char __pyx_k__stream_cache[] = "stream_cache"; -static char __pyx_k__stream_start[] = "stream_start"; -static char __pyx_k__use_encoding[] = "use_encoding"; -static char __pyx_k__BlockEndToken[] = "BlockEndToken"; -static char __pyx_k__ComposerError[] = "ComposerError"; -static char __pyx_k___compose_node[] = "_compose_node"; -static char __pyx_k___parser_error[] = "_parser_error"; -static char __pyx_k__allow_unicode[] = "allow_unicode"; -static char __pyx_k__current_event[] = "current_event"; -static char __pyx_k__current_token[] = "current_token"; -static char __pyx_k__last_alias_id[] = "last_alias_id"; -static char __pyx_k__mapping_start[] = "mapping_start"; -static char __pyx_k__problem_value[] = "problem_value"; -static char __pyx_k__tag_directive[] = "tag_directive"; -static char __pyx_k__AttributeError[] = "AttributeError"; -static char __pyx_k__DirectiveToken[] = "DirectiveToken"; -static char __pyx_k__FlowEntryToken[] = "FlowEntryToken"; -static char __pyx_k__StreamEndEvent[] = "StreamEndEvent"; -static char __pyx_k__StreamEndToken[] = "StreamEndToken"; -static char __pyx_k___emitter_error[] = "_emitter_error"; -static char 
__pyx_k__document_start[] = "document_start"; -static char __pyx_k__explicit_start[] = "explicit_start"; -static char __pyx_k__plain_implicit[] = "plain_implicit"; -static char __pyx_k__problem_offset[] = "problem_offset"; -static char __pyx_k__sequence_start[] = "sequence_start"; -static char __pyx_k__tag_directives[] = "tag_directives"; -static char __pyx_k__unicode_source[] = "unicode_source"; -static char __pyx_k__BlockEntryToken[] = "BlockEntryToken"; -static char __pyx_k__MappingEndEvent[] = "MappingEndEvent"; -static char __pyx_k__SerializerError[] = "SerializerError"; -static char __pyx_k___serialize_node[] = "_serialize_node"; -static char __pyx_k__ascend_resolver[] = "ascend_resolver"; -static char __pyx_k__quoted_implicit[] = "quoted_implicit"; -static char __pyx_k__ConstructorError[] = "ConstructorError"; -static char __pyx_k__DocumentEndEvent[] = "DocumentEndEvent"; -static char __pyx_k__DocumentEndToken[] = "DocumentEndToken"; -static char __pyx_k__RepresenterError[] = "RepresenterError"; -static char __pyx_k__SequenceEndEvent[] = "SequenceEndEvent"; -static char __pyx_k__StreamStartEvent[] = "StreamStartEvent"; -static char __pyx_k__StreamStartToken[] = "StreamStartToken"; -static char __pyx_k___event_to_object[] = "_event_to_object"; -static char __pyx_k___object_to_event[] = "_object_to_event"; -static char __pyx_k___token_to_object[] = "_token_to_object"; -static char __pyx_k__descend_resolver[] = "descend_resolver"; -static char __pyx_k__serialized_nodes[] = "serialized_nodes"; -static char __pyx_k__stream_cache_len[] = "stream_cache_len"; -static char __pyx_k__stream_cache_pos[] = "stream_cache_pos"; -static char __pyx_k__MappingStartEvent[] = "MappingStartEvent"; -static char __pyx_k___compose_document[] = "_compose_document"; -static char __pyx_k___parse_next_event[] = "_parse_next_event"; -static char __pyx_k__version_directive[] = "version_directive"; -static char __pyx_k__DocumentStartEvent[] = "DocumentStartEvent"; -static char 
__pyx_k__DocumentStartToken[] = "DocumentStartToken"; -static char __pyx_k__SequenceStartEvent[] = "SequenceStartEvent"; -static char __pyx_k__get_version_string[] = "get_version_string"; -static char __pyx_k__FlowMappingEndToken[] = "FlowMappingEndToken"; -static PyObject *__pyx_kp_s_1; -static PyObject *__pyx_kp_u_12; -static PyObject *__pyx_kp_s_13; -static PyObject *__pyx_kp_u_13; -static PyObject *__pyx_kp_s_14; -static PyObject *__pyx_kp_u_14; -static PyObject *__pyx_n_s_15; -static PyObject *__pyx_n_s_16; -static PyObject *__pyx_n_s_17; -static PyObject *__pyx_n_s_18; -static PyObject *__pyx_n_s_19; -static PyObject *__pyx_kp_s_2; -static PyObject *__pyx_kp_u_2; -static PyObject *__pyx_kp_u_20; -static PyObject *__pyx_kp_s_21; -static PyObject *__pyx_kp_u_21; -static PyObject *__pyx_kp_s_22; -static PyObject *__pyx_kp_u_22; -static PyObject *__pyx_kp_s_23; -static PyObject *__pyx_kp_u_23; -static PyObject *__pyx_kp_s_24; -static PyObject *__pyx_kp_u_24; -static PyObject *__pyx_kp_s_25; -static PyObject *__pyx_kp_u_25; -static PyObject *__pyx_kp_s_28; -static PyObject *__pyx_kp_u_28; -static PyObject *__pyx_kp_s_3; -static PyObject *__pyx_kp_u_3; -static PyObject *__pyx_kp_s_31; -static PyObject *__pyx_kp_u_31; -static PyObject *__pyx_kp_s_32; -static PyObject *__pyx_kp_u_32; -static PyObject *__pyx_kp_s_33; -static PyObject *__pyx_kp_u_33; -static PyObject *__pyx_kp_s_34; -static PyObject *__pyx_kp_u_34; -static PyObject *__pyx_kp_s_35; -static PyObject *__pyx_kp_u_35; -static PyObject *__pyx_n_s_36; -static PyObject *__pyx_n_s_37; -static PyObject *__pyx_n_s_38; -static PyObject *__pyx_kp_s_39; -static PyObject *__pyx_kp_u_39; -static PyObject *__pyx_kp_s_4; -static PyObject *__pyx_kp_u_4; -static PyObject *__pyx_kp_s_42; -static PyObject *__pyx_kp_s_43; -static PyObject *__pyx_kp_s_44; -static PyObject *__pyx_n_s_45; -static PyObject *__pyx_n_s_46; -static PyObject *__pyx_kp_s_47; -static PyObject *__pyx_kp_u_47; -static PyObject *__pyx_kp_s_5; -static 
PyObject *__pyx_kp_u_5; -static PyObject *__pyx_kp_s_50; -static PyObject *__pyx_kp_u_50; -static PyObject *__pyx_kp_s_53; -static PyObject *__pyx_kp_u_53; -static PyObject *__pyx_kp_s_56; -static PyObject *__pyx_kp_u_56; -static PyObject *__pyx_kp_s_59; -static PyObject *__pyx_kp_u_59; -static PyObject *__pyx_kp_s_64; -static PyObject *__pyx_kp_u_64; -static PyObject *__pyx_kp_s_67; -static PyObject *__pyx_kp_u_67; -static PyObject *__pyx_kp_s_78; -static PyObject *__pyx_kp_u_78; -static PyObject *__pyx_kp_s_79; -static PyObject *__pyx_kp_u_79; -static PyObject *__pyx_kp_s_8; -static PyObject *__pyx_kp_u_8; -static PyObject *__pyx_kp_s_82; -static PyObject *__pyx_kp_u_82; -static PyObject *__pyx_kp_s_85; -static PyObject *__pyx_kp_u_85; -static PyObject *__pyx_kp_s_9; -static PyObject *__pyx_kp_u_9; -static PyObject *__pyx_kp_u_98; -static PyObject *__pyx_n_s__AliasEvent; -static PyObject *__pyx_n_s__AliasToken; -static PyObject *__pyx_n_s__AnchorToken; -static PyObject *__pyx_n_s__AttributeError; -static PyObject *__pyx_n_s__BlockEndToken; -static PyObject *__pyx_n_s__BlockEntryToken; -static PyObject *__pyx_n_s__ComposerError; -static PyObject *__pyx_n_s__ConstructorError; -static PyObject *__pyx_n_s__DirectiveToken; -static PyObject *__pyx_n_s__DocumentEndEvent; -static PyObject *__pyx_n_s__DocumentEndToken; -static PyObject *__pyx_n_s__DocumentStartEvent; -static PyObject *__pyx_n_s__DocumentStartToken; -static PyObject *__pyx_n_s__EmitterError; -static PyObject *__pyx_n_s__FlowEntryToken; -static PyObject *__pyx_n_s__FlowMappingEndToken; -static PyObject *__pyx_n_s__KeyToken; -static PyObject *__pyx_n_s__MappingEndEvent; -static PyObject *__pyx_n_s__MappingNode; -static PyObject *__pyx_n_s__MappingStartEvent; -static PyObject *__pyx_n_s__MemoryError; -static PyObject *__pyx_n_s__ParserError; -static PyObject *__pyx_n_s__ReaderError; -static PyObject *__pyx_n_s__RepresenterError; -static PyObject *__pyx_n_s__ScalarEvent; -static PyObject 
*__pyx_n_s__ScalarNode; -static PyObject *__pyx_n_s__ScalarToken; -static PyObject *__pyx_n_s__ScannerError; -static PyObject *__pyx_n_s__SequenceEndEvent; -static PyObject *__pyx_n_s__SequenceNode; -static PyObject *__pyx_n_s__SequenceStartEvent; -static PyObject *__pyx_n_s__SerializerError; -static PyObject *__pyx_n_s__StreamEndEvent; -static PyObject *__pyx_n_s__StreamEndToken; -static PyObject *__pyx_n_s__StreamStartEvent; -static PyObject *__pyx_n_s__StreamStartToken; -static PyObject *__pyx_n_u__TAG; -static PyObject *__pyx_n_s__TagToken; -static PyObject *__pyx_n_s__TypeError; -static PyObject *__pyx_n_s__ValueError; -static PyObject *__pyx_n_s__ValueToken; -static PyObject *__pyx_n_u__YAML; -static PyObject *__pyx_n_s__YAMLError; -static PyObject *__pyx_n_s____class__; -static PyObject *__pyx_n_s____main__; -static PyObject *__pyx_n_s____test__; -static PyObject *__pyx_n_s___anchor_node; -static PyObject *__pyx_n_s___compose_document; -static PyObject *__pyx_n_s___compose_node; -static PyObject *__pyx_n_s___emitter_error; -static PyObject *__pyx_n_s___event_to_object; -static PyObject *__pyx_n_s___object_to_event; -static PyObject *__pyx_n_s___parse; -static PyObject *__pyx_n_s___parse_next_event; -static PyObject *__pyx_n_s___parser_error; -static PyObject *__pyx_n_s___scan; -static PyObject *__pyx_n_s___serialize_node; -static PyObject *__pyx_n_s___token_to_object; -static PyObject *__pyx_n_s___yaml; -static PyObject *__pyx_n_s__alias; -static PyObject *__pyx_n_s__allow_unicode; -static PyObject *__pyx_n_s__anchor; -static PyObject *__pyx_n_s__anchors; -static PyObject *__pyx_n_s__append; -static PyObject *__pyx_n_s__ascend_resolver; -static PyObject *__pyx_n_s__buffer; -static PyObject *__pyx_n_s__canonical; -static PyObject *__pyx_n_s__closed; -static PyObject *__pyx_n_s__column; -static PyObject *__pyx_n_s__composer; -static PyObject *__pyx_n_s__constructor; -static PyObject *__pyx_n_s__context; -static PyObject *__pyx_n_s__context_mark; -static 
PyObject *__pyx_n_s__current_event; -static PyObject *__pyx_n_s__current_token; -static PyObject *__pyx_n_s__data; -static PyObject *__pyx_n_s__descend_resolver; -static PyObject *__pyx_n_s__document_end; -static PyObject *__pyx_n_s__document_start; -static PyObject *__pyx_n_s__dump_unicode; -static PyObject *__pyx_n_s__emitter; -static PyObject *__pyx_n_s__encoding; -static PyObject *__pyx_n_u__encoding; -static PyObject *__pyx_n_s__end; -static PyObject *__pyx_n_s__end_mark; -static PyObject *__pyx_n_s__error; -static PyObject *__pyx_n_s__events; -static PyObject *__pyx_n_s__explicit; -static PyObject *__pyx_n_s__explicit_end; -static PyObject *__pyx_n_s__explicit_start; -static PyObject *__pyx_n_s__flow_style; -static PyObject *__pyx_n_s__get_version; -static PyObject *__pyx_n_s__get_version_string; -static PyObject *__pyx_n_s__handle; -static PyObject *__pyx_n_s__implicit; -static PyObject *__pyx_n_s__indent; -static PyObject *__pyx_n_s__index; -static PyObject *__pyx_n_s__last_alias_id; -static PyObject *__pyx_n_s__length; -static PyObject *__pyx_n_s__line; -static PyObject *__pyx_n_s__line_break; -static PyObject *__pyx_n_s__major; -static PyObject *__pyx_n_s__mapping_start; -static PyObject *__pyx_n_s__minor; -static PyObject *__pyx_n_s__name; -static PyObject *__pyx_n_s__nodes; -static PyObject *__pyx_n_s__parsed_event; -static PyObject *__pyx_n_s__parser; -static PyObject *__pyx_n_s__plain_implicit; -static PyObject *__pyx_n_s__pointer; -static PyObject *__pyx_n_s__prefix; -static PyObject *__pyx_n_s__problem; -static PyObject *__pyx_n_s__problem_mark; -static PyObject *__pyx_n_s__problem_offset; -static PyObject *__pyx_n_s__problem_value; -static PyObject *__pyx_n_s__quoted_implicit; -static PyObject *__pyx_n_s__read; -static PyObject *__pyx_n_s__reader; -static PyObject *__pyx_n_s__representer; -static PyObject *__pyx_n_s__resolve; -static PyObject *__pyx_n_s__scalar; -static PyObject *__pyx_n_s__scanner; -static PyObject *__pyx_n_s__sequence_start; 
-static PyObject *__pyx_n_s__serialized_nodes; -static PyObject *__pyx_n_s__serializer; -static PyObject *__pyx_n_s__start; -static PyObject *__pyx_n_s__start_mark; -static PyObject *__pyx_n_s__stream; -static PyObject *__pyx_n_s__stream_cache; -static PyObject *__pyx_n_s__stream_cache_len; -static PyObject *__pyx_n_s__stream_cache_pos; -static PyObject *__pyx_n_s__stream_name; -static PyObject *__pyx_n_s__stream_start; -static PyObject *__pyx_n_s__style; -static PyObject *__pyx_n_s__suffix; -static PyObject *__pyx_n_s__tag; -static PyObject *__pyx_n_s__tag_directive; -static PyObject *__pyx_n_s__tag_directives; -static PyObject *__pyx_n_s__tags; -static PyObject *__pyx_n_s__tokens; -static PyObject *__pyx_n_s__type; -static PyObject *__pyx_n_s__unicode_source; -static PyObject *__pyx_n_s__use_encoding; -static PyObject *__pyx_n_s__use_tags; -static PyObject *__pyx_n_s__use_version; -static PyObject *__pyx_n_s__value; -static PyObject *__pyx_n_s__version; -static PyObject *__pyx_n_s__version_directive; -static PyObject *__pyx_n_s__width; -static PyObject *__pyx_n_s__write; -static PyObject *__pyx_n_s__yaml; -static PyObject *__pyx_int_0; -static PyObject *__pyx_int_1; -static PyObject *__pyx_k_tuple_6; -static PyObject *__pyx_k_tuple_7; -static PyObject *__pyx_k_tuple_10; -static PyObject *__pyx_k_tuple_11; -static PyObject *__pyx_k_tuple_26; -static PyObject *__pyx_k_tuple_27; -static PyObject *__pyx_k_tuple_29; -static PyObject *__pyx_k_tuple_30; -static PyObject *__pyx_k_tuple_40; -static PyObject *__pyx_k_tuple_41; -static PyObject *__pyx_k_tuple_48; -static PyObject *__pyx_k_tuple_49; -static PyObject *__pyx_k_tuple_51; -static PyObject *__pyx_k_tuple_52; -static PyObject *__pyx_k_tuple_54; -static PyObject *__pyx_k_tuple_55; -static PyObject *__pyx_k_tuple_57; -static PyObject *__pyx_k_tuple_58; -static PyObject *__pyx_k_tuple_60; -static PyObject *__pyx_k_tuple_61; -static PyObject *__pyx_k_tuple_62; -static PyObject *__pyx_k_tuple_63; -static PyObject 
*__pyx_k_tuple_65; -static PyObject *__pyx_k_tuple_66; -static PyObject *__pyx_k_tuple_68; -static PyObject *__pyx_k_tuple_69; -static PyObject *__pyx_k_tuple_70; -static PyObject *__pyx_k_tuple_71; -static PyObject *__pyx_k_tuple_72; -static PyObject *__pyx_k_tuple_73; -static PyObject *__pyx_k_tuple_74; -static PyObject *__pyx_k_tuple_75; -static PyObject *__pyx_k_tuple_76; -static PyObject *__pyx_k_tuple_77; -static PyObject *__pyx_k_tuple_80; -static PyObject *__pyx_k_tuple_81; -static PyObject *__pyx_k_tuple_83; -static PyObject *__pyx_k_tuple_84; -static PyObject *__pyx_k_tuple_86; -static PyObject *__pyx_k_tuple_87; -static PyObject *__pyx_k_tuple_88; -static PyObject *__pyx_k_tuple_89; -static PyObject *__pyx_k_tuple_90; -static PyObject *__pyx_k_tuple_91; -static PyObject *__pyx_k_tuple_92; -static PyObject *__pyx_k_tuple_93; -static PyObject *__pyx_k_tuple_94; -static PyObject *__pyx_k_tuple_95; -static PyObject *__pyx_k_tuple_96; -static PyObject *__pyx_k_tuple_97; -static PyObject *__pyx_k_tuple_99; -static PyObject *__pyx_k_tuple_100; -static PyObject *__pyx_k_tuple_101; -static PyObject *__pyx_k_tuple_102; -static PyObject *__pyx_k_tuple_103; -static PyObject *__pyx_k_tuple_104; -static PyObject *__pyx_k_tuple_105; -static PyObject *__pyx_k_tuple_106; -static PyObject *__pyx_k_tuple_107; -static PyObject *__pyx_k_tuple_108; - -/* "_yaml.pyx":4 - * import yaml - * - * def get_version_string(): # <<<<<<<<<<<<<< - * cdef char *value - * value = yaml_get_version_string() - */ - -static PyObject *__pyx_pf_5_yaml_get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyMethodDef __pyx_mdef_5_yaml_get_version_string = {__Pyx_NAMESTR("get_version_string"), (PyCFunction)__pyx_pf_5_yaml_get_version_string, METH_NOARGS, __Pyx_DOCSTR(0)}; -static PyObject *__pyx_pf_5_yaml_get_version_string(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - char *__pyx_v_value; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject 
*__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("get_version_string"); - __pyx_self = __pyx_self; - - /* "_yaml.pyx":6 - * def get_version_string(): - * cdef char *value - * value = yaml_get_version_string() # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * return value - */ - __pyx_v_value = yaml_get_version_string(); - - /* "_yaml.pyx":7 - * cdef char *value - * value = yaml_get_version_string() - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * return value - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":8 - * value = yaml_get_version_string() - * if PY_MAJOR_VERSION < 3: - * return value # <<<<<<<<<<<<<< - * else: - * return PyUnicode_FromString(value) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = PyBytes_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 8; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __pyx_r = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":10 - * return value - * else: - * return PyUnicode_FromString(value) # <<<<<<<<<<<<<< - * - * def get_version(): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = PyUnicode_FromString(__pyx_v_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 10; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - } - __pyx_L5:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.get_version_string"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":12 - * return PyUnicode_FromString(value) - * - * def get_version(): # <<<<<<<<<<<<<< - * cdef int major, minor, patch - * yaml_get_version(&major, &minor, &patch) - */ - -static PyObject 
*__pyx_pf_5_yaml_1get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyMethodDef __pyx_mdef_5_yaml_1get_version = {__Pyx_NAMESTR("get_version"), (PyCFunction)__pyx_pf_5_yaml_1get_version, METH_NOARGS, __Pyx_DOCSTR(0)}; -static PyObject *__pyx_pf_5_yaml_1get_version(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) { - int __pyx_v_major; - int __pyx_v_minor; - int __pyx_v_patch; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("get_version"); - __pyx_self = __pyx_self; - - /* "_yaml.pyx":14 - * def get_version(): - * cdef int major, minor, patch - * yaml_get_version(&major, &minor, &patch) # <<<<<<<<<<<<<< - * return (major, minor, patch) - * - */ - yaml_get_version((&__pyx_v_major), (&__pyx_v_minor), (&__pyx_v_patch)); - - /* "_yaml.pyx":15 - * cdef int major, minor, patch - * yaml_get_version(&major, &minor, &patch) - * return (major, minor, patch) # <<<<<<<<<<<<<< - * - * #Mark = yaml.error.Mark - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyInt_FromLong(__pyx_v_major); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyInt_FromLong(__pyx_v_minor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyInt_FromLong(__pyx_v_patch); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 1, 
__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_r = ((PyObject *)__pyx_t_4); - __pyx_t_4 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.get_version"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":72 - * cdef readonly pointer - * - * def __init__(self, object name, int index, int line, int column, # <<<<<<<<<<<<<< - * object buffer, object pointer): - * self.name = name - */ - -static int __pyx_pf_5_yaml_4Mark___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pf_5_yaml_4Mark___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_name = 0; - int __pyx_v_index; - int __pyx_v_line; - int __pyx_v_column; - PyObject *__pyx_v_buffer = 0; - PyObject *__pyx_v_pointer = 0; - int __pyx_r; - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__name,&__pyx_n_s__index,&__pyx_n_s__line,&__pyx_n_s__column,&__pyx_n_s__buffer,&__pyx_n_s__pointer,0}; - __Pyx_RefNannySetupContext("__init__"); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args = PyDict_Size(__pyx_kwds); - PyObject* values[6] = {0,0,0,0,0,0}; - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 0: - 
values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__name); - if (likely(values[0])) kw_args--; - else goto __pyx_L5_argtuple_error; - case 1: - values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__index); - if (likely(values[1])) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - case 2: - values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__line); - if (likely(values[2])) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - case 3: - values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__column); - if (likely(values[3])) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 3); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - case 4: - values[4] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__buffer); - if (likely(values[4])) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 4); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - case 5: - values[5] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__pointer); - if (likely(values[5])) kw_args--; - else { - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, 5); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - __pyx_v_name = values[0]; - __pyx_v_index = __Pyx_PyInt_AsInt(values[1]); if (unlikely((__pyx_v_index == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - 
__pyx_v_line = __Pyx_PyInt_AsInt(values[2]); if (unlikely((__pyx_v_line == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_v_column = __Pyx_PyInt_AsInt(values[3]); if (unlikely((__pyx_v_column == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_v_buffer = values[4]; - __pyx_v_pointer = values[5]; - } else if (PyTuple_GET_SIZE(__pyx_args) != 6) { - goto __pyx_L5_argtuple_error; - } else { - __pyx_v_name = PyTuple_GET_ITEM(__pyx_args, 0); - __pyx_v_index = __Pyx_PyInt_AsInt(PyTuple_GET_ITEM(__pyx_args, 1)); if (unlikely((__pyx_v_index == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_v_line = __Pyx_PyInt_AsInt(PyTuple_GET_ITEM(__pyx_args, 2)); if (unlikely((__pyx_v_line == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_v_column = __Pyx_PyInt_AsInt(PyTuple_GET_ITEM(__pyx_args, 3)); if (unlikely((__pyx_v_column == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_v_buffer = PyTuple_GET_ITEM(__pyx_args, 4); - __pyx_v_pointer = PyTuple_GET_ITEM(__pyx_args, 5); - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 6, 6, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_L3_error:; - __Pyx_AddTraceback("_yaml.Mark.__init__"); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - - /* "_yaml.pyx":74 - * def __init__(self, object name, int index, int line, int column, - * object buffer, object pointer): - * self.name = name # <<<<<<<<<<<<<< - * self.index = index - * 
self.line = line - */ - __Pyx_INCREF(__pyx_v_name); - __Pyx_GIVEREF(__pyx_v_name); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name = __pyx_v_name; - - /* "_yaml.pyx":75 - * object buffer, object pointer): - * self.name = name - * self.index = index # <<<<<<<<<<<<<< - * self.line = line - * self.column = column - */ - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->index = __pyx_v_index; - - /* "_yaml.pyx":76 - * self.name = name - * self.index = index - * self.line = line # <<<<<<<<<<<<<< - * self.column = column - * self.buffer = buffer - */ - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->line = __pyx_v_line; - - /* "_yaml.pyx":77 - * self.index = index - * self.line = line - * self.column = column # <<<<<<<<<<<<<< - * self.buffer = buffer - * self.pointer = pointer - */ - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->column = __pyx_v_column; - - /* "_yaml.pyx":78 - * self.line = line - * self.column = column - * self.buffer = buffer # <<<<<<<<<<<<<< - * self.pointer = pointer - * - */ - __Pyx_INCREF(__pyx_v_buffer); - __Pyx_GIVEREF(__pyx_v_buffer); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->buffer); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->buffer); - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->buffer = __pyx_v_buffer; - - /* "_yaml.pyx":79 - * self.column = column - * self.buffer = buffer - * self.pointer = pointer # <<<<<<<<<<<<<< - * - * def get_snippet(self): - */ - __Pyx_INCREF(__pyx_v_pointer); - __Pyx_GIVEREF(__pyx_v_pointer); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->pointer); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->pointer); - ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->pointer = __pyx_v_pointer; - - __pyx_r = 0; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":81 - * self.pointer = pointer - 
* - * def get_snippet(self): # <<<<<<<<<<<<<< - * return None - * - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_1get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_1get_snippet(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannySetupContext("get_snippet"); - - /* "_yaml.pyx":82 - * - * def get_snippet(self): - * return None # <<<<<<<<<<<<<< - * - * def __str__(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_None); - __pyx_r = Py_None; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":84 - * return None - * - * def __str__(self): # <<<<<<<<<<<<<< - * where = " in \"%s\", line %d, column %d" \ - * % (self.name, self.line+1, self.column+1) - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_2__str__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_2__str__(PyObject *__pyx_v_self) { - PyObject *__pyx_v_where; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("__str__"); - __pyx_v_where = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":86 - * def __str__(self): - * where = " in \"%s\", line %d, column %d" \ - * % (self.name, self.line+1, self.column+1) # <<<<<<<<<<<<<< - * return where - * - */ - __pyx_t_1 = PyInt_FromLong((((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->line + 1)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyInt_FromLong((((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->column + 1)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(3); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - PyTuple_SET_ITEM(__pyx_t_3, 0, ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - __Pyx_GIVEREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyNumber_Remainder(((PyObject *)__pyx_kp_s_1), ((PyObject *)__pyx_t_3)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 86; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_v_where); - __pyx_v_where = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":87 - * where = " in \"%s\", line %d, column %d" \ - * % (self.name, self.line+1, self.column+1) - * return where # <<<<<<<<<<<<<< - * - * #class YAMLError(Exception): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_where); - __pyx_r = __pyx_v_where; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.Mark.__str__"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_where); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":65 - * - * cdef class Mark: - * cdef readonly object name # <<<<<<<<<<<<<< - * cdef readonly int index - * cdef readonly int line - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_4name___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - 
__Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name); - __pyx_r = ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->name; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":66 - * cdef class Mark: - * cdef readonly object name - * cdef readonly int index # <<<<<<<<<<<<<< - * cdef readonly int line - * cdef readonly int column - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_5index___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyInt_FromLong(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 66; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("_yaml.Mark.index.__get__"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":67 - * cdef readonly object name - * cdef readonly int index - * cdef readonly int line # <<<<<<<<<<<<<< - * cdef readonly int column - * cdef readonly buffer - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_4line___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyInt_FromLong(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->line); if (unlikely(!__pyx_t_1)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 67; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("_yaml.Mark.line.__get__"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":68 - * cdef readonly int index - * cdef readonly int line - * cdef readonly int column # <<<<<<<<<<<<<< - * cdef readonly buffer - * cdef readonly pointer - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_6column___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - __Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = PyInt_FromLong(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->column); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 68; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_AddTraceback("_yaml.Mark.column.__get__"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":69 - * cdef readonly int line - * cdef readonly int column - * cdef readonly buffer # <<<<<<<<<<<<<< - * cdef readonly pointer - * - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_6buffer___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->buffer); - 
__pyx_r = ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->buffer; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":70 - * cdef readonly int column - * cdef readonly buffer - * cdef readonly pointer # <<<<<<<<<<<<<< - * - * def __init__(self, object name, int index, int line, int column, - */ - -static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(PyObject *__pyx_v_self); /*proto*/ -static PyObject *__pyx_pf_5_yaml_4Mark_7pointer___get__(PyObject *__pyx_v_self) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannySetupContext("__get__"); - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->pointer); - __pyx_r = ((struct __pyx_obj_5_yaml_Mark *)__pyx_v_self)->pointer; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":262 - * cdef int unicode_source - * - * def __init__(self, stream): # <<<<<<<<<<<<<< - * cdef is_readable - * if yaml_parser_initialize(&self.parser) == 0: - */ - -static int __pyx_pf_5_yaml_7CParser___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int __pyx_pf_5_yaml_7CParser___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_stream = 0; - PyObject *__pyx_v_is_readable; - int __pyx_r; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__stream,0}; - __Pyx_RefNannySetupContext("__init__"); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args = PyDict_Size(__pyx_kwds); - PyObject* values[1] = {0}; - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - switch 
(PyTuple_GET_SIZE(__pyx_args)) { - case 0: - values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__stream); - if (likely(values[0])) kw_args--; - else goto __pyx_L5_argtuple_error; - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 262; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - __pyx_v_stream = values[0]; - } else if (PyTuple_GET_SIZE(__pyx_args) != 1) { - goto __pyx_L5_argtuple_error; - } else { - __pyx_v_stream = PyTuple_GET_ITEM(__pyx_args, 0); - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 1, 1, 1, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 262; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_L3_error:; - __Pyx_AddTraceback("_yaml.CParser.__init__"); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - __Pyx_INCREF(__pyx_v_stream); - __pyx_v_is_readable = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":264 - * def __init__(self, stream): - * cdef is_readable - * if yaml_parser_initialize(&self.parser) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * self.parsed_event.type = YAML_NO_EVENT - */ - __pyx_t_1 = (yaml_parser_initialize((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser)) == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":265 - * cdef is_readable - * if yaml_parser_initialize(&self.parser) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * self.parsed_event.type = YAML_NO_EVENT - * is_readable = 1 - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":266 - * if yaml_parser_initialize(&self.parser) == 0: - * raise MemoryError - * self.parsed_event.type = YAML_NO_EVENT # <<<<<<<<<<<<<< - * is_readable = 1 - * try: - */ - 
((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type = YAML_NO_EVENT; - - /* "_yaml.pyx":267 - * raise MemoryError - * self.parsed_event.type = YAML_NO_EVENT - * is_readable = 1 # <<<<<<<<<<<<<< - * try: - * stream.read - */ - __Pyx_INCREF(__pyx_int_1); - __Pyx_DECREF(__pyx_v_is_readable); - __pyx_v_is_readable = __pyx_int_1; - - /* "_yaml.pyx":268 - * self.parsed_event.type = YAML_NO_EVENT - * is_readable = 1 - * try: # <<<<<<<<<<<<<< - * stream.read - * except AttributeError: - */ - { - PyObject *__pyx_save_exc_type, *__pyx_save_exc_value, *__pyx_save_exc_tb; - __Pyx_ExceptionSave(&__pyx_save_exc_type, &__pyx_save_exc_value, &__pyx_save_exc_tb); - __Pyx_XGOTREF(__pyx_save_exc_type); - __Pyx_XGOTREF(__pyx_save_exc_value); - __Pyx_XGOTREF(__pyx_save_exc_tb); - /*try:*/ { - - /* "_yaml.pyx":269 - * is_readable = 1 - * try: - * stream.read # <<<<<<<<<<<<<< - * except AttributeError: - * is_readable = 0 - */ - __pyx_t_2 = PyObject_GetAttr(__pyx_v_stream, __pyx_n_s__read); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 269; __pyx_clineno = __LINE__; goto __pyx_L7_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - } - __Pyx_XDECREF(__pyx_save_exc_type); __pyx_save_exc_type = 0; - __Pyx_XDECREF(__pyx_save_exc_value); __pyx_save_exc_value = 0; - __Pyx_XDECREF(__pyx_save_exc_tb); __pyx_save_exc_tb = 0; - goto __pyx_L14_try_end; - __pyx_L7_error:; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "_yaml.pyx":270 - * try: - * stream.read - * except AttributeError: # <<<<<<<<<<<<<< - * is_readable = 0 - * self.unicode_source = 0 - */ - __pyx_t_3 = PyErr_ExceptionMatches(__pyx_builtin_AttributeError); - if (__pyx_t_3) { - __Pyx_AddTraceback("_yaml.CParser.__init__"); - if (__Pyx_GetException(&__pyx_t_2, &__pyx_t_4, &__pyx_t_5) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L9_except_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GOTREF(__pyx_t_4); - 
__Pyx_GOTREF(__pyx_t_5); - - /* "_yaml.pyx":271 - * stream.read - * except AttributeError: - * is_readable = 0 # <<<<<<<<<<<<<< - * self.unicode_source = 0 - * if is_readable: - */ - __Pyx_INCREF(__pyx_int_0); - __Pyx_DECREF(__pyx_v_is_readable); - __pyx_v_is_readable = __pyx_int_0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - goto __pyx_L8_exception_handled; - } - __pyx_L9_except_error:; - __Pyx_XGIVEREF(__pyx_save_exc_type); - __Pyx_XGIVEREF(__pyx_save_exc_value); - __Pyx_XGIVEREF(__pyx_save_exc_tb); - __Pyx_ExceptionReset(__pyx_save_exc_type, __pyx_save_exc_value, __pyx_save_exc_tb); - goto __pyx_L1_error; - __pyx_L8_exception_handled:; - __Pyx_XGIVEREF(__pyx_save_exc_type); - __Pyx_XGIVEREF(__pyx_save_exc_value); - __Pyx_XGIVEREF(__pyx_save_exc_tb); - __Pyx_ExceptionReset(__pyx_save_exc_type, __pyx_save_exc_value, __pyx_save_exc_tb); - __pyx_L14_try_end:; - } - - /* "_yaml.pyx":272 - * except AttributeError: - * is_readable = 0 - * self.unicode_source = 0 # <<<<<<<<<<<<<< - * if is_readable: - * self.stream = stream - */ - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->unicode_source = 0; - - /* "_yaml.pyx":273 - * is_readable = 0 - * self.unicode_source = 0 - * if is_readable: # <<<<<<<<<<<<<< - * self.stream = stream - * try: - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_is_readable); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 273; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":274 - * self.unicode_source = 0 - * if is_readable: - * self.stream = stream # <<<<<<<<<<<<<< - * try: - * self.stream_name = stream.name - */ - __Pyx_INCREF(__pyx_v_stream); - __Pyx_GIVEREF(__pyx_v_stream); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream = 
__pyx_v_stream; - - /* "_yaml.pyx":275 - * if is_readable: - * self.stream = stream - * try: # <<<<<<<<<<<<<< - * self.stream_name = stream.name - * except AttributeError: - */ - { - PyObject *__pyx_save_exc_type, *__pyx_save_exc_value, *__pyx_save_exc_tb; - __Pyx_ExceptionSave(&__pyx_save_exc_type, &__pyx_save_exc_value, &__pyx_save_exc_tb); - __Pyx_XGOTREF(__pyx_save_exc_type); - __Pyx_XGOTREF(__pyx_save_exc_value); - __Pyx_XGOTREF(__pyx_save_exc_tb); - /*try:*/ { - - /* "_yaml.pyx":276 - * self.stream = stream - * try: - * self.stream_name = stream.name # <<<<<<<<<<<<<< - * except AttributeError: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_5 = PyObject_GetAttr(__pyx_v_stream, __pyx_n_s__name); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L18_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = __pyx_t_5; - __pyx_t_5 = 0; - } - __Pyx_XDECREF(__pyx_save_exc_type); __pyx_save_exc_type = 0; - __Pyx_XDECREF(__pyx_save_exc_value); __pyx_save_exc_value = 0; - __Pyx_XDECREF(__pyx_save_exc_tb); __pyx_save_exc_tb = 0; - goto __pyx_L25_try_end; - __pyx_L18_error:; - __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_XDECREF(__pyx_t_5); __pyx_t_5 = 0; - - /* "_yaml.pyx":277 - * try: - * self.stream_name = stream.name - * except AttributeError: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * self.stream_name = '' - */ - __pyx_t_3 = PyErr_ExceptionMatches(__pyx_builtin_AttributeError); - if (__pyx_t_3) { - __Pyx_AddTraceback("_yaml.CParser.__init__"); - if (__Pyx_GetException(&__pyx_t_5, &__pyx_t_4, &__pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L20_except_error;} - 
__Pyx_GOTREF(__pyx_t_5); - __Pyx_GOTREF(__pyx_t_4); - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":278 - * self.stream_name = stream.name - * except AttributeError: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * self.stream_name = '' - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":279 - * except AttributeError: - * if PY_MAJOR_VERSION < 3: - * self.stream_name = '' # <<<<<<<<<<<<<< - * else: - * self.stream_name = u'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_s_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_s_2); - goto __pyx_L28; - } - /*else*/ { - - /* "_yaml.pyx":281 - * self.stream_name = '' - * else: - * self.stream_name = u'' # <<<<<<<<<<<<<< - * self.stream_cache = None - * self.stream_cache_len = 0 - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_u_2); - } - __pyx_L28:; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L19_exception_handled; - } - __pyx_L20_except_error:; - __Pyx_XGIVEREF(__pyx_save_exc_type); - __Pyx_XGIVEREF(__pyx_save_exc_value); - __Pyx_XGIVEREF(__pyx_save_exc_tb); - __Pyx_ExceptionReset(__pyx_save_exc_type, __pyx_save_exc_value, __pyx_save_exc_tb); - goto __pyx_L1_error; - __pyx_L19_exception_handled:; - __Pyx_XGIVEREF(__pyx_save_exc_type); - __Pyx_XGIVEREF(__pyx_save_exc_value); - __Pyx_XGIVEREF(__pyx_save_exc_tb); - __Pyx_ExceptionReset(__pyx_save_exc_type, 
__pyx_save_exc_value, __pyx_save_exc_tb); - __pyx_L25_try_end:; - } - - /* "_yaml.pyx":282 - * else: - * self.stream_name = u'' - * self.stream_cache = None # <<<<<<<<<<<<<< - * self.stream_cache_len = 0 - * self.stream_cache_pos = 0 - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_cache); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_cache); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_cache = Py_None; - - /* "_yaml.pyx":283 - * self.stream_name = u'' - * self.stream_cache = None - * self.stream_cache_len = 0 # <<<<<<<<<<<<<< - * self.stream_cache_pos = 0 - * yaml_parser_set_input(&self.parser, input_handler, self) - */ - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_cache_len = 0; - - /* "_yaml.pyx":284 - * self.stream_cache = None - * self.stream_cache_len = 0 - * self.stream_cache_pos = 0 # <<<<<<<<<<<<<< - * yaml_parser_set_input(&self.parser, input_handler, self) - * else: - */ - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_cache_pos = 0; - - /* "_yaml.pyx":285 - * self.stream_cache_len = 0 - * self.stream_cache_pos = 0 - * yaml_parser_set_input(&self.parser, input_handler, self) # <<<<<<<<<<<<<< - * else: - * if PyUnicode_CheckExact(stream) != 0: - */ - yaml_parser_set_input((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser), __pyx_f_5_yaml_input_handler, ((void *)__pyx_v_self)); - goto __pyx_L17; - } - /*else*/ { - - /* "_yaml.pyx":287 - * yaml_parser_set_input(&self.parser, input_handler, self) - * else: - * if PyUnicode_CheckExact(stream) != 0: # <<<<<<<<<<<<<< - * stream = PyUnicode_AsUTF8String(stream) - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_stream) != 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":288 - * else: - * if PyUnicode_CheckExact(stream) != 0: - * stream = PyUnicode_AsUTF8String(stream) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * self.stream_name = '' 
- */ - __pyx_t_2 = PyUnicode_AsUTF8String(__pyx_v_stream); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 288; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_stream); - __pyx_v_stream = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":289 - * if PyUnicode_CheckExact(stream) != 0: - * stream = PyUnicode_AsUTF8String(stream) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * self.stream_name = '' - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":290 - * stream = PyUnicode_AsUTF8String(stream) - * if PY_MAJOR_VERSION < 3: - * self.stream_name = '' # <<<<<<<<<<<<<< - * else: - * self.stream_name = u'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_s_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_3)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_s_3); - goto __pyx_L30; - } - /*else*/ { - - /* "_yaml.pyx":292 - * self.stream_name = '' - * else: - * self.stream_name = u'' # <<<<<<<<<<<<<< - * self.unicode_source = 1 - * else: - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_3)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_u_3); - } - __pyx_L30:; - - /* "_yaml.pyx":293 - * else: - * self.stream_name = u'' - * self.unicode_source = 1 # <<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->unicode_source = 1; - goto __pyx_L29; - } - /*else*/ { - - /* "_yaml.pyx":295 - * self.unicode_source = 1 - * else: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * self.stream_name = '' 
- * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":296 - * else: - * if PY_MAJOR_VERSION < 3: - * self.stream_name = '' # <<<<<<<<<<<<<< - * else: - * self.stream_name = u'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_s_4)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_4)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_s_4); - goto __pyx_L31; - } - /*else*/ { - - /* "_yaml.pyx":298 - * self.stream_name = '' - * else: - * self.stream_name = u'' # <<<<<<<<<<<<<< - * if PyString_CheckExact(stream) == 0: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_4)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_4)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name = ((PyObject *)__pyx_kp_u_4); - } - __pyx_L31:; - } - __pyx_L29:; - - /* "_yaml.pyx":299 - * else: - * self.stream_name = u'' - * if PyString_CheckExact(stream) == 0: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string or stream input is required") - */ - __pyx_t_1 = (PyString_CheckExact(__pyx_v_stream) == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":300 - * self.stream_name = u'' - * if PyString_CheckExact(stream) == 0: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("a string or stream input is required") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":301 - * if PyString_CheckExact(stream) == 0: - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"a string or stream input is required") - */ - __pyx_t_2 = 
PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_6), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L33; - } - /*else*/ { - - /* "_yaml.pyx":303 - * raise TypeError("a string or stream input is required") - * else: - * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<< - * self.stream = stream - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - */ - __pyx_t_2 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_7), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L33:; - goto __pyx_L32; - } - __pyx_L32:; - - /* "_yaml.pyx":304 - * else: - * raise TypeError(u"a string or stream input is required") - * self.stream = stream # <<<<<<<<<<<<<< - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - * self.current_token = None - */ - __Pyx_INCREF(__pyx_v_stream); - __Pyx_GIVEREF(__pyx_v_stream); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream = __pyx_v_stream; - - /* "_yaml.pyx":305 - * raise TypeError(u"a string or stream input is required") - * self.stream = stream - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) # 
<<<<<<<<<<<<<< - * self.current_token = None - * self.current_event = None - */ - yaml_parser_set_input_string((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser), PyString_AS_STRING(__pyx_v_stream), PyString_GET_SIZE(__pyx_v_stream)); - } - __pyx_L17:; - - /* "_yaml.pyx":306 - * self.stream = stream - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - * self.current_token = None # <<<<<<<<<<<<<< - * self.current_event = None - * self.anchors = {} - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token = Py_None; - - /* "_yaml.pyx":307 - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - * self.current_token = None - * self.current_event = None # <<<<<<<<<<<<<< - * self.anchors = {} - * - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event = Py_None; - - /* "_yaml.pyx":308 - * self.current_token = None - * self.current_event = None - * self.anchors = {} # <<<<<<<<<<<<<< - * - * def __dealloc__(self): - */ - __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 308; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->anchors); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->anchors); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->anchors = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - __pyx_r = 
0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("_yaml.CParser.__init__"); - __pyx_r = -1; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_is_readable); - __Pyx_DECREF(__pyx_v_stream); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":310 - * self.anchors = {} - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * yaml_parser_delete(&self.parser) - * yaml_event_delete(&self.parsed_event) - */ - -static void __pyx_pf_5_yaml_7CParser_1__dealloc__(PyObject *__pyx_v_self); /*proto*/ -static void __pyx_pf_5_yaml_7CParser_1__dealloc__(PyObject *__pyx_v_self) { - __Pyx_RefNannySetupContext("__dealloc__"); - - /* "_yaml.pyx":311 - * - * def __dealloc__(self): - * yaml_parser_delete(&self.parser) # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * - */ - yaml_parser_delete((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser)); - - /* "_yaml.pyx":312 - * def __dealloc__(self): - * yaml_parser_delete(&self.parser) - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * - * def dispose(self): - */ - yaml_event_delete((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event)); - - __Pyx_RefNannyFinishContext(); -} - -/* "_yaml.pyx":314 - * yaml_event_delete(&self.parsed_event) - * - * def dispose(self): # <<<<<<<<<<<<<< - * pass - * - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_2dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_2dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannySetupContext("dispose"); - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":317 - * pass - * - * cdef object _parser_error(self): # <<<<<<<<<<<<<< - * if self.parser.error == YAML_MEMORY_ERROR: - * return MemoryError - */ - -static PyObject 
*__pyx_f_5_yaml_7CParser__parser_error(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) { - PyObject *__pyx_v_context_mark; - PyObject *__pyx_v_problem_mark; - PyObject *__pyx_v_context; - PyObject *__pyx_v_problem; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - int __pyx_t_8; - __Pyx_RefNannySetupContext("_parser_error"); - __pyx_v_context_mark = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_problem_mark = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_context = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_problem = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":318 - * - * cdef object _parser_error(self): - * if self.parser.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<< - * return MemoryError - * elif self.parser.error == YAML_READER_ERROR: - */ - __pyx_t_1 = (__pyx_v_self->parser.error == YAML_MEMORY_ERROR); - if (__pyx_t_1) { - - /* "_yaml.pyx":319 - * cdef object _parser_error(self): - * if self.parser.error == YAML_MEMORY_ERROR: - * return MemoryError # <<<<<<<<<<<<<< - * elif self.parser.error == YAML_READER_ERROR: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_MemoryError); - __pyx_r = __pyx_builtin_MemoryError; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":320 - * if self.parser.error == YAML_MEMORY_ERROR: - * return MemoryError - * elif self.parser.error == YAML_READER_ERROR: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * return ReaderError(self.stream_name, self.parser.problem_offset, - */ - __pyx_t_1 = (__pyx_v_self->parser.error == YAML_READER_ERROR); - if (__pyx_t_1) { - - /* "_yaml.pyx":321 - * return MemoryError - * elif self.parser.error == YAML_READER_ERROR: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * return ReaderError(self.stream_name, self.parser.problem_offset, - * self.parser.problem_value, '?', self.parser.problem) - */ - 
__pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":322 - * elif self.parser.error == YAML_READER_ERROR: - * if PY_MAJOR_VERSION < 3: - * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<< - * self.parser.problem_value, '?', self.parser.problem) - * else: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__ReaderError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":323 - * if PY_MAJOR_VERSION < 3: - * return ReaderError(self.stream_name, self.parser.problem_offset, - * self.parser.problem_value, '?', self.parser.problem) # <<<<<<<<<<<<<< - * else: - * return ReaderError(self.stream_name, self.parser.problem_offset, - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 323; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 323; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_5)); - __pyx_t_6 = PyTuple_New(5); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - 
__Pyx_INCREF(((PyObject *)__pyx_kp_s_8)); - PyTuple_SET_ITEM(__pyx_t_6, 3, ((PyObject *)__pyx_kp_s_8)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_8)); - PyTuple_SET_ITEM(__pyx_t_6, 4, ((PyObject *)__pyx_t_5)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_5)); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_t_5 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 322; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - goto __pyx_L4; - } - /*else*/ { - - /* "_yaml.pyx":325 - * self.parser.problem_value, '?', self.parser.problem) - * else: - * return ReaderError(self.stream_name, self.parser.problem_offset, # <<<<<<<<<<<<<< - * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) - * elif self.parser.error == YAML_SCANNER_ERROR \ - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__ReaderError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = PyInt_FromLong(__pyx_v_self->parser.problem_offset); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - - /* "_yaml.pyx":326 - * else: - * return ReaderError(self.stream_name, self.parser.problem_offset, - * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) # <<<<<<<<<<<<<< - * elif self.parser.error == YAML_SCANNER_ERROR \ - * or self.parser.error == YAML_PARSER_ERROR: - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parser.problem_value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 326; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); 
- __pyx_t_4 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 326; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_8)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_kp_u_8)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_8)); - PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __pyx_t_6 = 0; - __pyx_t_2 = 0; - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 325; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - } - __pyx_L4:; - goto __pyx_L3; - } - - /* "_yaml.pyx":328 - * self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) - * elif self.parser.error == YAML_SCANNER_ERROR \ - * or self.parser.error == YAML_PARSER_ERROR: # <<<<<<<<<<<<<< - * context_mark = None - * problem_mark = None - */ - __pyx_t_1 = (__pyx_v_self->parser.error == YAML_SCANNER_ERROR); - if (!__pyx_t_1) { - __pyx_t_7 = (__pyx_v_self->parser.error == YAML_PARSER_ERROR); - __pyx_t_8 = __pyx_t_7; - } else { - __pyx_t_8 = __pyx_t_1; - } - if (__pyx_t_8) { - - /* "_yaml.pyx":329 - * elif self.parser.error == YAML_SCANNER_ERROR \ - * or 
self.parser.error == YAML_PARSER_ERROR: - * context_mark = None # <<<<<<<<<<<<<< - * problem_mark = None - * if self.parser.context != NULL: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_context_mark); - __pyx_v_context_mark = Py_None; - - /* "_yaml.pyx":330 - * or self.parser.error == YAML_PARSER_ERROR: - * context_mark = None - * problem_mark = None # <<<<<<<<<<<<<< - * if self.parser.context != NULL: - * context_mark = Mark(self.stream_name, - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_problem_mark); - __pyx_v_problem_mark = Py_None; - - /* "_yaml.pyx":331 - * context_mark = None - * problem_mark = None - * if self.parser.context != NULL: # <<<<<<<<<<<<<< - * context_mark = Mark(self.stream_name, - * self.parser.context_mark.index, - */ - __pyx_t_8 = (__pyx_v_self->parser.context != NULL); - if (__pyx_t_8) { - - /* "_yaml.pyx":333 - * if self.parser.context != NULL: - * context_mark = Mark(self.stream_name, - * self.parser.context_mark.index, # <<<<<<<<<<<<<< - * self.parser.context_mark.line, - * self.parser.context_mark.column, None, None) - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_self->parser.context_mark.index); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 333; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":334 - * context_mark = Mark(self.stream_name, - * self.parser.context_mark.index, - * self.parser.context_mark.line, # <<<<<<<<<<<<<< - * self.parser.context_mark.column, None, None) - * if self.parser.problem != NULL: - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parser.context_mark.line); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 334; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":335 - * self.parser.context_mark.index, - * self.parser.context_mark.line, - * self.parser.context_mark.column, None, None) # <<<<<<<<<<<<<< - * if self.parser.problem != NULL: - * problem_mark = 
Mark(self.stream_name, - */ - __pyx_t_5 = PyInt_FromLong(__pyx_v_self->parser.context_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 335; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_2 = PyTuple_New(6); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 332; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_2, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_2, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_4 = 0; - __pyx_t_3 = 0; - __pyx_t_5 = 0; - __pyx_t_5 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 332; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_v_context_mark); - __pyx_v_context_mark = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":336 - * self.parser.context_mark.line, - * self.parser.context_mark.column, None, None) - * if self.parser.problem != NULL: # <<<<<<<<<<<<<< - * problem_mark = Mark(self.stream_name, - * self.parser.problem_mark.index, - */ - __pyx_t_8 = (__pyx_v_self->parser.problem != NULL); - if (__pyx_t_8) { - - /* "_yaml.pyx":338 - * if self.parser.problem != NULL: - * problem_mark = Mark(self.stream_name, - * self.parser.problem_mark.index, # <<<<<<<<<<<<<< - * 
self.parser.problem_mark.line, - * self.parser.problem_mark.column, None, None) - */ - __pyx_t_5 = PyInt_FromLong(__pyx_v_self->parser.problem_mark.index); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 338; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - - /* "_yaml.pyx":339 - * problem_mark = Mark(self.stream_name, - * self.parser.problem_mark.index, - * self.parser.problem_mark.line, # <<<<<<<<<<<<<< - * self.parser.problem_mark.column, None, None) - * context = None - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parser.problem_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 339; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":340 - * self.parser.problem_mark.index, - * self.parser.problem_mark.line, - * self.parser.problem_mark.column, None, None) # <<<<<<<<<<<<<< - * context = None - * if self.parser.context != NULL: - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parser.problem_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 340; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 337; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_5 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - 
__pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 337; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_v_problem_mark); - __pyx_v_problem_mark = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":341 - * self.parser.problem_mark.line, - * self.parser.problem_mark.column, None, None) - * context = None # <<<<<<<<<<<<<< - * if self.parser.context != NULL: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_context); - __pyx_v_context = Py_None; - - /* "_yaml.pyx":342 - * self.parser.problem_mark.column, None, None) - * context = None - * if self.parser.context != NULL: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * context = self.parser.context - */ - __pyx_t_8 = (__pyx_v_self->parser.context != NULL); - if (__pyx_t_8) { - - /* "_yaml.pyx":343 - * context = None - * if self.parser.context != NULL: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * context = self.parser.context - * else: - */ - __pyx_t_8 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_8) { - - /* "_yaml.pyx":344 - * if self.parser.context != NULL: - * if PY_MAJOR_VERSION < 3: - * context = self.parser.context # <<<<<<<<<<<<<< - * else: - * context = PyUnicode_FromString(self.parser.context) - */ - __pyx_t_3 = PyBytes_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 344; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_DECREF(__pyx_v_context); - __pyx_v_context = ((PyObject *)__pyx_t_3); - __pyx_t_3 = 0; - goto __pyx_L8; - } - /*else*/ { - - /* "_yaml.pyx":346 - * context = self.parser.context - * else: - * context = PyUnicode_FromString(self.parser.context) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * 
problem = self.parser.problem - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.context); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 346; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_context); - __pyx_v_context = __pyx_t_3; - __pyx_t_3 = 0; - } - __pyx_L8:; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":347 - * else: - * context = PyUnicode_FromString(self.parser.context) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * problem = self.parser.problem - * else: - */ - __pyx_t_8 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_8) { - - /* "_yaml.pyx":348 - * context = PyUnicode_FromString(self.parser.context) - * if PY_MAJOR_VERSION < 3: - * problem = self.parser.problem # <<<<<<<<<<<<<< - * else: - * problem = PyUnicode_FromString(self.parser.problem) - */ - __pyx_t_3 = PyBytes_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 348; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_DECREF(__pyx_v_problem); - __pyx_v_problem = ((PyObject *)__pyx_t_3); - __pyx_t_3 = 0; - goto __pyx_L9; - } - /*else*/ { - - /* "_yaml.pyx":350 - * problem = self.parser.problem - * else: - * problem = PyUnicode_FromString(self.parser.problem) # <<<<<<<<<<<<<< - * if self.parser.error == YAML_SCANNER_ERROR: - * return ScannerError(context, context_mark, problem, problem_mark) - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parser.problem); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 350; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_problem); - __pyx_v_problem = __pyx_t_3; - __pyx_t_3 = 0; - } - __pyx_L9:; - - /* "_yaml.pyx":351 - * else: - * problem = PyUnicode_FromString(self.parser.problem) - * if self.parser.error == YAML_SCANNER_ERROR: # <<<<<<<<<<<<<< - * return ScannerError(context, context_mark, 
problem, problem_mark) - * else: - */ - __pyx_t_8 = (__pyx_v_self->parser.error == YAML_SCANNER_ERROR); - if (__pyx_t_8) { - - /* "_yaml.pyx":352 - * problem = PyUnicode_FromString(self.parser.problem) - * if self.parser.error == YAML_SCANNER_ERROR: - * return ScannerError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<< - * else: - * return ParserError(context, context_mark, problem, problem_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScannerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_context); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_context); - __Pyx_GIVEREF(__pyx_v_context); - __Pyx_INCREF(__pyx_v_context_mark); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_context_mark); - __Pyx_GIVEREF(__pyx_v_context_mark); - __Pyx_INCREF(__pyx_v_problem); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_problem); - __Pyx_GIVEREF(__pyx_v_problem); - __Pyx_INCREF(__pyx_v_problem_mark); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_problem_mark); - __Pyx_GIVEREF(__pyx_v_problem_mark); - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 352; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L10; - } - /*else*/ { - - /* "_yaml.pyx":354 - * return ScannerError(context, context_mark, problem, problem_mark) - * else: - * return ParserError(context, context_mark, problem, problem_mark) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise 
ValueError("no parser error") - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__ParserError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_context); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_context); - __Pyx_GIVEREF(__pyx_v_context); - __Pyx_INCREF(__pyx_v_context_mark); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_context_mark); - __Pyx_GIVEREF(__pyx_v_context_mark); - __Pyx_INCREF(__pyx_v_problem); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_v_problem); - __Pyx_GIVEREF(__pyx_v_problem); - __Pyx_INCREF(__pyx_v_problem_mark); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_v_problem_mark); - __Pyx_GIVEREF(__pyx_v_problem_mark); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 354; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - } - __pyx_L10:; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":355 - * else: - * return ParserError(context, context_mark, problem, problem_mark) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ValueError("no parser error") - * else: - */ - __pyx_t_8 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_8) { - - /* "_yaml.pyx":356 - * return ParserError(context, context_mark, problem, problem_mark) - * if PY_MAJOR_VERSION < 3: - * raise ValueError("no parser error") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"no parser error") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_10), NULL); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L11; - } - /*else*/ { - - /* "_yaml.pyx":358 - * raise ValueError("no parser error") - * else: - * raise ValueError(u"no parser error") # <<<<<<<<<<<<<< - * - * def raw_scan(self): - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_11), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L11:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("_yaml.CParser._parser_error"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_context_mark); - __Pyx_DECREF(__pyx_v_problem_mark); - __Pyx_DECREF(__pyx_v_context); - __Pyx_DECREF(__pyx_v_problem); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":360 - * raise ValueError(u"no parser error") - * - * def raw_scan(self): # <<<<<<<<<<<<<< - * cdef yaml_token_t token - * cdef int done - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_3raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_3raw_scan(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - yaml_token_t __pyx_v_token; - int __pyx_v_done; - int __pyx_v_count; - PyObject *__pyx_v_error; - PyObject *__pyx_r = 
NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("raw_scan"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":364 - * cdef int done - * cdef int count - * count = 0 # <<<<<<<<<<<<<< - * done = 0 - * while done == 0: - */ - __pyx_v_count = 0; - - /* "_yaml.pyx":365 - * cdef int count - * count = 0 - * done = 0 # <<<<<<<<<<<<<< - * while done == 0: - * if yaml_parser_scan(&self.parser, &token) == 0: - */ - __pyx_v_done = 0; - - /* "_yaml.pyx":366 - * count = 0 - * done = 0 - * while done == 0: # <<<<<<<<<<<<<< - * if yaml_parser_scan(&self.parser, &token) == 0: - * error = self._parser_error() - */ - while (1) { - __pyx_t_1 = (__pyx_v_done == 0); - if (!__pyx_t_1) break; - - /* "_yaml.pyx":367 - * done = 0 - * while done == 0: - * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<< - * error = self._parser_error() - * raise error - */ - __pyx_t_2 = yaml_parser_scan((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 367; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_2 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":368 - * while done == 0: - * if yaml_parser_scan(&self.parser, &token) == 0: - * error = self._parser_error() # <<<<<<<<<<<<<< - * raise error - * if token.type == YAML_NO_TOKEN: - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parser_error(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 368; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":369 - * if yaml_parser_scan(&self.parser, &token) == 0: - * error = self._parser_error() - * raise error # <<<<<<<<<<<<<< - * if token.type 
== YAML_NO_TOKEN: - * done = 1 - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":370 - * error = self._parser_error() - * raise error - * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<< - * done = 1 - * else: - */ - __pyx_t_1 = (__pyx_v_token.type == YAML_NO_TOKEN); - if (__pyx_t_1) { - - /* "_yaml.pyx":371 - * raise error - * if token.type == YAML_NO_TOKEN: - * done = 1 # <<<<<<<<<<<<<< - * else: - * count = count+1 - */ - __pyx_v_done = 1; - goto __pyx_L8; - } - /*else*/ { - - /* "_yaml.pyx":373 - * done = 1 - * else: - * count = count+1 # <<<<<<<<<<<<<< - * yaml_token_delete(&token) - * return count - */ - __pyx_v_count = (__pyx_v_count + 1); - } - __pyx_L8:; - - /* "_yaml.pyx":374 - * else: - * count = count+1 - * yaml_token_delete(&token) # <<<<<<<<<<<<<< - * return count - * - */ - yaml_token_delete((&__pyx_v_token)); - } - - /* "_yaml.pyx":375 - * count = count+1 - * yaml_token_delete(&token) - * return count # <<<<<<<<<<<<<< - * - * cdef object _scan(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PyInt_FromLong(__pyx_v_count); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 375; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser.raw_scan"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":377 - * return count - * - * cdef object _scan(self): # <<<<<<<<<<<<<< - * cdef yaml_token_t token - * if yaml_parser_scan(&self.parser, &token) == 0: - */ - -static PyObject *__pyx_f_5_yaml_7CParser__scan(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) { - yaml_token_t 
__pyx_v_token; - PyObject *__pyx_v_error; - PyObject *__pyx_v_token_object; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("_scan"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_token_object = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":379 - * cdef object _scan(self): - * cdef yaml_token_t token - * if yaml_parser_scan(&self.parser, &token) == 0: # <<<<<<<<<<<<<< - * error = self._parser_error() - * raise error - */ - __pyx_t_1 = yaml_parser_scan((&__pyx_v_self->parser), (&__pyx_v_token)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 379; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = (__pyx_t_1 == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":380 - * cdef yaml_token_t token - * if yaml_parser_scan(&self.parser, &token) == 0: - * error = self._parser_error() # <<<<<<<<<<<<<< - * raise error - * token_object = self._token_to_object(&token) - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 380; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":381 - * if yaml_parser_scan(&self.parser, &token) == 0: - * error = self._parser_error() - * raise error # <<<<<<<<<<<<<< - * token_object = self._token_to_object(&token) - * yaml_token_delete(&token) - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 381; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":382 - * error = self._parser_error() - * raise error - * token_object = self._token_to_object(&token) # <<<<<<<<<<<<<< - * yaml_token_delete(&token) - * return token_object - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser 
*)__pyx_v_self->__pyx_vtab)->_token_to_object(__pyx_v_self, (&__pyx_v_token)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 382; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_token_object); - __pyx_v_token_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":383 - * raise error - * token_object = self._token_to_object(&token) - * yaml_token_delete(&token) # <<<<<<<<<<<<<< - * return token_object - * - */ - yaml_token_delete((&__pyx_v_token)); - - /* "_yaml.pyx":384 - * token_object = self._token_to_object(&token) - * yaml_token_delete(&token) - * return token_object # <<<<<<<<<<<<<< - * - * cdef object _token_to_object(self, yaml_token_t *token): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_token_object); - __pyx_r = __pyx_v_token_object; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser._scan"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_DECREF(__pyx_v_token_object); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":386 - * return token_object - * - * cdef object _token_to_object(self, yaml_token_t *token): # <<<<<<<<<<<<<< - * start_mark = Mark(self.stream_name, - * token.start_mark.index, - */ - -static PyObject *__pyx_f_5_yaml_7CParser__token_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_token_t *__pyx_v_token) { - struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark; - struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark; - PyObject *__pyx_v_encoding; - PyObject *__pyx_v_handle; - PyObject *__pyx_v_prefix; - PyObject *__pyx_v_value; - PyObject *__pyx_v_suffix; - int __pyx_v_plain; - PyObject *__pyx_v_style; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int 
__pyx_t_6; - __Pyx_RefNannySetupContext("_token_to_object"); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_encoding = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_handle = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_prefix = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_suffix = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_style = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":388 - * cdef object _token_to_object(self, yaml_token_t *token): - * start_mark = Mark(self.stream_name, - * token.start_mark.index, # <<<<<<<<<<<<<< - * token.start_mark.line, - * token.start_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_token->start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 388; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":389 - * start_mark = Mark(self.stream_name, - * token.start_mark.index, - * token.start_mark.line, # <<<<<<<<<<<<<< - * token.start_mark.column, - * None, None) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_token->start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 389; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":390 - * token.start_mark.index, - * token.start_mark.line, - * token.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * end_mark = Mark(self.stream_name, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_token->start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 390; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":391 - * token.start_mark.line, - * token.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * end_mark = Mark(self.stream_name, - * token.end_mark.index, - */ - __pyx_t_4 = PyTuple_New(6); if 
(unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 387; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 387; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_start_mark)); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "_yaml.pyx":393 - * None, None) - * end_mark = Mark(self.stream_name, - * token.end_mark.index, # <<<<<<<<<<<<<< - * token.end_mark.line, - * token.end_mark.column, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_token->end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 393; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":394 - * end_mark = Mark(self.stream_name, - * token.end_mark.index, - * token.end_mark.line, # <<<<<<<<<<<<<< - * token.end_mark.column, - * None, None) - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_token->end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 394; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); 
- - /* "_yaml.pyx":395 - * token.end_mark.index, - * token.end_mark.line, - * token.end_mark.column, # <<<<<<<<<<<<<< - * None, None) - * if token.type == YAML_NO_TOKEN: - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_token->end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 395; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":396 - * token.end_mark.line, - * token.end_mark.column, - * None, None) # <<<<<<<<<<<<<< - * if token.type == YAML_NO_TOKEN: - * return None - */ - __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 392; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 392; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_end_mark)); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":397 - * token.end_mark.column, - * None, None) - * if token.type == YAML_NO_TOKEN: # <<<<<<<<<<<<<< - * return None - * elif token.type == YAML_STREAM_START_TOKEN: - */ - 
__pyx_t_5 = (__pyx_v_token->type == YAML_NO_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":398 - * None, None) - * if token.type == YAML_NO_TOKEN: - * return None # <<<<<<<<<<<<<< - * elif token.type == YAML_STREAM_START_TOKEN: - * encoding = None - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_None); - __pyx_r = Py_None; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":399 - * if token.type == YAML_NO_TOKEN: - * return None - * elif token.type == YAML_STREAM_START_TOKEN: # <<<<<<<<<<<<<< - * encoding = None - * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_STREAM_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":400 - * return None - * elif token.type == YAML_STREAM_START_TOKEN: - * encoding = None # <<<<<<<<<<<<<< - * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = Py_None; - - /* "_yaml.pyx":401 - * elif token.type == YAML_STREAM_START_TOKEN: - * encoding = None - * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<< - * if self.unicode_source == 0: - * encoding = u"utf-8" - */ - __pyx_t_5 = (__pyx_v_token->data.stream_start.encoding == YAML_UTF8_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":402 - * encoding = None - * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: # <<<<<<<<<<<<<< - * encoding = u"utf-8" - * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - */ - __pyx_t_5 = (__pyx_v_self->unicode_source == 0); - if (__pyx_t_5) { - - /* "_yaml.pyx":403 - * if token.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: - * encoding = u"utf-8" # <<<<<<<<<<<<<< - * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_12)); - __Pyx_DECREF(__pyx_v_encoding); - 
__pyx_v_encoding = ((PyObject *)__pyx_kp_u_12); - goto __pyx_L5; - } - __pyx_L5:; - goto __pyx_L4; - } - - /* "_yaml.pyx":404 - * if self.unicode_source == 0: - * encoding = u"utf-8" - * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<< - * encoding = u"utf-16-le" - * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - */ - __pyx_t_5 = (__pyx_v_token->data.stream_start.encoding == YAML_UTF16LE_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":405 - * encoding = u"utf-8" - * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" # <<<<<<<<<<<<<< - * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_13)); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = ((PyObject *)__pyx_kp_u_13); - goto __pyx_L4; - } - - /* "_yaml.pyx":406 - * elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" - * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<< - * encoding = u"utf-16-be" - * return StreamStartToken(start_mark, end_mark, encoding) - */ - __pyx_t_5 = (__pyx_v_token->data.stream_start.encoding == YAML_UTF16BE_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":407 - * encoding = u"utf-16-le" - * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" # <<<<<<<<<<<<<< - * return StreamStartToken(start_mark, end_mark, encoding) - * elif token.type == YAML_STREAM_END_TOKEN: - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_14)); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = ((PyObject *)__pyx_kp_u_14); - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":408 - * elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" - * return StreamStartToken(start_mark, end_mark, encoding) # <<<<<<<<<<<<<< - * elif token.type == YAML_STREAM_END_TOKEN: - * return 
StreamEndToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamStartToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_encoding); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - __pyx_t_4 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 408; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":409 - * encoding = u"utf-16-be" - * return StreamStartToken(start_mark, end_mark, encoding) - * elif token.type == YAML_STREAM_END_TOKEN: # <<<<<<<<<<<<<< - * return StreamEndToken(start_mark, end_mark) - * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_STREAM_END_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":410 - * return StreamStartToken(start_mark, end_mark, encoding) - * elif token.type == YAML_STREAM_END_TOKEN: - * return StreamEndToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: - * return DirectiveToken(u"YAML", - */ - 
__Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_2 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 410; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":411 - * elif token.type == YAML_STREAM_END_TOKEN: - * return StreamEndToken(start_mark, end_mark) - * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<< - * return DirectiveToken(u"YAML", - * (token.data.version_directive.major, - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_VERSION_DIRECTIVE_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":412 - * return StreamEndToken(start_mark, end_mark) - * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: - * return DirectiveToken(u"YAML", # <<<<<<<<<<<<<< - * (token.data.version_directive.major, - * token.data.version_directive.minor), - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__DirectiveToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} 
- __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":413 - * elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: - * return DirectiveToken(u"YAML", - * (token.data.version_directive.major, # <<<<<<<<<<<<<< - * token.data.version_directive.minor), - * start_mark, end_mark) - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_token->data.version_directive.major); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 413; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":414 - * return DirectiveToken(u"YAML", - * (token.data.version_directive.major, - * token.data.version_directive.minor), # <<<<<<<<<<<<<< - * start_mark, end_mark) - * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_token->data.version_directive.minor); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 413; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __pyx_t_1 = 0; - __pyx_t_4 = 0; - - /* "_yaml.pyx":415 - * (token.data.version_directive.major, - * token.data.version_directive.minor), - * start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: - * handle = PyUnicode_FromString(token.data.tag_directive.handle) - */ - __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(((PyObject *)__pyx_n_u__YAML)); - PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_n_u__YAML)); - __Pyx_GIVEREF(((PyObject *)__pyx_n_u__YAML)); - PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject 
*)__pyx_t_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 3, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":416 - * token.data.version_directive.minor), - * start_mark, end_mark) - * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: # <<<<<<<<<<<<<< - * handle = PyUnicode_FromString(token.data.tag_directive.handle) - * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_TAG_DIRECTIVE_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":417 - * start_mark, end_mark) - * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: - * handle = PyUnicode_FromString(token.data.tag_directive.handle) # <<<<<<<<<<<<<< - * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) - * return DirectiveToken(u"TAG", (handle, prefix), - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 417; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":418 - * elif token.type == YAML_TAG_DIRECTIVE_TOKEN: - * handle = PyUnicode_FromString(token.data.tag_directive.handle) - * prefix = 
PyUnicode_FromString(token.data.tag_directive.prefix) # <<<<<<<<<<<<<< - * return DirectiveToken(u"TAG", (handle, prefix), - * start_mark, end_mark) - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag_directive.prefix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 418; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":419 - * handle = PyUnicode_FromString(token.data.tag_directive.handle) - * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) - * return DirectiveToken(u"TAG", (handle, prefix), # <<<<<<<<<<<<<< - * start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_START_TOKEN: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DirectiveToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_handle); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_handle); - __Pyx_GIVEREF(__pyx_v_handle); - __Pyx_INCREF(__pyx_v_prefix); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_prefix); - __Pyx_GIVEREF(__pyx_v_prefix); - - /* "_yaml.pyx":420 - * prefix = PyUnicode_FromString(token.data.tag_directive.prefix) - * return DirectiveToken(u"TAG", (handle, prefix), - * start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_DOCUMENT_START_TOKEN: - * return DocumentStartToken(start_mark, end_mark) - */ - __pyx_t_2 = PyTuple_New(4); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_n_u__TAG)); - PyTuple_SET_ITEM(__pyx_t_2, 
0, ((PyObject *)__pyx_n_u__TAG)); - __Pyx_GIVEREF(((PyObject *)__pyx_n_u__TAG)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_t_4)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 3, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 419; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":421 - * return DirectiveToken(u"TAG", (handle, prefix), - * start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_START_TOKEN: # <<<<<<<<<<<<<< - * return DocumentStartToken(start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_END_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_DOCUMENT_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":422 - * start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_START_TOKEN: - * return DocumentStartToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_DOCUMENT_END_TOKEN: - * return DocumentEndToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentStartToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); 
- __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":423 - * elif token.type == YAML_DOCUMENT_START_TOKEN: - * return DocumentStartToken(start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_END_TOKEN: # <<<<<<<<<<<<<< - * return DocumentEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_DOCUMENT_END_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":424 - * return DocumentStartToken(start_mark, end_mark) - * elif token.type == YAML_DOCUMENT_END_TOKEN: - * return DocumentEndToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: - * return BlockSequenceStartToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentEndToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - 
__Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":425 - * elif token.type == YAML_DOCUMENT_END_TOKEN: - * return DocumentEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<< - * return BlockSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_BLOCK_SEQUENCE_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":426 - * return DocumentEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: - * return BlockSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: - * return BlockMappingStartToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s_15); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - 
PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 426; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":427 - * elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: - * return BlockSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: # <<<<<<<<<<<<<< - * return BlockMappingStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_END_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_BLOCK_MAPPING_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":428 - * return BlockSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: - * return BlockMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_BLOCK_END_TOKEN: - * return BlockEndToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s_16); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = 
PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 428; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":429 - * elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: - * return BlockMappingStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_END_TOKEN: # <<<<<<<<<<<<<< - * return BlockEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_BLOCK_END_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":430 - * return BlockMappingStartToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_END_TOKEN: - * return BlockEndToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: - * return FlowSequenceStartToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__BlockEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 430; __pyx_clineno 
= __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":431 - * elif token.type == YAML_BLOCK_END_TOKEN: - * return BlockEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: # <<<<<<<<<<<<<< - * return FlowSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_FLOW_SEQUENCE_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":432 - * return BlockEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: - * return FlowSequenceStartToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: - * return FlowSequenceEndToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s_17); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 432; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); 
__pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":433 - * elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: - * return FlowSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: # <<<<<<<<<<<<<< - * return FlowSequenceEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_FLOW_SEQUENCE_END_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":434 - * return FlowSequenceStartToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: - * return FlowSequenceEndToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: - * return FlowMappingStartToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s_18); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 434; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":435 - * 
elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: - * return FlowSequenceEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: # <<<<<<<<<<<<<< - * return FlowMappingStartToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_FLOW_MAPPING_START_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":436 - * return FlowSequenceEndToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: - * return FlowMappingStartToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: - * return FlowMappingEndToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s_19); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 436; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":437 - * elif token.type == YAML_FLOW_MAPPING_START_TOKEN: - * return FlowMappingStartToken(start_mark, end_mark) - * elif token.type == 
YAML_FLOW_MAPPING_END_TOKEN: # <<<<<<<<<<<<<< - * return FlowMappingEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_ENTRY_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_FLOW_MAPPING_END_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":438 - * return FlowMappingStartToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: - * return FlowMappingEndToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_BLOCK_ENTRY_TOKEN: - * return BlockEntryToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__FlowMappingEndToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":439 - * elif token.type == YAML_FLOW_MAPPING_END_TOKEN: - * return FlowMappingEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_ENTRY_TOKEN: # <<<<<<<<<<<<<< - * return BlockEntryToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_ENTRY_TOKEN: - 
*/ - __pyx_t_5 = (__pyx_v_token->type == YAML_BLOCK_ENTRY_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":440 - * return FlowMappingEndToken(start_mark, end_mark) - * elif token.type == YAML_BLOCK_ENTRY_TOKEN: - * return BlockEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_FLOW_ENTRY_TOKEN: - * return FlowEntryToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__BlockEntryToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 440; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":441 - * elif token.type == YAML_BLOCK_ENTRY_TOKEN: - * return BlockEntryToken(start_mark, end_mark) - * elif token.type == YAML_FLOW_ENTRY_TOKEN: # <<<<<<<<<<<<<< - * return FlowEntryToken(start_mark, end_mark) - * elif token.type == YAML_KEY_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_FLOW_ENTRY_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":442 - * return BlockEntryToken(start_mark, end_mark) - * elif token.type == 
YAML_FLOW_ENTRY_TOKEN: - * return FlowEntryToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_KEY_TOKEN: - * return KeyToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__FlowEntryToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":443 - * elif token.type == YAML_FLOW_ENTRY_TOKEN: - * return FlowEntryToken(start_mark, end_mark) - * elif token.type == YAML_KEY_TOKEN: # <<<<<<<<<<<<<< - * return KeyToken(start_mark, end_mark) - * elif token.type == YAML_VALUE_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_KEY_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":444 - * return FlowEntryToken(start_mark, end_mark) - * elif token.type == YAML_KEY_TOKEN: - * return KeyToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_VALUE_TOKEN: - * return ValueToken(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, 
__pyx_n_s__KeyToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":445 - * elif token.type == YAML_KEY_TOKEN: - * return KeyToken(start_mark, end_mark) - * elif token.type == YAML_VALUE_TOKEN: # <<<<<<<<<<<<<< - * return ValueToken(start_mark, end_mark) - * elif token.type == YAML_ALIAS_TOKEN: - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_VALUE_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":446 - * return KeyToken(start_mark, end_mark) - * elif token.type == YAML_VALUE_TOKEN: - * return ValueToken(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_ALIAS_TOKEN: - * value = PyUnicode_FromString(token.data.alias.value) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__ValueToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 446; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":447 - * elif token.type == YAML_VALUE_TOKEN: - * return ValueToken(start_mark, end_mark) - * elif token.type == YAML_ALIAS_TOKEN: # <<<<<<<<<<<<<< - * value = PyUnicode_FromString(token.data.alias.value) - * return AliasToken(value, start_mark, end_mark) - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_ALIAS_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":448 - * return ValueToken(start_mark, end_mark) - * elif token.type == YAML_ALIAS_TOKEN: - * value = PyUnicode_FromString(token.data.alias.value) # <<<<<<<<<<<<<< - * return AliasToken(value, start_mark, end_mark) - * elif token.type == YAML_ANCHOR_TOKEN: - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.alias.value); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 448; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":449 - * elif token.type == YAML_ALIAS_TOKEN: - * value = PyUnicode_FromString(token.data.alias.value) - * return AliasToken(value, start_mark, end_mark) # <<<<<<<<<<<<<< - * elif 
token.type == YAML_ANCHOR_TOKEN: - * value = PyUnicode_FromString(token.data.anchor.value) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__AliasToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 449; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":450 - * value = PyUnicode_FromString(token.data.alias.value) - * return AliasToken(value, start_mark, end_mark) - * elif token.type == YAML_ANCHOR_TOKEN: # <<<<<<<<<<<<<< - * value = PyUnicode_FromString(token.data.anchor.value) - * return AnchorToken(value, start_mark, end_mark) - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_ANCHOR_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":451 - * return AliasToken(value, start_mark, end_mark) - * elif token.type == YAML_ANCHOR_TOKEN: - * value = PyUnicode_FromString(token.data.anchor.value) # <<<<<<<<<<<<<< - * return AnchorToken(value, start_mark, end_mark) - * 
elif token.type == YAML_TAG_TOKEN: - */ - __pyx_t_4 = PyUnicode_FromString(__pyx_v_token->data.anchor.value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 451; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":452 - * elif token.type == YAML_ANCHOR_TOKEN: - * value = PyUnicode_FromString(token.data.anchor.value) - * return AnchorToken(value, start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_TAG_TOKEN: - * handle = PyUnicode_FromString(token.data.tag.handle) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__AnchorToken); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = PyTuple_New(3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 452; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":453 - * value = PyUnicode_FromString(token.data.anchor.value) - * return 
AnchorToken(value, start_mark, end_mark) - * elif token.type == YAML_TAG_TOKEN: # <<<<<<<<<<<<<< - * handle = PyUnicode_FromString(token.data.tag.handle) - * suffix = PyUnicode_FromString(token.data.tag.suffix) - */ - __pyx_t_5 = (__pyx_v_token->type == YAML_TAG_TOKEN); - if (__pyx_t_5) { - - /* "_yaml.pyx":454 - * return AnchorToken(value, start_mark, end_mark) - * elif token.type == YAML_TAG_TOKEN: - * handle = PyUnicode_FromString(token.data.tag.handle) # <<<<<<<<<<<<<< - * suffix = PyUnicode_FromString(token.data.tag.suffix) - * if not handle: - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag.handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 454; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":455 - * elif token.type == YAML_TAG_TOKEN: - * handle = PyUnicode_FromString(token.data.tag.handle) - * suffix = PyUnicode_FromString(token.data.tag.suffix) # <<<<<<<<<<<<<< - * if not handle: - * handle = None - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_token->data.tag.suffix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_suffix); - __pyx_v_suffix = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":456 - * handle = PyUnicode_FromString(token.data.tag.handle) - * suffix = PyUnicode_FromString(token.data.tag.suffix) - * if not handle: # <<<<<<<<<<<<<< - * handle = None - * return TagToken((handle, suffix), start_mark, end_mark) - */ - __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_v_handle); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 456; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_6 = (!__pyx_t_5); - if (__pyx_t_6) { - - /* "_yaml.pyx":457 - * suffix = PyUnicode_FromString(token.data.tag.suffix) - * if not handle: - * handle = None # 
<<<<<<<<<<<<<< - * return TagToken((handle, suffix), start_mark, end_mark) - * elif token.type == YAML_SCALAR_TOKEN: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = Py_None; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":458 - * if not handle: - * handle = None - * return TagToken((handle, suffix), start_mark, end_mark) # <<<<<<<<<<<<<< - * elif token.type == YAML_SCALAR_TOKEN: - * value = PyUnicode_DecodeUTF8(token.data.scalar.value, - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__TagToken); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(__pyx_v_handle); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_handle); - __Pyx_GIVEREF(__pyx_v_handle); - __Pyx_INCREF(__pyx_v_suffix); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_suffix); - __Pyx_GIVEREF(__pyx_v_suffix); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 458; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":459 - * handle = None - * return TagToken((handle, suffix), start_mark, end_mark) - * elif token.type == YAML_SCALAR_TOKEN: # <<<<<<<<<<<<<< - * value = PyUnicode_DecodeUTF8(token.data.scalar.value, - * token.data.scalar.length, 'strict') - */ - __pyx_t_6 = (__pyx_v_token->type == YAML_SCALAR_TOKEN); - if (__pyx_t_6) { - - /* "_yaml.pyx":461 - * elif token.type == YAML_SCALAR_TOKEN: - * value = PyUnicode_DecodeUTF8(token.data.scalar.value, - * token.data.scalar.length, 'strict') # <<<<<<<<<<<<<< - * plain = False - * style = None - */ - __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_token->data.scalar.value, __pyx_v_token->data.scalar.length, __pyx_k__strict); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 460; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":462 - * value = PyUnicode_DecodeUTF8(token.data.scalar.value, - * token.data.scalar.length, 'strict') - * plain = False # <<<<<<<<<<<<<< - * style = None - * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - */ - __pyx_v_plain = 0; - - /* "_yaml.pyx":463 - * token.data.scalar.length, 'strict') - * plain = False - * style = None # <<<<<<<<<<<<<< - * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * plain = True - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = Py_None; - - /* "_yaml.pyx":464 - * plain = False - * style = None - * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<< - * plain = True - * style = u'' - */ - __pyx_t_6 = (__pyx_v_token->data.scalar.style == YAML_PLAIN_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":465 - * style = None - * if token.data.scalar.style == 
YAML_PLAIN_SCALAR_STYLE: - * plain = True # <<<<<<<<<<<<<< - * style = u'' - * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - */ - __pyx_v_plain = 1; - - /* "_yaml.pyx":466 - * if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * plain = True - * style = u'' # <<<<<<<<<<<<<< - * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_20)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_20); - goto __pyx_L7; - } - - /* "_yaml.pyx":467 - * plain = True - * style = u'' - * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'\'' - * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_token->data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":468 - * style = u'' - * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' # <<<<<<<<<<<<<< - * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_21)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_21); - goto __pyx_L7; - } - - /* "_yaml.pyx":469 - * elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'"' - * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_token->data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":470 - * style = u'\'' - * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' # <<<<<<<<<<<<<< - * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_22)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_22); - goto 
__pyx_L7; - } - - /* "_yaml.pyx":471 - * elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'|' - * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_token->data.scalar.style == YAML_LITERAL_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":472 - * style = u'"' - * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' # <<<<<<<<<<<<<< - * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_23)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_23); - goto __pyx_L7; - } - - /* "_yaml.pyx":473 - * elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'>' - * return ScalarToken(value, plain, - */ - __pyx_t_6 = (__pyx_v_token->data.scalar.style == YAML_FOLDED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":474 - * style = u'|' - * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' # <<<<<<<<<<<<<< - * return ScalarToken(value, plain, - * start_mark, end_mark, style) - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_24)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_24); - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":475 - * elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - * return ScalarToken(value, plain, # <<<<<<<<<<<<<< - * start_mark, end_mark, style) - * else: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarToken); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain); if (unlikely(!__pyx_t_4)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":476 - * style = u'>' - * return ScalarToken(value, plain, - * start_mark, end_mark, style) # <<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_style); - PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_v_style); - __Pyx_GIVEREF(__pyx_v_style); - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 475; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - /*else*/ { - - /* "_yaml.pyx":478 - * start_mark, end_mark, style) - * else: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ValueError("unknown token type") - * else: - */ - __pyx_t_6 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_6) { - - /* "_yaml.pyx":479 - * else: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("unknown token type") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"unknown token type") - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, 
((PyObject *)__pyx_k_tuple_26), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L8; - } - /*else*/ { - - /* "_yaml.pyx":481 - * raise ValueError("unknown token type") - * else: - * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<< - * - * def get_token(self): - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_27), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L8:; - } - __pyx_L3:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.CParser._token_to_object"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_start_mark); - __Pyx_DECREF((PyObject *)__pyx_v_end_mark); - __Pyx_DECREF(__pyx_v_encoding); - __Pyx_DECREF(__pyx_v_handle); - __Pyx_DECREF(__pyx_v_prefix); - __Pyx_DECREF(__pyx_v_value); - __Pyx_DECREF(__pyx_v_suffix); - __Pyx_DECREF(__pyx_v_style); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":483 - * raise ValueError(u"unknown token type") - * - * def get_token(self): # <<<<<<<<<<<<<< - * if self.current_token is not None: - * value = self.current_token - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_4get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject 
*__pyx_pf_5_yaml_7CParser_4get_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_v_value; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("get_token"); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":484 - * - * def get_token(self): - * if self.current_token is not None: # <<<<<<<<<<<<<< - * value = self.current_token - * self.current_token = None - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token != Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":485 - * def get_token(self): - * if self.current_token is not None: - * value = self.current_token # <<<<<<<<<<<<<< - * self.current_token = None - * else: - */ - __Pyx_INCREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token; - - /* "_yaml.pyx":486 - * if self.current_token is not None: - * value = self.current_token - * self.current_token = None # <<<<<<<<<<<<<< - * else: - * value = self._scan() - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token = Py_None; - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":488 - * self.current_token = None - * else: - * value = self._scan() # <<<<<<<<<<<<<< - * return value - * - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_scan(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 488; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - 
} - __pyx_L5:; - - /* "_yaml.pyx":489 - * else: - * value = self._scan() - * return value # <<<<<<<<<<<<<< - * - * def peek_token(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_value); - __pyx_r = __pyx_v_value; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.CParser.get_token"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_value); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":491 - * return value - * - * def peek_token(self): # <<<<<<<<<<<<<< - * if self.current_token is None: - * self.current_token = self._scan() - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_5peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_5peek_token(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("peek_token"); - - /* "_yaml.pyx":492 - * - * def peek_token(self): - * if self.current_token is None: # <<<<<<<<<<<<<< - * self.current_token = self._scan() - * return self.current_token - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":493 - * def peek_token(self): - * if self.current_token is None: - * self.current_token = self._scan() # <<<<<<<<<<<<<< - * return self.current_token - * - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_scan(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 493; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __Pyx_DECREF(((struct 
__pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":494 - * if self.current_token is None: - * self.current_token = self._scan() - * return self.current_token # <<<<<<<<<<<<<< - * - * def check_token(self, *choices): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __pyx_r = ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.CParser.peek_token"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":496 - * return self.current_token - * - * def check_token(self, *choices): # <<<<<<<<<<<<<< - * if self.current_token is None: - * self.current_token = self._scan() - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_6check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_6check_token(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_choices = 0; - PyObject *__pyx_v_token_class; - PyObject *__pyx_v_choice; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - __Pyx_RefNannySetupContext("check_token"); - if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_token", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_choices = __pyx_args; - __pyx_v_token_class = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_choice = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":497 - * - * def check_token(self, *choices): - * if self.current_token 
is None: # <<<<<<<<<<<<<< - * self.current_token = self._scan() - * if self.current_token is None: - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":498 - * def check_token(self, *choices): - * if self.current_token is None: - * self.current_token = self._scan() # <<<<<<<<<<<<<< - * if self.current_token is None: - * return False - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_scan(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 498; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":499 - * if self.current_token is None: - * self.current_token = self._scan() - * if self.current_token is None: # <<<<<<<<<<<<<< - * return False - * if not choices: - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":500 - * self.current_token = self._scan() - * if self.current_token is None: - * return False # <<<<<<<<<<<<<< - * if not choices: - * return True - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 500; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":501 - * if self.current_token is None: - * return False - * if not choices: # <<<<<<<<<<<<<< - * return True - * 
token_class = self.current_token.__class__ - */ - __pyx_t_1 = (((PyObject *)__pyx_v_choices) != Py_None) && (PyTuple_GET_SIZE(((PyObject *)__pyx_v_choices)) != 0); - __pyx_t_3 = (!__pyx_t_1); - if (__pyx_t_3) { - - /* "_yaml.pyx":502 - * return False - * if not choices: - * return True # <<<<<<<<<<<<<< - * token_class = self.current_token.__class__ - * for choice in choices: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 502; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":503 - * if not choices: - * return True - * token_class = self.current_token.__class__ # <<<<<<<<<<<<<< - * for choice in choices: - * if token_class is choice: - */ - __pyx_t_2 = PyObject_GetAttr(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_token, __pyx_n_s____class__); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 503; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_token_class); - __pyx_v_token_class = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":504 - * return True - * token_class = self.current_token.__class__ - * for choice in choices: # <<<<<<<<<<<<<< - * if token_class is choice: - * return True - */ - if (unlikely(__pyx_v_choices == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 504; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_4 = 0; __pyx_t_2 = ((PyObject *)__pyx_v_choices); __Pyx_INCREF(__pyx_t_2); - for (;;) { - if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_2)) break; - __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; - __Pyx_DECREF(__pyx_v_choice); - __pyx_v_choice = __pyx_t_5; - __pyx_t_5 = 0; - - /* "_yaml.pyx":505 - * 
token_class = self.current_token.__class__ - * for choice in choices: - * if token_class is choice: # <<<<<<<<<<<<<< - * return True - * return False - */ - __pyx_t_3 = (__pyx_v_token_class == __pyx_v_choice); - if (__pyx_t_3) { - - /* "_yaml.pyx":506 - * for choice in choices: - * if token_class is choice: - * return True # <<<<<<<<<<<<<< - * return False - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 506; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L10; - } - __pyx_L10:; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "_yaml.pyx":507 - * if token_class is choice: - * return True - * return False # <<<<<<<<<<<<<< - * - * def raw_parse(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 507; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("_yaml.CParser.check_token"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_choices); - __Pyx_DECREF(__pyx_v_token_class); - __Pyx_DECREF(__pyx_v_choice); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":509 - * return False - * - * def raw_parse(self): # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * cdef int done - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_7raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_7raw_parse(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - yaml_event_t __pyx_v_event; - int 
__pyx_v_done; - int __pyx_v_count; - PyObject *__pyx_v_error; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("raw_parse"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":513 - * cdef int done - * cdef int count - * count = 0 # <<<<<<<<<<<<<< - * done = 0 - * while done == 0: - */ - __pyx_v_count = 0; - - /* "_yaml.pyx":514 - * cdef int count - * count = 0 - * done = 0 # <<<<<<<<<<<<<< - * while done == 0: - * if yaml_parser_parse(&self.parser, &event) == 0: - */ - __pyx_v_done = 0; - - /* "_yaml.pyx":515 - * count = 0 - * done = 0 - * while done == 0: # <<<<<<<<<<<<<< - * if yaml_parser_parse(&self.parser, &event) == 0: - * error = self._parser_error() - */ - while (1) { - __pyx_t_1 = (__pyx_v_done == 0); - if (!__pyx_t_1) break; - - /* "_yaml.pyx":516 - * done = 0 - * while done == 0: - * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<< - * error = self._parser_error() - * raise error - */ - __pyx_t_2 = yaml_parser_parse((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 516; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_2 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":517 - * while done == 0: - * if yaml_parser_parse(&self.parser, &event) == 0: - * error = self._parser_error() # <<<<<<<<<<<<<< - * raise error - * if event.type == YAML_NO_EVENT: - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parser_error(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 517; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":518 - * if yaml_parser_parse(&self.parser, &event) 
== 0: - * error = self._parser_error() - * raise error # <<<<<<<<<<<<<< - * if event.type == YAML_NO_EVENT: - * done = 1 - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 518; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":519 - * error = self._parser_error() - * raise error - * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<< - * done = 1 - * else: - */ - __pyx_t_1 = (__pyx_v_event.type == YAML_NO_EVENT); - if (__pyx_t_1) { - - /* "_yaml.pyx":520 - * raise error - * if event.type == YAML_NO_EVENT: - * done = 1 # <<<<<<<<<<<<<< - * else: - * count = count+1 - */ - __pyx_v_done = 1; - goto __pyx_L8; - } - /*else*/ { - - /* "_yaml.pyx":522 - * done = 1 - * else: - * count = count+1 # <<<<<<<<<<<<<< - * yaml_event_delete(&event) - * return count - */ - __pyx_v_count = (__pyx_v_count + 1); - } - __pyx_L8:; - - /* "_yaml.pyx":523 - * else: - * count = count+1 - * yaml_event_delete(&event) # <<<<<<<<<<<<<< - * return count - * - */ - yaml_event_delete((&__pyx_v_event)); - } - - /* "_yaml.pyx":524 - * count = count+1 - * yaml_event_delete(&event) - * return count # <<<<<<<<<<<<<< - * - * cdef object _parse(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = PyInt_FromLong(__pyx_v_count); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 524; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser.raw_parse"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":526 - * return count - * - * cdef object _parse(self): # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * if yaml_parser_parse(&self.parser, &event) == 0: - */ - -static PyObject 
*__pyx_f_5_yaml_7CParser__parse(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) { - yaml_event_t __pyx_v_event; - PyObject *__pyx_v_error; - PyObject *__pyx_v_event_object; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("_parse"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_event_object = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":528 - * cdef object _parse(self): - * cdef yaml_event_t event - * if yaml_parser_parse(&self.parser, &event) == 0: # <<<<<<<<<<<<<< - * error = self._parser_error() - * raise error - */ - __pyx_t_1 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 528; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = (__pyx_t_1 == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":529 - * cdef yaml_event_t event - * if yaml_parser_parse(&self.parser, &event) == 0: - * error = self._parser_error() # <<<<<<<<<<<<<< - * raise error - * event_object = self._event_to_object(&event) - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 529; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":530 - * if yaml_parser_parse(&self.parser, &event) == 0: - * error = self._parser_error() - * raise error # <<<<<<<<<<<<<< - * event_object = self._event_to_object(&event) - * yaml_event_delete(&event) - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 530; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":531 - * error = self._parser_error() - * raise error - * event_object = self._event_to_object(&event) # <<<<<<<<<<<<<< - * 
yaml_event_delete(&event) - * return event_object - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_event_to_object(__pyx_v_self, (&__pyx_v_event)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 531; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_event_object); - __pyx_v_event_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":532 - * raise error - * event_object = self._event_to_object(&event) - * yaml_event_delete(&event) # <<<<<<<<<<<<<< - * return event_object - * - */ - yaml_event_delete((&__pyx_v_event)); - - /* "_yaml.pyx":533 - * event_object = self._event_to_object(&event) - * yaml_event_delete(&event) - * return event_object # <<<<<<<<<<<<<< - * - * cdef object _event_to_object(self, yaml_event_t *event): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_event_object); - __pyx_r = __pyx_v_event_object; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser._parse"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_DECREF(__pyx_v_event_object); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":535 - * return event_object - * - * cdef object _event_to_object(self, yaml_event_t *event): # <<<<<<<<<<<<<< - * cdef yaml_tag_directive_t *tag_directive - * start_mark = Mark(self.stream_name, - */ - -static PyObject *__pyx_f_5_yaml_7CParser__event_to_object(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, yaml_event_t *__pyx_v_event) { - yaml_tag_directive_t *__pyx_v_tag_directive; - struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark; - struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark; - PyObject *__pyx_v_encoding; - int __pyx_v_explicit; - PyObject *__pyx_v_version; - PyObject *__pyx_v_tags; - PyObject *__pyx_v_handle; - PyObject *__pyx_v_prefix; - PyObject *__pyx_v_anchor; 
- PyObject *__pyx_v_tag; - PyObject *__pyx_v_value; - int __pyx_v_plain_implicit; - int __pyx_v_quoted_implicit; - PyObject *__pyx_v_style; - int __pyx_v_implicit; - PyObject *__pyx_v_flow_style; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - __Pyx_RefNannySetupContext("_event_to_object"); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_encoding = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_version = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_tags = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_handle = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_prefix = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_anchor = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_tag = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_style = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_flow_style = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":538 - * cdef yaml_tag_directive_t *tag_directive - * start_mark = Mark(self.stream_name, - * event.start_mark.index, # <<<<<<<<<<<<<< - * event.start_mark.line, - * event.start_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_event->start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 538; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":539 - * start_mark = Mark(self.stream_name, - * event.start_mark.index, - * event.start_mark.line, # <<<<<<<<<<<<<< - * event.start_mark.column, - * None, None) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_event->start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 539; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":540 - * event.start_mark.index, - * 
event.start_mark.line, - * event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * end_mark = Mark(self.stream_name, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_event->start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 540; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":541 - * event.start_mark.line, - * event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * end_mark = Mark(self.stream_name, - * event.end_mark.index, - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 537; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 537; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_start_mark)); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "_yaml.pyx":543 - * None, None) - * end_mark = Mark(self.stream_name, - * event.end_mark.index, # <<<<<<<<<<<<<< - * event.end_mark.line, - * event.end_mark.column, - */ - __pyx_t_3 = 
PyInt_FromLong(__pyx_v_event->end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 543; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":544 - * end_mark = Mark(self.stream_name, - * event.end_mark.index, - * event.end_mark.line, # <<<<<<<<<<<<<< - * event.end_mark.column, - * None, None) - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_event->end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 544; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":545 - * event.end_mark.index, - * event.end_mark.line, - * event.end_mark.column, # <<<<<<<<<<<<<< - * None, None) - * if event.type == YAML_NO_EVENT: - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_event->end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 545; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":546 - * event.end_mark.line, - * event.end_mark.column, - * None, None) # <<<<<<<<<<<<<< - * if event.type == YAML_NO_EVENT: - * return None - */ - __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 542; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(((PyObject 
*)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 542; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_end_mark)); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":547 - * event.end_mark.column, - * None, None) - * if event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<< - * return None - * elif event.type == YAML_STREAM_START_EVENT: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_NO_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":548 - * None, None) - * if event.type == YAML_NO_EVENT: - * return None # <<<<<<<<<<<<<< - * elif event.type == YAML_STREAM_START_EVENT: - * encoding = None - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(Py_None); - __pyx_r = Py_None; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":549 - * if event.type == YAML_NO_EVENT: - * return None - * elif event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<< - * encoding = None - * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_STREAM_START_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":550 - * return None - * elif event.type == YAML_STREAM_START_EVENT: - * encoding = None # <<<<<<<<<<<<<< - * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = Py_None; - - /* "_yaml.pyx":551 - * elif event.type == YAML_STREAM_START_EVENT: - * encoding = None - * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: # <<<<<<<<<<<<<< - * if self.unicode_source == 0: - * encoding = u"utf-8" - */ - __pyx_t_5 = (__pyx_v_event->data.stream_start.encoding == YAML_UTF8_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":552 - * encoding = None - * if 
event.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: # <<<<<<<<<<<<<< - * encoding = u"utf-8" - * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - */ - __pyx_t_5 = (__pyx_v_self->unicode_source == 0); - if (__pyx_t_5) { - - /* "_yaml.pyx":553 - * if event.data.stream_start.encoding == YAML_UTF8_ENCODING: - * if self.unicode_source == 0: - * encoding = u"utf-8" # <<<<<<<<<<<<<< - * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_12)); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = ((PyObject *)__pyx_kp_u_12); - goto __pyx_L5; - } - __pyx_L5:; - goto __pyx_L4; - } - - /* "_yaml.pyx":554 - * if self.unicode_source == 0: - * encoding = u"utf-8" - * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: # <<<<<<<<<<<<<< - * encoding = u"utf-16-le" - * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - */ - __pyx_t_5 = (__pyx_v_event->data.stream_start.encoding == YAML_UTF16LE_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":555 - * encoding = u"utf-8" - * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" # <<<<<<<<<<<<<< - * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_13)); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = ((PyObject *)__pyx_kp_u_13); - goto __pyx_L4; - } - - /* "_yaml.pyx":556 - * elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - * encoding = u"utf-16-le" - * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: # <<<<<<<<<<<<<< - * encoding = u"utf-16-be" - * return StreamStartEvent(start_mark, end_mark, encoding) - */ - __pyx_t_5 = (__pyx_v_event->data.stream_start.encoding == YAML_UTF16BE_ENCODING); - if (__pyx_t_5) { - - /* "_yaml.pyx":557 - * encoding = u"utf-16-le" - * elif event.data.stream_start.encoding == 
YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" # <<<<<<<<<<<<<< - * return StreamStartEvent(start_mark, end_mark, encoding) - * elif event.type == YAML_STREAM_END_EVENT: - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_14)); - __Pyx_DECREF(__pyx_v_encoding); - __pyx_v_encoding = ((PyObject *)__pyx_kp_u_14); - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":558 - * elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - * encoding = u"utf-16-be" - * return StreamStartEvent(start_mark, end_mark, encoding) # <<<<<<<<<<<<<< - * elif event.type == YAML_STREAM_END_EVENT: - * return StreamEndEvent(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamStartEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_encoding); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - __pyx_t_4 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 558; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":559 - * encoding = u"utf-16-be" - * return 
StreamStartEvent(start_mark, end_mark, encoding) - * elif event.type == YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<< - * return StreamEndEvent(start_mark, end_mark) - * elif event.type == YAML_DOCUMENT_START_EVENT: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_STREAM_END_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":560 - * return StreamStartEvent(start_mark, end_mark, encoding) - * elif event.type == YAML_STREAM_END_EVENT: - * return StreamEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif event.type == YAML_DOCUMENT_START_EVENT: - * explicit = False - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamEndEvent); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyTuple_New(2); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_2 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 560; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":561 - * elif event.type == YAML_STREAM_END_EVENT: - * return StreamEndEvent(start_mark, end_mark) - * elif event.type == YAML_DOCUMENT_START_EVENT: # <<<<<<<<<<<<<< - * explicit = False - * if event.data.document_start.implicit == 0: - */ - 
__pyx_t_5 = (__pyx_v_event->type == YAML_DOCUMENT_START_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":562 - * return StreamEndEvent(start_mark, end_mark) - * elif event.type == YAML_DOCUMENT_START_EVENT: - * explicit = False # <<<<<<<<<<<<<< - * if event.data.document_start.implicit == 0: - * explicit = True - */ - __pyx_v_explicit = 0; - - /* "_yaml.pyx":563 - * elif event.type == YAML_DOCUMENT_START_EVENT: - * explicit = False - * if event.data.document_start.implicit == 0: # <<<<<<<<<<<<<< - * explicit = True - * version = None - */ - __pyx_t_5 = (__pyx_v_event->data.document_start.implicit == 0); - if (__pyx_t_5) { - - /* "_yaml.pyx":564 - * explicit = False - * if event.data.document_start.implicit == 0: - * explicit = True # <<<<<<<<<<<<<< - * version = None - * if event.data.document_start.version_directive != NULL: - */ - __pyx_v_explicit = 1; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":565 - * if event.data.document_start.implicit == 0: - * explicit = True - * version = None # <<<<<<<<<<<<<< - * if event.data.document_start.version_directive != NULL: - * version = (event.data.document_start.version_directive.major, - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_version); - __pyx_v_version = Py_None; - - /* "_yaml.pyx":566 - * explicit = True - * version = None - * if event.data.document_start.version_directive != NULL: # <<<<<<<<<<<<<< - * version = (event.data.document_start.version_directive.major, - * event.data.document_start.version_directive.minor) - */ - __pyx_t_5 = (__pyx_v_event->data.document_start.version_directive != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":567 - * version = None - * if event.data.document_start.version_directive != NULL: - * version = (event.data.document_start.version_directive.major, # <<<<<<<<<<<<<< - * event.data.document_start.version_directive.minor) - * tags = None - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_event->data.document_start.version_directive->major); if (unlikely(!__pyx_t_2)) {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 567; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":568 - * if event.data.document_start.version_directive != NULL: - * version = (event.data.document_start.version_directive.major, - * event.data.document_start.version_directive.minor) # <<<<<<<<<<<<<< - * tags = None - * if event.data.document_start.tag_directives.start != NULL: - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_event->data.document_start.version_directive->minor); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 568; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 567; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __pyx_t_2 = 0; - __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_version); - __pyx_v_version = ((PyObject *)__pyx_t_4); - __pyx_t_4 = 0; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":569 - * version = (event.data.document_start.version_directive.major, - * event.data.document_start.version_directive.minor) - * tags = None # <<<<<<<<<<<<<< - * if event.data.document_start.tag_directives.start != NULL: - * tags = {} - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_tags); - __pyx_v_tags = Py_None; - - /* "_yaml.pyx":570 - * event.data.document_start.version_directive.minor) - * tags = None - * if event.data.document_start.tag_directives.start != NULL: # <<<<<<<<<<<<<< - * tags = {} - * tag_directive = event.data.document_start.tag_directives.start - */ - __pyx_t_5 = (__pyx_v_event->data.document_start.tag_directives.start != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":571 - * tags = None - * if event.data.document_start.tag_directives.start != NULL: - * tags = {} # <<<<<<<<<<<<<< - * 
tag_directive = event.data.document_start.tag_directives.start - * while tag_directive != event.data.document_start.tag_directives.end: - */ - __pyx_t_4 = PyDict_New(); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 571; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_DECREF(__pyx_v_tags); - __pyx_v_tags = ((PyObject *)__pyx_t_4); - __pyx_t_4 = 0; - - /* "_yaml.pyx":572 - * if event.data.document_start.tag_directives.start != NULL: - * tags = {} - * tag_directive = event.data.document_start.tag_directives.start # <<<<<<<<<<<<<< - * while tag_directive != event.data.document_start.tag_directives.end: - * handle = PyUnicode_FromString(tag_directive.handle) - */ - __pyx_v_tag_directive = __pyx_v_event->data.document_start.tag_directives.start; - - /* "_yaml.pyx":573 - * tags = {} - * tag_directive = event.data.document_start.tag_directives.start - * while tag_directive != event.data.document_start.tag_directives.end: # <<<<<<<<<<<<<< - * handle = PyUnicode_FromString(tag_directive.handle) - * prefix = PyUnicode_FromString(tag_directive.prefix) - */ - while (1) { - __pyx_t_5 = (__pyx_v_tag_directive != __pyx_v_event->data.document_start.tag_directives.end); - if (!__pyx_t_5) break; - - /* "_yaml.pyx":574 - * tag_directive = event.data.document_start.tag_directives.start - * while tag_directive != event.data.document_start.tag_directives.end: - * handle = PyUnicode_FromString(tag_directive.handle) # <<<<<<<<<<<<<< - * prefix = PyUnicode_FromString(tag_directive.prefix) - * tags[handle] = prefix - */ - __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->handle); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 574; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":575 - * while tag_directive != event.data.document_start.tag_directives.end: - * handle = 
PyUnicode_FromString(tag_directive.handle) - * prefix = PyUnicode_FromString(tag_directive.prefix) # <<<<<<<<<<<<<< - * tags[handle] = prefix - * tag_directive = tag_directive+1 - */ - __pyx_t_4 = PyUnicode_FromString(__pyx_v_tag_directive->prefix); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 575; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":576 - * handle = PyUnicode_FromString(tag_directive.handle) - * prefix = PyUnicode_FromString(tag_directive.prefix) - * tags[handle] = prefix # <<<<<<<<<<<<<< - * tag_directive = tag_directive+1 - * return DocumentStartEvent(start_mark, end_mark, - */ - if (PyObject_SetItem(__pyx_v_tags, __pyx_v_handle, __pyx_v_prefix) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 576; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":577 - * prefix = PyUnicode_FromString(tag_directive.prefix) - * tags[handle] = prefix - * tag_directive = tag_directive+1 # <<<<<<<<<<<<<< - * return DocumentStartEvent(start_mark, end_mark, - * explicit, version, tags) - */ - __pyx_v_tag_directive = (__pyx_v_tag_directive + 1); - } - goto __pyx_L8; - } - __pyx_L8:; - - /* "_yaml.pyx":578 - * tags[handle] = prefix - * tag_directive = tag_directive+1 - * return DocumentStartEvent(start_mark, end_mark, # <<<<<<<<<<<<<< - * explicit, version, tags) - * elif event.type == YAML_DOCUMENT_END_EVENT: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentStartEvent); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":579 - * tag_directive = tag_directive+1 - * return DocumentStartEvent(start_mark, end_mark, - * explicit, version, tags) # <<<<<<<<<<<<<< - * elif event.type == YAML_DOCUMENT_END_EVENT: - * explicit = False - */ - __pyx_t_1 = 
__Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 579; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyTuple_New(5); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 2, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_version); - PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_INCREF(__pyx_v_tags); - PyTuple_SET_ITEM(__pyx_t_2, 4, __pyx_v_tags); - __Pyx_GIVEREF(__pyx_v_tags); - __pyx_t_1 = 0; - __pyx_t_1 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 578; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":580 - * return DocumentStartEvent(start_mark, end_mark, - * explicit, version, tags) - * elif event.type == YAML_DOCUMENT_END_EVENT: # <<<<<<<<<<<<<< - * explicit = False - * if event.data.document_end.implicit == 0: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_DOCUMENT_END_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":581 - * explicit, version, tags) - * elif event.type == YAML_DOCUMENT_END_EVENT: - * explicit = False # <<<<<<<<<<<<<< - * if event.data.document_end.implicit == 0: - * explicit = True - */ - __pyx_v_explicit = 0; - - /* 
"_yaml.pyx":582 - * elif event.type == YAML_DOCUMENT_END_EVENT: - * explicit = False - * if event.data.document_end.implicit == 0: # <<<<<<<<<<<<<< - * explicit = True - * return DocumentEndEvent(start_mark, end_mark, explicit) - */ - __pyx_t_5 = (__pyx_v_event->data.document_end.implicit == 0); - if (__pyx_t_5) { - - /* "_yaml.pyx":583 - * explicit = False - * if event.data.document_end.implicit == 0: - * explicit = True # <<<<<<<<<<<<<< - * return DocumentEndEvent(start_mark, end_mark, explicit) - * elif event.type == YAML_ALIAS_EVENT: - */ - __pyx_v_explicit = 1; - goto __pyx_L11; - } - __pyx_L11:; - - /* "_yaml.pyx":584 - * if event.data.document_end.implicit == 0: - * explicit = True - * return DocumentEndEvent(start_mark, end_mark, explicit) # <<<<<<<<<<<<<< - * elif event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(event.data.alias.anchor) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_explicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(__pyx_t_1, ((PyObject 
*)__pyx_t_4), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 584; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":585 - * explicit = True - * return DocumentEndEvent(start_mark, end_mark, explicit) - * elif event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(event.data.alias.anchor) - * return AliasEvent(anchor, start_mark, end_mark) - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_ALIAS_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":586 - * return DocumentEndEvent(start_mark, end_mark, explicit) - * elif event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(event.data.alias.anchor) # <<<<<<<<<<<<<< - * return AliasEvent(anchor, start_mark, end_mark) - * elif event.type == YAML_SCALAR_EVENT: - */ - __pyx_t_2 = PyUnicode_FromString(__pyx_v_event->data.alias.anchor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 586; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":587 - * elif event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(event.data.alias.anchor) - * return AliasEvent(anchor, start_mark, end_mark) # <<<<<<<<<<<<<< - * elif event.type == YAML_SCALAR_EVENT: - * anchor = None - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__AliasEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - 
__Pyx_INCREF(__pyx_v_anchor); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_anchor); - __Pyx_GIVEREF(__pyx_v_anchor); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_1 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 587; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":588 - * anchor = PyUnicode_FromString(event.data.alias.anchor) - * return AliasEvent(anchor, start_mark, end_mark) - * elif event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<< - * anchor = None - * if event.data.scalar.anchor != NULL: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_SCALAR_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":589 - * return AliasEvent(anchor, start_mark, end_mark) - * elif event.type == YAML_SCALAR_EVENT: - * anchor = None # <<<<<<<<<<<<<< - * if event.data.scalar.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.scalar.anchor) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = Py_None; - - /* "_yaml.pyx":590 - * elif event.type == YAML_SCALAR_EVENT: - * anchor = None - * if event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(event.data.scalar.anchor) - * tag = None - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.anchor != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":591 - * anchor = None - * if event.data.scalar.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.scalar.anchor) # <<<<<<<<<<<<<< - * tag = None - * if 
event.data.scalar.tag != NULL: - */ - __pyx_t_1 = PyUnicode_FromString(__pyx_v_event->data.scalar.anchor); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 591; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L12; - } - __pyx_L12:; - - /* "_yaml.pyx":592 - * if event.data.scalar.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.scalar.anchor) - * tag = None # <<<<<<<<<<<<<< - * if event.data.scalar.tag != NULL: - * tag = PyUnicode_FromString(event.data.scalar.tag) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = Py_None; - - /* "_yaml.pyx":593 - * anchor = PyUnicode_FromString(event.data.scalar.anchor) - * tag = None - * if event.data.scalar.tag != NULL: # <<<<<<<<<<<<<< - * tag = PyUnicode_FromString(event.data.scalar.tag) - * value = PyUnicode_DecodeUTF8(event.data.scalar.value, - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.tag != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":594 - * tag = None - * if event.data.scalar.tag != NULL: - * tag = PyUnicode_FromString(event.data.scalar.tag) # <<<<<<<<<<<<<< - * value = PyUnicode_DecodeUTF8(event.data.scalar.value, - * event.data.scalar.length, 'strict') - */ - __pyx_t_1 = PyUnicode_FromString(__pyx_v_event->data.scalar.tag); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 594; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L13; - } - __pyx_L13:; - - /* "_yaml.pyx":596 - * tag = PyUnicode_FromString(event.data.scalar.tag) - * value = PyUnicode_DecodeUTF8(event.data.scalar.value, - * event.data.scalar.length, 'strict') # <<<<<<<<<<<<<< - * plain_implicit = False - * if event.data.scalar.plain_implicit == 1: - */ - __pyx_t_1 = PyUnicode_DecodeUTF8(__pyx_v_event->data.scalar.value, 
__pyx_v_event->data.scalar.length, __pyx_k__strict); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 595; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":597 - * value = PyUnicode_DecodeUTF8(event.data.scalar.value, - * event.data.scalar.length, 'strict') - * plain_implicit = False # <<<<<<<<<<<<<< - * if event.data.scalar.plain_implicit == 1: - * plain_implicit = True - */ - __pyx_v_plain_implicit = 0; - - /* "_yaml.pyx":598 - * event.data.scalar.length, 'strict') - * plain_implicit = False - * if event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<< - * plain_implicit = True - * quoted_implicit = False - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.plain_implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":599 - * plain_implicit = False - * if event.data.scalar.plain_implicit == 1: - * plain_implicit = True # <<<<<<<<<<<<<< - * quoted_implicit = False - * if event.data.scalar.quoted_implicit == 1: - */ - __pyx_v_plain_implicit = 1; - goto __pyx_L14; - } - __pyx_L14:; - - /* "_yaml.pyx":600 - * if event.data.scalar.plain_implicit == 1: - * plain_implicit = True - * quoted_implicit = False # <<<<<<<<<<<<<< - * if event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True - */ - __pyx_v_quoted_implicit = 0; - - /* "_yaml.pyx":601 - * plain_implicit = True - * quoted_implicit = False - * if event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<< - * quoted_implicit = True - * style = None - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.quoted_implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":602 - * quoted_implicit = False - * if event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True # <<<<<<<<<<<<<< - * style = None - * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - */ - __pyx_v_quoted_implicit = 1; - goto __pyx_L15; - } - __pyx_L15:; - - /* "_yaml.pyx":603 - * if 
event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True - * style = None # <<<<<<<<<<<<<< - * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = Py_None; - - /* "_yaml.pyx":604 - * quoted_implicit = True - * style = None - * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'' - * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.style == YAML_PLAIN_SCALAR_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":605 - * style = None - * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' # <<<<<<<<<<<<<< - * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_20)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_20); - goto __pyx_L16; - } - - /* "_yaml.pyx":606 - * if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' - * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'\'' - * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":607 - * style = u'' - * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' # <<<<<<<<<<<<<< - * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_21)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_21); - goto __pyx_L16; - } - - /* "_yaml.pyx":608 - * elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'"' - * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - 
*/ - __pyx_t_5 = (__pyx_v_event->data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":609 - * style = u'\'' - * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' # <<<<<<<<<<<<<< - * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_22)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_22); - goto __pyx_L16; - } - - /* "_yaml.pyx":610 - * elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'|' - * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.style == YAML_LITERAL_SCALAR_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":611 - * style = u'"' - * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' # <<<<<<<<<<<<<< - * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_23)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_23); - goto __pyx_L16; - } - - /* "_yaml.pyx":612 - * elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'>' - * return ScalarEvent(anchor, tag, - */ - __pyx_t_5 = (__pyx_v_event->data.scalar.style == YAML_FOLDED_SCALAR_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":613 - * style = u'|' - * elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' # <<<<<<<<<<<<<< - * return ScalarEvent(anchor, tag, - * (plain_implicit, quoted_implicit), - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_24)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_24); - goto __pyx_L16; - } - __pyx_L16:; - - /* "_yaml.pyx":614 - * elif event.data.scalar.style == 
YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - * return ScalarEvent(anchor, tag, # <<<<<<<<<<<<<< - * (plain_implicit, quoted_implicit), - * value, start_mark, end_mark, style) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":615 - * style = u'>' - * return ScalarEvent(anchor, tag, - * (plain_implicit, quoted_implicit), # <<<<<<<<<<<<<< - * value, start_mark, end_mark, style) - * elif event.type == YAML_SEQUENCE_START_EVENT: - */ - __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 615; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_4 = 0; - __pyx_t_2 = 0; - - /* "_yaml.pyx":616 - * return ScalarEvent(anchor, tag, - * (plain_implicit, quoted_implicit), - * value, start_mark, end_mark, style) # <<<<<<<<<<<<<< - * elif event.type == YAML_SEQUENCE_START_EVENT: - * anchor = None - */ - __pyx_t_2 = PyTuple_New(7); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_INCREF(__pyx_v_anchor); - PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_v_anchor); - __Pyx_GIVEREF(__pyx_v_anchor); - 
__Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_2, 2, ((PyObject *)__pyx_t_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_2, 3, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 4, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_2, 5, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_style); - PyTuple_SET_ITEM(__pyx_t_2, 6, __pyx_v_style); - __Pyx_GIVEREF(__pyx_v_style); - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(__pyx_t_1, ((PyObject *)__pyx_t_2), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 614; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0; - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":617 - * (plain_implicit, quoted_implicit), - * value, start_mark, end_mark, style) - * elif event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<< - * anchor = None - * if event.data.sequence_start.anchor != NULL: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_SEQUENCE_START_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":618 - * value, start_mark, end_mark, style) - * elif event.type == YAML_SEQUENCE_START_EVENT: - * anchor = None # <<<<<<<<<<<<<< - * if event.data.sequence_start.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = Py_None; - - /* "_yaml.pyx":619 - * elif event.type == YAML_SEQUENCE_START_EVENT: - * anchor = None - * if event.data.sequence_start.anchor != NULL: # 
<<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) - * tag = None - */ - __pyx_t_5 = (__pyx_v_event->data.sequence_start.anchor != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":620 - * anchor = None - * if event.data.sequence_start.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) # <<<<<<<<<<<<<< - * tag = None - * if event.data.sequence_start.tag != NULL: - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 620; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L17; - } - __pyx_L17:; - - /* "_yaml.pyx":621 - * if event.data.sequence_start.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) - * tag = None # <<<<<<<<<<<<<< - * if event.data.sequence_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.sequence_start.tag) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = Py_None; - - /* "_yaml.pyx":622 - * anchor = PyUnicode_FromString(event.data.sequence_start.anchor) - * tag = None - * if event.data.sequence_start.tag != NULL: # <<<<<<<<<<<<<< - * tag = PyUnicode_FromString(event.data.sequence_start.tag) - * implicit = False - */ - __pyx_t_5 = (__pyx_v_event->data.sequence_start.tag != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":623 - * tag = None - * if event.data.sequence_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.sequence_start.tag) # <<<<<<<<<<<<<< - * implicit = False - * if event.data.sequence_start.implicit == 1: - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_event->data.sequence_start.tag); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 623; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_3; 
- __pyx_t_3 = 0; - goto __pyx_L18; - } - __pyx_L18:; - - /* "_yaml.pyx":624 - * if event.data.sequence_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.sequence_start.tag) - * implicit = False # <<<<<<<<<<<<<< - * if event.data.sequence_start.implicit == 1: - * implicit = True - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":625 - * tag = PyUnicode_FromString(event.data.sequence_start.tag) - * implicit = False - * if event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<< - * implicit = True - * flow_style = None - */ - __pyx_t_5 = (__pyx_v_event->data.sequence_start.implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":626 - * implicit = False - * if event.data.sequence_start.implicit == 1: - * implicit = True # <<<<<<<<<<<<<< - * flow_style = None - * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - */ - __pyx_v_implicit = 1; - goto __pyx_L19; - } - __pyx_L19:; - - /* "_yaml.pyx":627 - * if event.data.sequence_start.implicit == 1: - * implicit = True - * flow_style = None # <<<<<<<<<<<<<< - * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = Py_None; - - /* "_yaml.pyx":628 - * implicit = True - * flow_style = None - * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<< - * flow_style = True - * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - */ - __pyx_t_5 = (__pyx_v_event->data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":629 - * flow_style = None - * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True # <<<<<<<<<<<<<< - * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False - */ - __pyx_t_3 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 629; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L20; - } - - /* "_yaml.pyx":630 - * if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True - * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<< - * flow_style = False - * return SequenceStartEvent(anchor, tag, implicit, - */ - __pyx_t_5 = (__pyx_v_event->data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":631 - * flow_style = True - * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False # <<<<<<<<<<<<<< - * return SequenceStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) - */ - __pyx_t_3 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 631; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L20; - } - __pyx_L20:; - - /* "_yaml.pyx":632 - * elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False - * return SequenceStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<< - * start_mark, end_mark, flow_style) - * elif event.type == YAML_MAPPING_START_EVENT: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceStartEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":633 - * flow_style = False - * return SequenceStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<< - * elif event.type == 
YAML_MAPPING_START_EVENT: - * anchor = None - */ - __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_anchor); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_anchor); - __Pyx_GIVEREF(__pyx_v_anchor); - __Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 3, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 4, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_flow_style); - PyTuple_SET_ITEM(__pyx_t_1, 5, __pyx_v_flow_style); - __Pyx_GIVEREF(__pyx_v_flow_style); - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 632; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":634 - * return SequenceStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) - * elif event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<< - * anchor = None - * if event.data.mapping_start.anchor != NULL: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_MAPPING_START_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":635 - * start_mark, end_mark, flow_style) - * elif event.type == YAML_MAPPING_START_EVENT: - * anchor = None # <<<<<<<<<<<<<< - * if event.data.mapping_start.anchor != NULL: - * anchor = 
PyUnicode_FromString(event.data.mapping_start.anchor) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = Py_None; - - /* "_yaml.pyx":636 - * elif event.type == YAML_MAPPING_START_EVENT: - * anchor = None - * if event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) - * tag = None - */ - __pyx_t_5 = (__pyx_v_event->data.mapping_start.anchor != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":637 - * anchor = None - * if event.data.mapping_start.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) # <<<<<<<<<<<<<< - * tag = None - * if event.data.mapping_start.tag != NULL: - */ - __pyx_t_2 = PyUnicode_FromString(__pyx_v_event->data.mapping_start.anchor); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 637; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L21; - } - __pyx_L21:; - - /* "_yaml.pyx":638 - * if event.data.mapping_start.anchor != NULL: - * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) - * tag = None # <<<<<<<<<<<<<< - * if event.data.mapping_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.mapping_start.tag) - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = Py_None; - - /* "_yaml.pyx":639 - * anchor = PyUnicode_FromString(event.data.mapping_start.anchor) - * tag = None - * if event.data.mapping_start.tag != NULL: # <<<<<<<<<<<<<< - * tag = PyUnicode_FromString(event.data.mapping_start.tag) - * implicit = False - */ - __pyx_t_5 = (__pyx_v_event->data.mapping_start.tag != NULL); - if (__pyx_t_5) { - - /* "_yaml.pyx":640 - * tag = None - * if event.data.mapping_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.mapping_start.tag) # <<<<<<<<<<<<<< - * implicit = False - * if event.data.mapping_start.implicit == 1: - */ - __pyx_t_2 = 
PyUnicode_FromString(__pyx_v_event->data.mapping_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 640; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L22; - } - __pyx_L22:; - - /* "_yaml.pyx":641 - * if event.data.mapping_start.tag != NULL: - * tag = PyUnicode_FromString(event.data.mapping_start.tag) - * implicit = False # <<<<<<<<<<<<<< - * if event.data.mapping_start.implicit == 1: - * implicit = True - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":642 - * tag = PyUnicode_FromString(event.data.mapping_start.tag) - * implicit = False - * if event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<< - * implicit = True - * flow_style = None - */ - __pyx_t_5 = (__pyx_v_event->data.mapping_start.implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":643 - * implicit = False - * if event.data.mapping_start.implicit == 1: - * implicit = True # <<<<<<<<<<<<<< - * flow_style = None - * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - */ - __pyx_v_implicit = 1; - goto __pyx_L23; - } - __pyx_L23:; - - /* "_yaml.pyx":644 - * if event.data.mapping_start.implicit == 1: - * implicit = True - * flow_style = None # <<<<<<<<<<<<<< - * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = Py_None; - - /* "_yaml.pyx":645 - * implicit = True - * flow_style = None - * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<< - * flow_style = True - * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - */ - __pyx_t_5 = (__pyx_v_event->data.mapping_start.style == YAML_FLOW_MAPPING_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":646 - * flow_style = None - * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True # <<<<<<<<<<<<<< - * elif 
event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 646; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L24; - } - - /* "_yaml.pyx":647 - * if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True - * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<< - * flow_style = False - * return MappingStartEvent(anchor, tag, implicit, - */ - __pyx_t_5 = (__pyx_v_event->data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE); - if (__pyx_t_5) { - - /* "_yaml.pyx":648 - * flow_style = True - * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False # <<<<<<<<<<<<<< - * return MappingStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 648; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L24; - } - __pyx_L24:; - - /* "_yaml.pyx":649 - * elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False - * return MappingStartEvent(anchor, tag, implicit, # <<<<<<<<<<<<<< - * start_mark, end_mark, flow_style) - * elif event.type == YAML_SEQUENCE_END_EVENT: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingStartEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":650 - * flow_style = False - * return MappingStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) # <<<<<<<<<<<<<< - * elif event.type == YAML_SEQUENCE_END_EVENT: - * return SequenceEndEvent(start_mark, end_mark) - */ - __pyx_t_3 = PyTuple_New(6); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_anchor); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_anchor); - __Pyx_GIVEREF(__pyx_v_anchor); - __Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 4, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_flow_style); - PyTuple_SET_ITEM(__pyx_t_3, 5, __pyx_v_flow_style); - __Pyx_GIVEREF(__pyx_v_flow_style); - __pyx_t_1 = 0; - __pyx_t_1 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 649; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":651 - * return MappingStartEvent(anchor, tag, implicit, - * start_mark, end_mark, flow_style) - * elif event.type == YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<< - * return SequenceEndEvent(start_mark, end_mark) - * elif event.type == YAML_MAPPING_END_EVENT: - */ - __pyx_t_5 = (__pyx_v_event->type == 
YAML_SEQUENCE_END_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":652 - * start_mark, end_mark, flow_style) - * elif event.type == YAML_SEQUENCE_END_EVENT: - * return SequenceEndEvent(start_mark, end_mark) # <<<<<<<<<<<<<< - * elif event.type == YAML_MAPPING_END_EVENT: - * return MappingEndEvent(start_mark, end_mark) - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_2 = PyObject_Call(__pyx_t_1, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 652; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":653 - * elif event.type == YAML_SEQUENCE_END_EVENT: - * return SequenceEndEvent(start_mark, end_mark) - * elif event.type == YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<< - * return MappingEndEvent(start_mark, end_mark) - * else: - */ - __pyx_t_5 = (__pyx_v_event->type == YAML_MAPPING_END_EVENT); - if (__pyx_t_5) { - - /* "_yaml.pyx":654 - * return SequenceEndEvent(start_mark, end_mark) - * elif event.type == YAML_MAPPING_END_EVENT: - * return MappingEndEvent(start_mark, end_mark) # 
<<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingEndEvent); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 1, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __pyx_t_1 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 654; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - /*else*/ { - - /* "_yaml.pyx":656 - * return MappingEndEvent(start_mark, end_mark) - * else: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ValueError("unknown event type") - * else: - */ - __pyx_t_5 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_5) { - - /* "_yaml.pyx":657 - * else: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("unknown event type") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"unknown event type") - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_29), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L25; - } - /*else*/ { - - /* "_yaml.pyx":659 - * raise ValueError("unknown event type") - * else: - * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<< - * - * def get_event(self): - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_30), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L25:; - } - __pyx_L3:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.CParser._event_to_object"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_start_mark); - __Pyx_DECREF((PyObject *)__pyx_v_end_mark); - __Pyx_DECREF(__pyx_v_encoding); - __Pyx_DECREF(__pyx_v_version); - __Pyx_DECREF(__pyx_v_tags); - __Pyx_DECREF(__pyx_v_handle); - __Pyx_DECREF(__pyx_v_prefix); - __Pyx_DECREF(__pyx_v_anchor); - __Pyx_DECREF(__pyx_v_tag); - __Pyx_DECREF(__pyx_v_value); - __Pyx_DECREF(__pyx_v_style); - __Pyx_DECREF(__pyx_v_flow_style); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":661 - * raise ValueError(u"unknown event type") - * - * def get_event(self): # <<<<<<<<<<<<<< - * if self.current_event is not None: - * value = self.current_event - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_8get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_8get_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_v_value; - PyObject *__pyx_r = NULL; - int 
__pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("get_event"); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":662 - * - * def get_event(self): - * if self.current_event is not None: # <<<<<<<<<<<<<< - * value = self.current_event - * self.current_event = None - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event != Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":663 - * def get_event(self): - * if self.current_event is not None: - * value = self.current_event # <<<<<<<<<<<<<< - * self.current_event = None - * else: - */ - __Pyx_INCREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event; - - /* "_yaml.pyx":664 - * if self.current_event is not None: - * value = self.current_event - * self.current_event = None # <<<<<<<<<<<<<< - * else: - * value = self._parse() - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event = Py_None; - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":666 - * self.current_event = None - * else: - * value = self._parse() # <<<<<<<<<<<<<< - * return value - * - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 666; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_L5:; - - /* "_yaml.pyx":667 - * else: - * value = self._parse() - * return value # <<<<<<<<<<<<<< - * - * def peek_event(self): - */ - 
__Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_value); - __pyx_r = __pyx_v_value; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.CParser.get_event"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_value); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":669 - * return value - * - * def peek_event(self): # <<<<<<<<<<<<<< - * if self.current_event is None: - * self.current_event = self._parse() - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_9peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_9peek_event(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - __Pyx_RefNannySetupContext("peek_event"); - - /* "_yaml.pyx":670 - * - * def peek_event(self): - * if self.current_event is None: # <<<<<<<<<<<<<< - * self.current_event = self._parse() - * return self.current_event - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":671 - * def peek_event(self): - * if self.current_event is None: - * self.current_event = self._parse() # <<<<<<<<<<<<<< - * return self.current_event - * - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 671; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event = __pyx_t_2; - 
__pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":672 - * if self.current_event is None: - * self.current_event = self._parse() - * return self.current_event # <<<<<<<<<<<<<< - * - * def check_event(self, *choices): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __pyx_r = ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.CParser.peek_event"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":674 - * return self.current_event - * - * def check_event(self, *choices): # <<<<<<<<<<<<<< - * if self.current_event is None: - * self.current_event = self._parse() - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_10check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_10check_event(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_choices = 0; - PyObject *__pyx_v_event_class; - PyObject *__pyx_v_choice; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - Py_ssize_t __pyx_t_4; - PyObject *__pyx_t_5 = NULL; - __Pyx_RefNannySetupContext("check_event"); - if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "check_event", 0))) return NULL; - __Pyx_INCREF(__pyx_args); - __pyx_v_choices = __pyx_args; - __pyx_v_event_class = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_choice = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":675 - * - * def check_event(self, *choices): - * if self.current_event is None: # <<<<<<<<<<<<<< - * self.current_event = self._parse() - * if self.current_event is None: - */ - __pyx_t_1 = (((struct 
__pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":676 - * def check_event(self, *choices): - * if self.current_event is None: - * self.current_event = self._parse() # <<<<<<<<<<<<<< - * if self.current_event is None: - * return False - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 676; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event); - ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":677 - * if self.current_event is None: - * self.current_event = self._parse() - * if self.current_event is None: # <<<<<<<<<<<<<< - * return False - * if not choices: - */ - __pyx_t_1 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":678 - * self.current_event = self._parse() - * if self.current_event is None: - * return False # <<<<<<<<<<<<<< - * if not choices: - * return True - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 678; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":679 - * if self.current_event is None: - * return False - * if not choices: # <<<<<<<<<<<<<< - * return True - * event_class = self.current_event.__class__ - */ - __pyx_t_1 = (((PyObject *)__pyx_v_choices) != Py_None) && 
(PyTuple_GET_SIZE(((PyObject *)__pyx_v_choices)) != 0); - __pyx_t_3 = (!__pyx_t_1); - if (__pyx_t_3) { - - /* "_yaml.pyx":680 - * return False - * if not choices: - * return True # <<<<<<<<<<<<<< - * event_class = self.current_event.__class__ - * for choice in choices: - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 680; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":681 - * if not choices: - * return True - * event_class = self.current_event.__class__ # <<<<<<<<<<<<<< - * for choice in choices: - * if event_class is choice: - */ - __pyx_t_2 = PyObject_GetAttr(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->current_event, __pyx_n_s____class__); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 681; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_event_class); - __pyx_v_event_class = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":682 - * return True - * event_class = self.current_event.__class__ - * for choice in choices: # <<<<<<<<<<<<<< - * if event_class is choice: - * return True - */ - if (unlikely(__pyx_v_choices == Py_None)) { - PyErr_SetString(PyExc_TypeError, "'NoneType' object is not iterable"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 682; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_4 = 0; __pyx_t_2 = ((PyObject *)__pyx_v_choices); __Pyx_INCREF(__pyx_t_2); - for (;;) { - if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_2)) break; - __pyx_t_5 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_4); __Pyx_INCREF(__pyx_t_5); __pyx_t_4++; - __Pyx_DECREF(__pyx_v_choice); - __pyx_v_choice = __pyx_t_5; - __pyx_t_5 = 0; - - /* "_yaml.pyx":683 - * event_class = self.current_event.__class__ - * for choice in choices: - * if event_class is choice: # <<<<<<<<<<<<<< - 
* return True - * return False - */ - __pyx_t_3 = (__pyx_v_event_class == __pyx_v_choice); - if (__pyx_t_3) { - - /* "_yaml.pyx":684 - * for choice in choices: - * if event_class is choice: - * return True # <<<<<<<<<<<<<< - * return False - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 684; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L0; - goto __pyx_L10; - } - __pyx_L10:; - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "_yaml.pyx":685 - * if event_class is choice: - * return True - * return False # <<<<<<<<<<<<<< - * - * def check_node(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 685; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_r = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_AddTraceback("_yaml.CParser.check_event"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_choices); - __Pyx_DECREF(__pyx_v_event_class); - __Pyx_DECREF(__pyx_v_choice); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":687 - * return False - * - * def check_node(self): # <<<<<<<<<<<<<< - * self._parse_next_event() - * if self.parsed_event.type == YAML_STREAM_START_EVENT: - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_11check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_11check_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - 
__Pyx_RefNannySetupContext("check_node"); - - /* "_yaml.pyx":688 - * - * def check_node(self): - * self._parse_next_event() # <<<<<<<<<<<<<< - * if self.parsed_event.type == YAML_STREAM_START_EVENT: - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 688; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":689 - * def check_node(self): - * self._parse_next_event() - * if self.parsed_event.type == YAML_STREAM_START_EVENT: # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() - */ - __pyx_t_2 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type == YAML_STREAM_START_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":690 - * self._parse_next_event() - * if self.parsed_event.type == YAML_STREAM_START_EVENT: - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - */ - yaml_event_delete((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event)); - - /* "_yaml.pyx":691 - * if self.parsed_event.type == YAML_STREAM_START_EVENT: - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() # <<<<<<<<<<<<<< - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * return True - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 691; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":692 - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() - * if self.parsed_event.type != 
YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<< - * return True - * return False - */ - __pyx_t_2 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type != YAML_STREAM_END_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":693 - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * return True # <<<<<<<<<<<<<< - * return False - * - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 693; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":694 - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * return True - * return False # <<<<<<<<<<<<<< - * - * def get_node(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 694; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser.check_node"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":696 - * return False - * - * def get_node(self): # <<<<<<<<<<<<<< - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_12get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_12get_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("get_node"); - - /* "_yaml.pyx":697 - * - * def get_node(self): - * 
self._parse_next_event() # <<<<<<<<<<<<<< - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * return self._compose_document() - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 697; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":698 - * def get_node(self): - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<< - * return self._compose_document() - * - */ - __pyx_t_2 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type != YAML_STREAM_END_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":699 - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * return self._compose_document() # <<<<<<<<<<<<<< - * - * def get_single_node(self): - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_compose_document(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 699; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_r = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L0; - goto __pyx_L5; - } - __pyx_L5:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser.get_node"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":701 - * return self._compose_document() - * - * def get_single_node(self): # <<<<<<<<<<<<<< - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) - */ - -static PyObject *__pyx_pf_5_yaml_7CParser_13get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED 
PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_7CParser_13get_single_node(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_v_document; - struct __pyx_obj_5_yaml_Mark *__pyx_v_mark; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - __Pyx_RefNannySetupContext("get_single_node"); - __pyx_v_document = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":702 - * - * def get_single_node(self): - * self._parse_next_event() # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 702; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":703 - * def get_single_node(self): - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * self._parse_next_event() - * document = None - */ - yaml_event_delete((&((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event)); - - /* "_yaml.pyx":704 - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() # <<<<<<<<<<<<<< - * document = None - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 704; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":705 - * yaml_event_delete(&self.parsed_event) - * 
self._parse_next_event() - * document = None # <<<<<<<<<<<<<< - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * document = self._compose_document() - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_document); - __pyx_v_document = Py_None; - - /* "_yaml.pyx":706 - * self._parse_next_event() - * document = None - * if self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<< - * document = self._compose_document() - * self._parse_next_event() - */ - __pyx_t_2 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type != YAML_STREAM_END_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":707 - * document = None - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * document = self._compose_document() # <<<<<<<<<<<<<< - * self._parse_next_event() - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_compose_document(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 707; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_document); - __pyx_v_document = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":708 - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * document = self._compose_document() - * self._parse_next_event() # <<<<<<<<<<<<<< - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * mark = Mark(self.stream_name, - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->__pyx_vtab)->_parse_next_event(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 708; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":709 - * document = self._compose_document() - * self._parse_next_event() - * if 
self.parsed_event.type != YAML_STREAM_END_EVENT: # <<<<<<<<<<<<<< - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - */ - __pyx_t_2 = (((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.type != YAML_STREAM_END_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":711 - * if self.parsed_event.type != YAML_STREAM_END_EVENT: - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_3 = PyInt_FromLong(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 711; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":712 - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_4 = PyInt_FromLong(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.start_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 712; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":713 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_5 = PyInt_FromLong(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 713; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - - /* "_yaml.pyx":714 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("expected a single document in the stream", - */ - __pyx_t_6 = 
PyTuple_New(6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 710; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - __Pyx_INCREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 0, ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - __Pyx_GIVEREF(((struct __pyx_obj_5_yaml_CParser *)__pyx_v_self)->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_t_5 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 710; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_mark)); - __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5); - __pyx_t_5 = 0; - - /* "_yaml.pyx":715 - * self.parsed_event.start_mark.column, - * None, None) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ComposerError("expected a single document in the stream", - * document.start_mark, "but found another document", mark) - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":716 - * None, None) - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("expected a single document in the stream", # <<<<<<<<<<<<<< - * document.start_mark, "but found another document", mark) - * else: - */ - __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - - /* "_yaml.pyx":717 - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("expected a single document in the stream", - * document.start_mark, "but found another document", mark) # <<<<<<<<<<<<<< - * else: - * raise ComposerError(u"expected a single document in the stream", - */ - __pyx_t_6 = PyObject_GetAttr(__pyx_v_document, __pyx_n_s__start_mark); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 717; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_4 = PyTuple_New(4); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_31)); - PyTuple_SET_ITEM(__pyx_t_4, 0, ((PyObject *)__pyx_kp_s_31)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_31)); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_32)); - PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_kp_s_32)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_32)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_4, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_6 = 0; - __pyx_t_6 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 716; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L7; - } - /*else*/ { - - /* "_yaml.pyx":719 - * document.start_mark, "but found another document", mark) - * else: - * raise 
ComposerError(u"expected a single document in the stream", # <<<<<<<<<<<<<< - * document.start_mark, u"but found another document", mark) - * return document - */ - __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - - /* "_yaml.pyx":720 - * else: - * raise ComposerError(u"expected a single document in the stream", - * document.start_mark, u"but found another document", mark) # <<<<<<<<<<<<<< - * return document - * - */ - __pyx_t_4 = PyObject_GetAttr(__pyx_v_document, __pyx_n_s__start_mark); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 720; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_5 = PyTuple_New(4); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_5)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_31)); - PyTuple_SET_ITEM(__pyx_t_5, 0, ((PyObject *)__pyx_kp_u_31)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_31)); - PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_32)); - PyTuple_SET_ITEM(__pyx_t_5, 2, ((PyObject *)__pyx_kp_u_32)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_32)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_5, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_5), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 719; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 
719; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L7:; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":721 - * raise ComposerError(u"expected a single document in the stream", - * document.start_mark, u"but found another document", mark) - * return document # <<<<<<<<<<<<<< - * - * cdef object _compose_document(self): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_document); - __pyx_r = __pyx_v_document; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("_yaml.CParser.get_single_node"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_document); - __Pyx_DECREF((PyObject *)__pyx_v_mark); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":723 - * return document - * - * cdef object _compose_document(self): # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * node = self._compose_node(None, None) - */ - -static PyObject *__pyx_f_5_yaml_7CParser__compose_document(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) { - PyObject *__pyx_v_node; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - __Pyx_RefNannySetupContext("_compose_document"); - __pyx_v_node = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":724 - * - * cdef object _compose_document(self): - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * node = self._compose_node(None, None) - * self._parse_next_event() - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":725 - * cdef object _compose_document(self): - * yaml_event_delete(&self.parsed_event) - * node = self._compose_node(None, None) # <<<<<<<<<<<<<< - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser 
*)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, Py_None, Py_None); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 725; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":726 - * yaml_event_delete(&self.parsed_event) - * node = self._compose_node(None, None) - * self._parse_next_event() # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * self.anchors = {} - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_2 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 726; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":727 - * node = self._compose_node(None, None) - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * self.anchors = {} - * return node - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":728 - * self._parse_next_event() - * yaml_event_delete(&self.parsed_event) - * self.anchors = {} # <<<<<<<<<<<<<< - * return node - * - */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 728; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_1)); - __Pyx_GOTREF(__pyx_v_self->anchors); - __Pyx_DECREF(__pyx_v_self->anchors); - __pyx_v_self->anchors = ((PyObject *)__pyx_t_1); - __pyx_t_1 = 0; - - /* "_yaml.pyx":729 - * yaml_event_delete(&self.parsed_event) - * self.anchors = {} - * return node # <<<<<<<<<<<<<< - * - * cdef object _compose_node(self, object parent, object index): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_node); - __pyx_r = __pyx_v_node; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - 
__Pyx_AddTraceback("_yaml.CParser._compose_document"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_node); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":731 - * return node - * - * cdef object _compose_node(self, object parent, object index): # <<<<<<<<<<<<<< - * self._parse_next_event() - * if self.parsed_event.type == YAML_ALIAS_EVENT: - */ - -static PyObject *__pyx_f_5_yaml_7CParser__compose_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) { - PyObject *__pyx_v_anchor; - struct __pyx_obj_5_yaml_Mark *__pyx_v_mark; - PyObject *__pyx_v_node; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - int __pyx_t_7; - int __pyx_t_8; - __Pyx_RefNannySetupContext("_compose_node"); - __pyx_v_anchor = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_node = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":732 - * - * cdef object _compose_node(self, object parent, object index): - * self._parse_next_event() # <<<<<<<<<<<<<< - * if self.parsed_event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 732; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":733 - * cdef object _compose_node(self, object parent, object index): - * self._parse_next_event() - * if self.parsed_event.type == YAML_ALIAS_EVENT: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) - * if anchor not in self.anchors: - */ - __pyx_t_2 = (__pyx_v_self->parsed_event.type == YAML_ALIAS_EVENT); - if 
(__pyx_t_2) { - - /* "_yaml.pyx":734 - * self._parse_next_event() - * if self.parsed_event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) # <<<<<<<<<<<<<< - * if anchor not in self.anchors: - * mark = Mark(self.stream_name, - */ - __pyx_t_3 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.alias.anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 734; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":735 - * if self.parsed_event.type == YAML_ALIAS_EVENT: - * anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) - * if anchor not in self.anchors: # <<<<<<<<<<<<<< - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - */ - __pyx_t_2 = (__Pyx_NegateNonNeg(PySequence_Contains(__pyx_v_self->anchors, __pyx_v_anchor))); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 735; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_2) { - - /* "_yaml.pyx":737 - * if anchor not in self.anchors: - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 737; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":738 - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 738; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":739 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_5 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 739; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - - /* "_yaml.pyx":740 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ComposerError(None, None, "found undefined alias", mark) - */ - __pyx_t_6 = PyTuple_New(6); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 736; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_6, 3, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_5 = 0; - __pyx_t_5 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 736; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_mark)); - __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_5); - __pyx_t_5 = 0; - - /* "_yaml.pyx":741 - * 
self.parsed_event.start_mark.column, - * None, None) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ComposerError(None, None, "found undefined alias", mark) - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":742 - * None, None) - * if PY_MAJOR_VERSION < 3: - * raise ComposerError(None, None, "found undefined alias", mark) # <<<<<<<<<<<<<< - * else: - * raise ComposerError(None, None, u"found undefined alias", mark) - */ - __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_6 = PyTuple_New(4); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 0, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 1, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_33)); - PyTuple_SET_ITEM(__pyx_t_6, 2, ((PyObject *)__pyx_kp_s_33)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_33)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_6, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_4 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 742; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":744 - * raise ComposerError(None, None, "found undefined alias", mark) - * 
else: - * raise ComposerError(None, None, u"found undefined alias", mark) # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * return self.anchors[anchor] - */ - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = PyTuple_New(4); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 0, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_6, 1, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_33)); - PyTuple_SET_ITEM(__pyx_t_6, 2, ((PyObject *)__pyx_kp_u_33)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_33)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_6, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_5 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_5, 0, 0); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 744; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L5:; - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":745 - * else: - * raise ComposerError(None, None, u"found undefined alias", mark) - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * return self.anchors[anchor] - * anchor = None - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":746 - * raise ComposerError(None, None, u"found undefined alias", mark) - * 
yaml_event_delete(&self.parsed_event) - * return self.anchors[anchor] # <<<<<<<<<<<<<< - * anchor = None - * if self.parsed_event.type == YAML_SCALAR_EVENT \ - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_5 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (!__pyx_t_5) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 746; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_r = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":747 - * yaml_event_delete(&self.parsed_event) - * return self.anchors[anchor] - * anchor = None # <<<<<<<<<<<<<< - * if self.parsed_event.type == YAML_SCALAR_EVENT \ - * and self.parsed_event.data.scalar.anchor != NULL: - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = Py_None; - - /* "_yaml.pyx":749 - * anchor = None - * if self.parsed_event.type == YAML_SCALAR_EVENT \ - * and self.parsed_event.data.scalar.anchor != NULL: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ - */ - __pyx_t_2 = (__pyx_v_self->parsed_event.type == YAML_SCALAR_EVENT); - if (__pyx_t_2) { - __pyx_t_7 = (__pyx_v_self->parsed_event.data.scalar.anchor != NULL); - __pyx_t_8 = __pyx_t_7; - } else { - __pyx_t_8 = __pyx_t_2; - } - if (__pyx_t_8) { - - /* "_yaml.pyx":750 - * if self.parsed_event.type == YAML_SCALAR_EVENT \ - * and self.parsed_event.data.scalar.anchor != NULL: - * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) # <<<<<<<<<<<<<< - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ - * and self.parsed_event.data.sequence_start.anchor != NULL: - */ - __pyx_t_5 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 750; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_anchor); - 
__pyx_v_anchor = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L6; - } - - /* "_yaml.pyx":752 - * anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ - * and self.parsed_event.data.sequence_start.anchor != NULL: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ - */ - __pyx_t_8 = (__pyx_v_self->parsed_event.type == YAML_SEQUENCE_START_EVENT); - if (__pyx_t_8) { - __pyx_t_2 = (__pyx_v_self->parsed_event.data.sequence_start.anchor != NULL); - __pyx_t_7 = __pyx_t_2; - } else { - __pyx_t_7 = __pyx_t_8; - } - if (__pyx_t_7) { - - /* "_yaml.pyx":753 - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ - * and self.parsed_event.data.sequence_start.anchor != NULL: - * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) # <<<<<<<<<<<<<< - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ - * and self.parsed_event.data.mapping_start.anchor != NULL: - */ - __pyx_t_5 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 753; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L6; - } - - /* "_yaml.pyx":755 - * anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ - * and self.parsed_event.data.mapping_start.anchor != NULL: # <<<<<<<<<<<<<< - * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) - * if anchor is not None: - */ - __pyx_t_7 = (__pyx_v_self->parsed_event.type == YAML_MAPPING_START_EVENT); - if (__pyx_t_7) { - __pyx_t_8 = (__pyx_v_self->parsed_event.data.mapping_start.anchor != NULL); - __pyx_t_2 = __pyx_t_8; - } else { - 
__pyx_t_2 = __pyx_t_7; - } - if (__pyx_t_2) { - - /* "_yaml.pyx":756 - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ - * and self.parsed_event.data.mapping_start.anchor != NULL: - * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) # <<<<<<<<<<<<<< - * if anchor is not None: - * if anchor in self.anchors: - */ - __pyx_t_5 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.anchor); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 756; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_anchor); - __pyx_v_anchor = __pyx_t_5; - __pyx_t_5 = 0; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":757 - * and self.parsed_event.data.mapping_start.anchor != NULL: - * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) - * if anchor is not None: # <<<<<<<<<<<<<< - * if anchor in self.anchors: - * mark = Mark(self.stream_name, - */ - __pyx_t_2 = (__pyx_v_anchor != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":758 - * anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) - * if anchor is not None: - * if anchor in self.anchors: # <<<<<<<<<<<<<< - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - */ - __pyx_t_2 = ((PySequence_Contains(__pyx_v_self->anchors, __pyx_v_anchor))); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 758; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_2) { - - /* "_yaml.pyx":760 - * if anchor in self.anchors: - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_5 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 760; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - - /* 
"_yaml.pyx":761 - * mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_6 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 761; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - - /* "_yaml.pyx":762 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 762; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":763 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("found duplicate anchor; first occurence", - */ - __pyx_t_3 = PyTuple_New(6); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 759; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_3, 2, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_3, 3, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_3, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_3, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_5 = 0; - __pyx_t_6 = 0; - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject 
*)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 759; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_mark)); - __pyx_v_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_4); - __pyx_t_4 = 0; - - /* "_yaml.pyx":764 - * self.parsed_event.start_mark.column, - * None, None) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ComposerError("found duplicate anchor; first occurence", - * self.anchors[anchor].start_mark, "second occurence", mark) - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":765 - * None, None) - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("found duplicate anchor; first occurence", # <<<<<<<<<<<<<< - * self.anchors[anchor].start_mark, "second occurence", mark) - * else: - */ - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":766 - * if PY_MAJOR_VERSION < 3: - * raise ComposerError("found duplicate anchor; first occurence", - * self.anchors[anchor].start_mark, "second occurence", mark) # <<<<<<<<<<<<<< - * else: - * raise ComposerError(u"found duplicate anchor; first occurence", - */ - __pyx_t_3 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if (!__pyx_t_3) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 766; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_6 = PyObject_GetAttr(__pyx_t_3, __pyx_n_s__start_mark); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 766; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; 
goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_34)); - PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_kp_s_34)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_34)); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_35)); - PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_kp_s_35)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_35)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_6 = 0; - __pyx_t_6 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 765; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L9; - } - /*else*/ { - - /* "_yaml.pyx":768 - * self.anchors[anchor].start_mark, "second occurence", mark) - * else: - * raise ComposerError(u"found duplicate anchor; first occurence", # <<<<<<<<<<<<<< - * self.anchors[anchor].start_mark, u"second occurence", mark) - * self.descend_resolver(parent, index) - */ - __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - - /* "_yaml.pyx":769 - * else: - * raise ComposerError(u"found duplicate anchor; first occurence", - * self.anchors[anchor].start_mark, u"second occurence", mark) # <<<<<<<<<<<<<< - * self.descend_resolver(parent, index) - * if self.parsed_event.type == YAML_SCALAR_EVENT: - */ - __pyx_t_3 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_anchor); if 
(!__pyx_t_3) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyObject_GetAttr(__pyx_t_3, __pyx_n_s__start_mark); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 769; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(4); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_34)); - PyTuple_SET_ITEM(__pyx_t_3, 0, ((PyObject *)__pyx_kp_u_34)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_34)); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_35)); - PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_kp_u_35)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_35)); - __Pyx_INCREF(((PyObject *)__pyx_v_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_mark)); - __pyx_t_4 = 0; - __pyx_t_4 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 768; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L9:; - goto __pyx_L8; - } - __pyx_L8:; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":770 - * raise ComposerError(u"found duplicate anchor; first occurence", - * self.anchors[anchor].start_mark, u"second occurence", mark) - * self.descend_resolver(parent, index) # <<<<<<<<<<<<<< - * if self.parsed_event.type == YAML_SCALAR_EVENT: - * node = 
self._compose_scalar_node(anchor) - */ - __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__descend_resolver); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_parent); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_parent); - __Pyx_GIVEREF(__pyx_v_parent); - __Pyx_INCREF(__pyx_v_index); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_index); - __Pyx_GIVEREF(__pyx_v_index); - __pyx_t_6 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 770; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - - /* "_yaml.pyx":771 - * self.anchors[anchor].start_mark, u"second occurence", mark) - * self.descend_resolver(parent, index) - * if self.parsed_event.type == YAML_SCALAR_EVENT: # <<<<<<<<<<<<<< - * node = self._compose_scalar_node(anchor) - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: - */ - __pyx_t_2 = (__pyx_v_self->parsed_event.type == YAML_SCALAR_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":772 - * self.descend_resolver(parent, index) - * if self.parsed_event.type == YAML_SCALAR_EVENT: - * node = self._compose_scalar_node(anchor) # <<<<<<<<<<<<<< - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: - * node = self._compose_sequence_node(anchor) - */ - __pyx_t_6 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_scalar_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 772; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L10; - } - - /* "_yaml.pyx":773 - * if self.parsed_event.type == YAML_SCALAR_EVENT: - * node = self._compose_scalar_node(anchor) - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: # <<<<<<<<<<<<<< - * node = self._compose_sequence_node(anchor) - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: - */ - __pyx_t_2 = (__pyx_v_self->parsed_event.type == YAML_SEQUENCE_START_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":774 - * node = self._compose_scalar_node(anchor) - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: - * node = self._compose_sequence_node(anchor) # <<<<<<<<<<<<<< - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: - * node = self._compose_mapping_node(anchor) - */ - __pyx_t_6 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_sequence_node(__pyx_v_self, __pyx_v_anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 774; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L10; - } - - /* "_yaml.pyx":775 - * elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: - * node = self._compose_sequence_node(anchor) - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: # <<<<<<<<<<<<<< - * node = self._compose_mapping_node(anchor) - * self.ascend_resolver() - */ - __pyx_t_2 = (__pyx_v_self->parsed_event.type == YAML_MAPPING_START_EVENT); - if (__pyx_t_2) { - - /* "_yaml.pyx":776 - * node = self._compose_sequence_node(anchor) - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: - * node = self._compose_mapping_node(anchor) # <<<<<<<<<<<<<< - * self.ascend_resolver() - * return node - */ - __pyx_t_6 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_mapping_node(__pyx_v_self, 
__pyx_v_anchor); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 776; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L10; - } - __pyx_L10:; - - /* "_yaml.pyx":777 - * elif self.parsed_event.type == YAML_MAPPING_START_EVENT: - * node = self._compose_mapping_node(anchor) - * self.ascend_resolver() # <<<<<<<<<<<<<< - * return node - * - */ - __pyx_t_6 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__ascend_resolver); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 777; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_3 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_empty_tuple), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 777; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - - /* "_yaml.pyx":778 - * node = self._compose_mapping_node(anchor) - * self.ascend_resolver() - * return node # <<<<<<<<<<<<<< - * - * cdef _compose_scalar_node(self, object anchor): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_node); - __pyx_r = __pyx_v_node; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("_yaml.CParser._compose_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_anchor); - __Pyx_DECREF((PyObject *)__pyx_v_mark); - __Pyx_DECREF(__pyx_v_node); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":780 - * return node - * - * cdef _compose_scalar_node(self, object anchor): # <<<<<<<<<<<<<< - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - */ - -static PyObject 
*__pyx_f_5_yaml_7CParser__compose_scalar_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) { - struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark; - struct __pyx_obj_5_yaml_Mark *__pyx_v_end_mark; - PyObject *__pyx_v_value; - int __pyx_v_plain_implicit; - int __pyx_v_quoted_implicit; - PyObject *__pyx_v_tag; - PyObject *__pyx_v_style; - PyObject *__pyx_v_node; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - int __pyx_t_8; - PyObject *__pyx_t_9 = NULL; - __Pyx_RefNannySetupContext("_compose_scalar_node"); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_tag = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_style = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_node = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":782 - * cdef _compose_scalar_node(self, object anchor): - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 782; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":783 - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 783; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* 
"_yaml.pyx":784 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * end_mark = Mark(self.stream_name, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 784; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":785 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 781; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_start_mark)); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "_yaml.pyx":787 - * None, None) - * end_mark = Mark(self.stream_name, - * 
self.parsed_event.end_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 787; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":788 - * end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.column, - * None, None) - */ - __pyx_t_4 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 788; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - - /* "_yaml.pyx":789 - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<< - * None, None) - * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 789; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":790 - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - * None, None) # <<<<<<<<<<<<<< - * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, - * self.parsed_event.data.scalar.length, 'strict') - */ - __pyx_t_1 = PyTuple_New(6); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 786; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_1, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_4); - 
__Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_1, 3, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_4 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 786; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_end_mark)); - __pyx_v_end_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":792 - * None, None) - * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, - * self.parsed_event.data.scalar.length, 'strict') # <<<<<<<<<<<<<< - * plain_implicit = False - * if self.parsed_event.data.scalar.plain_implicit == 1: - */ - __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_self->parsed_event.data.scalar.value, __pyx_v_self->parsed_event.data.scalar.length, __pyx_k__strict); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 791; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":793 - * value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, - * self.parsed_event.data.scalar.length, 'strict') - * plain_implicit = False # <<<<<<<<<<<<<< - * if self.parsed_event.data.scalar.plain_implicit == 1: - * plain_implicit = True - */ - __pyx_v_plain_implicit = 0; - - /* "_yaml.pyx":794 - * self.parsed_event.data.scalar.length, 'strict') - * plain_implicit = False - * if self.parsed_event.data.scalar.plain_implicit == 1: # <<<<<<<<<<<<<< - * plain_implicit = True - * quoted_implicit = False - */ - __pyx_t_5 = 
(__pyx_v_self->parsed_event.data.scalar.plain_implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":795 - * plain_implicit = False - * if self.parsed_event.data.scalar.plain_implicit == 1: - * plain_implicit = True # <<<<<<<<<<<<<< - * quoted_implicit = False - * if self.parsed_event.data.scalar.quoted_implicit == 1: - */ - __pyx_v_plain_implicit = 1; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":796 - * if self.parsed_event.data.scalar.plain_implicit == 1: - * plain_implicit = True - * quoted_implicit = False # <<<<<<<<<<<<<< - * if self.parsed_event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True - */ - __pyx_v_quoted_implicit = 0; - - /* "_yaml.pyx":797 - * plain_implicit = True - * quoted_implicit = False - * if self.parsed_event.data.scalar.quoted_implicit == 1: # <<<<<<<<<<<<<< - * quoted_implicit = True - * if self.parsed_event.data.scalar.tag == NULL \ - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.scalar.quoted_implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":798 - * quoted_implicit = False - * if self.parsed_event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True # <<<<<<<<<<<<<< - * if self.parsed_event.data.scalar.tag == NULL \ - * or (self.parsed_event.data.scalar.tag[0] == c'!' - */ - __pyx_v_quoted_implicit = 1; - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":799 - * if self.parsed_event.data.scalar.quoted_implicit == 1: - * quoted_implicit = True - * if self.parsed_event.data.scalar.tag == NULL \ # <<<<<<<<<<<<<< - * or (self.parsed_event.data.scalar.tag[0] == c'!' - * and self.parsed_event.data.scalar.tag[1] == c'\0'): - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.scalar.tag == NULL); - if (!__pyx_t_5) { - - /* "_yaml.pyx":800 - * quoted_implicit = True - * if self.parsed_event.data.scalar.tag == NULL \ - * or (self.parsed_event.data.scalar.tag[0] == c'!' 
# <<<<<<<<<<<<<< - * and self.parsed_event.data.scalar.tag[1] == c'\0'): - * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) - */ - __pyx_t_6 = ((__pyx_v_self->parsed_event.data.scalar.tag[0]) == '!'); - if (__pyx_t_6) { - - /* "_yaml.pyx":801 - * if self.parsed_event.data.scalar.tag == NULL \ - * or (self.parsed_event.data.scalar.tag[0] == c'!' - * and self.parsed_event.data.scalar.tag[1] == c'\0'): # <<<<<<<<<<<<<< - * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) - * else: - */ - __pyx_t_7 = ((__pyx_v_self->parsed_event.data.scalar.tag[1]) == '\x00'); - __pyx_t_8 = __pyx_t_7; - } else { - __pyx_t_8 = __pyx_t_6; - } - __pyx_t_6 = __pyx_t_8; - } else { - __pyx_t_6 = __pyx_t_5; - } - if (__pyx_t_6) { - - /* "_yaml.pyx":802 - * or (self.parsed_event.data.scalar.tag[0] == c'!' - * and self.parsed_event.data.scalar.tag[1] == c'\0'): - * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) # <<<<<<<<<<<<<< - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) - */ - __pyx_t_2 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = __Pyx_PyBool_FromLong(__pyx_v_plain_implicit); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_3 = __Pyx_PyBool_FromLong(__pyx_v_quoted_implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_9 = PyTuple_New(2); if (unlikely(!__pyx_t_9)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_9)); - PyTuple_SET_ITEM(__pyx_t_9, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_9, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_4 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyTuple_New(3); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_t_9)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_9)); - __pyx_t_1 = 0; - __pyx_t_9 = 0; - __pyx_t_9 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 802; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_9); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_9; - __pyx_t_9 = 0; - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":804 - * tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) # <<<<<<<<<<<<<< - * style = None - * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - */ - __pyx_t_9 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.scalar.tag); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 804; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_9); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_9; - __pyx_t_9 = 0; - } - __pyx_L5:; - - /* "_yaml.pyx":805 - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) - * style = None # 
<<<<<<<<<<<<<< - * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = Py_None; - - /* "_yaml.pyx":806 - * tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) - * style = None - * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'' - * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":807 - * style = None - * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' # <<<<<<<<<<<<<< - * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_20)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_20); - goto __pyx_L6; - } - - /* "_yaml.pyx":808 - * if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - * style = u'' - * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'\'' - * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":809 - * style = u'' - * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' # <<<<<<<<<<<<<< - * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_21)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_21); - goto __pyx_L6; - } - - /* "_yaml.pyx":810 - * elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - * style = u'\'' - * elif self.parsed_event.data.scalar.style == 
YAML_DOUBLE_QUOTED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'"' - * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":811 - * style = u'\'' - * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' # <<<<<<<<<<<<<< - * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_22)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_22); - goto __pyx_L6; - } - - /* "_yaml.pyx":812 - * elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - * style = u'"' - * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'|' - * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":813 - * style = u'"' - * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' # <<<<<<<<<<<<<< - * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_23)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_23); - goto __pyx_L6; - } - - /* "_yaml.pyx":814 - * elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - * style = u'|' - * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: # <<<<<<<<<<<<<< - * style = u'>' - * node = ScalarNode(tag, value, start_mark, end_mark, style) - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":815 - * style = u'|' - * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' 
# <<<<<<<<<<<<<< - * node = ScalarNode(tag, value, start_mark, end_mark, style) - * if anchor is not None: - */ - __Pyx_INCREF(((PyObject *)__pyx_kp_u_24)); - __Pyx_DECREF(__pyx_v_style); - __pyx_v_style = ((PyObject *)__pyx_kp_u_24); - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":816 - * elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - * style = u'>' - * node = ScalarNode(tag, value, start_mark, end_mark, style) # <<<<<<<<<<<<<< - * if anchor is not None: - * self.anchors[anchor] = node - */ - __pyx_t_9 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_9); - __pyx_t_3 = PyTuple_New(5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(((PyObject *)__pyx_v_end_mark)); - PyTuple_SET_ITEM(__pyx_t_3, 3, ((PyObject *)__pyx_v_end_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_end_mark)); - __Pyx_INCREF(__pyx_v_style); - PyTuple_SET_ITEM(__pyx_t_3, 4, __pyx_v_style); - __Pyx_GIVEREF(__pyx_v_style); - __pyx_t_2 = PyObject_Call(__pyx_t_9, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 816; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_9); __pyx_t_9 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_2; - __pyx_t_2 = 0; - - /* 
"_yaml.pyx":817 - * style = u'>' - * node = ScalarNode(tag, value, start_mark, end_mark, style) - * if anchor is not None: # <<<<<<<<<<<<<< - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_6 = (__pyx_v_anchor != Py_None); - if (__pyx_t_6) { - - /* "_yaml.pyx":818 - * node = ScalarNode(tag, value, start_mark, end_mark, style) - * if anchor is not None: - * self.anchors[anchor] = node # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * return node - */ - if (PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 818; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":819 - * if anchor is not None: - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * return node - * - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":820 - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) - * return node # <<<<<<<<<<<<<< - * - * cdef _compose_sequence_node(self, object anchor): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_node); - __pyx_r = __pyx_v_node; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_9); - __Pyx_AddTraceback("_yaml.CParser._compose_scalar_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_start_mark); - __Pyx_DECREF((PyObject *)__pyx_v_end_mark); - __Pyx_DECREF(__pyx_v_value); - __Pyx_DECREF(__pyx_v_tag); - __Pyx_DECREF(__pyx_v_style); - __Pyx_DECREF(__pyx_v_node); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":822 - * return node - * - * cdef _compose_sequence_node(self, object anchor): # <<<<<<<<<<<<<< - * cdef int index - * start_mark = 
Mark(self.stream_name, - */ - -static PyObject *__pyx_f_5_yaml_7CParser__compose_sequence_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) { - int __pyx_v_index; - struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark; - int __pyx_v_implicit; - PyObject *__pyx_v_tag; - PyObject *__pyx_v_flow_style; - PyObject *__pyx_v_value; - PyObject *__pyx_v_node; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - int __pyx_t_8; - int __pyx_t_9; - __Pyx_RefNannySetupContext("_compose_sequence_node"); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_tag = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_flow_style = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_node = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":825 - * cdef int index - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 825; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":826 - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 826; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":827 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * 
self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * implicit = False - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 827; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":828 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * implicit = False - * if self.parsed_event.data.sequence_start.implicit == 1: - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 824; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 824; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_start_mark)); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "_yaml.pyx":829 - * self.parsed_event.start_mark.column, - * None, None) - * implicit = False # <<<<<<<<<<<<<< - * if self.parsed_event.data.sequence_start.implicit == 1: - * implicit = True - 
*/ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":830 - * None, None) - * implicit = False - * if self.parsed_event.data.sequence_start.implicit == 1: # <<<<<<<<<<<<<< - * implicit = True - * if self.parsed_event.data.sequence_start.tag == NULL \ - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.sequence_start.implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":831 - * implicit = False - * if self.parsed_event.data.sequence_start.implicit == 1: - * implicit = True # <<<<<<<<<<<<<< - * if self.parsed_event.data.sequence_start.tag == NULL \ - * or (self.parsed_event.data.sequence_start.tag[0] == c'!' - */ - __pyx_v_implicit = 1; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":832 - * if self.parsed_event.data.sequence_start.implicit == 1: - * implicit = True - * if self.parsed_event.data.sequence_start.tag == NULL \ # <<<<<<<<<<<<<< - * or (self.parsed_event.data.sequence_start.tag[0] == c'!' - * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.sequence_start.tag == NULL); - if (!__pyx_t_5) { - - /* "_yaml.pyx":833 - * implicit = True - * if self.parsed_event.data.sequence_start.tag == NULL \ - * or (self.parsed_event.data.sequence_start.tag[0] == c'!' # <<<<<<<<<<<<<< - * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): - * tag = self.resolve(SequenceNode, None, implicit) - */ - __pyx_t_6 = ((__pyx_v_self->parsed_event.data.sequence_start.tag[0]) == '!'); - if (__pyx_t_6) { - - /* "_yaml.pyx":834 - * if self.parsed_event.data.sequence_start.tag == NULL \ - * or (self.parsed_event.data.sequence_start.tag[0] == c'!' 
- * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): # <<<<<<<<<<<<<< - * tag = self.resolve(SequenceNode, None, implicit) - * else: - */ - __pyx_t_7 = ((__pyx_v_self->parsed_event.data.sequence_start.tag[1]) == '\x00'); - __pyx_t_8 = __pyx_t_7; - } else { - __pyx_t_8 = __pyx_t_6; - } - __pyx_t_6 = __pyx_t_8; - } else { - __pyx_t_6 = __pyx_t_5; - } - if (__pyx_t_6) { - - /* "_yaml.pyx":835 - * or (self.parsed_event.data.sequence_start.tag[0] == c'!' - * and self.parsed_event.data.sequence_start.tag[1] == c'\0'): - * tag = self.resolve(SequenceNode, None, implicit) # <<<<<<<<<<<<<< - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) - */ - __pyx_t_3 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 1, Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_4 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 835; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L4; - } - /*else*/ { - - /* "_yaml.pyx":837 - * tag = self.resolve(SequenceNode, None, implicit) - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) # <<<<<<<<<<<<<< - * flow_style = None - * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - */ - __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.sequence_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 837; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_L4:; - - /* "_yaml.pyx":838 - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) - * flow_style = None # <<<<<<<<<<<<<< - * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = Py_None; - - /* "_yaml.pyx":839 - * tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) - * flow_style = None - * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: # <<<<<<<<<<<<<< - * flow_style = True - * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":840 - * flow_style = None - * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True # <<<<<<<<<<<<<< - * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_2)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 840; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - - /* "_yaml.pyx":841 - * if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - * flow_style = True - * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: # <<<<<<<<<<<<<< - * flow_style = False - * value = [] - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":842 - * flow_style = True - * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False # <<<<<<<<<<<<<< - * value = [] - * node = SequenceNode(tag, value, start_mark, None, flow_style) - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 842; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":843 - * elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - * flow_style = False - * value = [] # <<<<<<<<<<<<<< - * node = SequenceNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: - */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 843; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_DECREF(((PyObject *)__pyx_v_value)); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":844 - * flow_style = False - * value = [] - * node = SequenceNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<< - * if anchor is not None: - * self.anchors[anchor] = node - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceNode); 
if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(5); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - __Pyx_INCREF(((PyObject *)__pyx_v_value)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_value)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_value)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 3, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(__pyx_v_flow_style); - PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_flow_style); - __Pyx_GIVEREF(__pyx_v_flow_style); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 844; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":845 - * value = [] - * node = SequenceNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: # <<<<<<<<<<<<<< - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_6 = (__pyx_v_anchor != Py_None); - if (__pyx_t_6) { - - /* "_yaml.pyx":846 - * node = SequenceNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: - * self.anchors[anchor] = node # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * index = 0 - */ - if (PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 846; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":847 - * if anchor is not None: - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * index = 0 - * self._parse_next_event() - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":848 - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) - * index = 0 # <<<<<<<<<<<<<< - * self._parse_next_event() - * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: - */ - __pyx_v_index = 0; - - /* "_yaml.pyx":849 - * yaml_event_delete(&self.parsed_event) - * index = 0 - * self._parse_next_event() # <<<<<<<<<<<<<< - * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: - * value.append(self._compose_node(node, index)) - */ - __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 849; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":850 - * index = 0 - * self._parse_next_event() - * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: # <<<<<<<<<<<<<< - * value.append(self._compose_node(node, index)) - * index = index+1 - */ - while (1) { - __pyx_t_6 = (__pyx_v_self->parsed_event.type != YAML_SEQUENCE_END_EVENT); - if (!__pyx_t_6) break; - - /* "_yaml.pyx":851 - * self._parse_next_event() - * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: - * value.append(self._compose_node(node, index)) # <<<<<<<<<<<<<< - * index = index+1 - * self._parse_next_event() - */ - if (unlikely(__pyx_v_value == Py_None)) { - PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_3 = PyInt_FromLong(__pyx_v_index); if (unlikely(!__pyx_t_3)) {__pyx_filename = 
__pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_t_3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_9 = PyList_Append(__pyx_v_value, __pyx_t_1); if (unlikely(__pyx_t_9 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 851; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":852 - * while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: - * value.append(self._compose_node(node, index)) - * index = index+1 # <<<<<<<<<<<<<< - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, - */ - __pyx_v_index = (__pyx_v_index + 1); - - /* "_yaml.pyx":853 - * value.append(self._compose_node(node, index)) - * index = index+1 - * self._parse_next_event() # <<<<<<<<<<<<<< - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - */ - __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 853; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - - /* "_yaml.pyx":855 - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 855; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":856 - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - * 
self.parsed_event.end_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.column, - * None, None) - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 856; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":857 - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<< - * None, None) - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 857; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":858 - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - * None, None) # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * return node - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_3 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - - /* "_yaml.pyx":854 - * index = index+1 - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, - */ - if (PyObject_SetAttr(__pyx_v_node, __pyx_n_s__end_mark, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 854; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "_yaml.pyx":859 - * self.parsed_event.end_mark.column, - * None, None) - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * return node - * - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":860 - * None, None) - * yaml_event_delete(&self.parsed_event) - * return node # <<<<<<<<<<<<<< - * - * cdef _compose_mapping_node(self, object anchor): - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_node); - __pyx_r = __pyx_v_node; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.CParser._compose_sequence_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_start_mark); - __Pyx_DECREF(__pyx_v_tag); - __Pyx_DECREF(__pyx_v_flow_style); - __Pyx_DECREF(__pyx_v_value); - __Pyx_DECREF(__pyx_v_node); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":862 - * return node - * - * cdef _compose_mapping_node(self, object anchor): # <<<<<<<<<<<<<< - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - */ - -static PyObject *__pyx_f_5_yaml_7CParser__compose_mapping_node(struct __pyx_obj_5_yaml_CParser *__pyx_v_self, PyObject *__pyx_v_anchor) { - struct __pyx_obj_5_yaml_Mark *__pyx_v_start_mark; - int __pyx_v_implicit; - PyObject *__pyx_v_tag; - 
PyObject *__pyx_v_flow_style; - PyObject *__pyx_v_value; - PyObject *__pyx_v_node; - PyObject *__pyx_v_item_key; - PyObject *__pyx_v_item_value; - PyObject *__pyx_r = NULL; - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - int __pyx_t_5; - int __pyx_t_6; - int __pyx_t_7; - int __pyx_t_8; - int __pyx_t_9; - __Pyx_RefNannySetupContext("_compose_mapping_node"); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_tag = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_flow_style = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_node = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item_key = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":864 - * cdef _compose_mapping_node(self, object anchor): - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.index); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 864; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":865 - * start_mark = Mark(self.stream_name, - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.start_mark.column, - * None, None) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.line); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 865; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":866 - * self.parsed_event.start_mark.index, - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, # <<<<<<<<<<<<<< - * None, None) - * implicit = False - */ - __pyx_t_3 = 
PyInt_FromLong(__pyx_v_self->parsed_event.start_mark.column); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 866; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":867 - * self.parsed_event.start_mark.line, - * self.parsed_event.start_mark.column, - * None, None) # <<<<<<<<<<<<<< - * implicit = False - * if self.parsed_event.data.mapping_start.implicit == 1: - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 863; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 863; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_v_start_mark)); - __pyx_v_start_mark = ((struct __pyx_obj_5_yaml_Mark *)__pyx_t_3); - __pyx_t_3 = 0; - - /* "_yaml.pyx":868 - * self.parsed_event.start_mark.column, - * None, None) - * implicit = False # <<<<<<<<<<<<<< - * if self.parsed_event.data.mapping_start.implicit == 1: - * implicit = True - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":869 - * None, None) - * implicit = False - * if 
self.parsed_event.data.mapping_start.implicit == 1: # <<<<<<<<<<<<<< - * implicit = True - * if self.parsed_event.data.mapping_start.tag == NULL \ - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.mapping_start.implicit == 1); - if (__pyx_t_5) { - - /* "_yaml.pyx":870 - * implicit = False - * if self.parsed_event.data.mapping_start.implicit == 1: - * implicit = True # <<<<<<<<<<<<<< - * if self.parsed_event.data.mapping_start.tag == NULL \ - * or (self.parsed_event.data.mapping_start.tag[0] == c'!' - */ - __pyx_v_implicit = 1; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":871 - * if self.parsed_event.data.mapping_start.implicit == 1: - * implicit = True - * if self.parsed_event.data.mapping_start.tag == NULL \ # <<<<<<<<<<<<<< - * or (self.parsed_event.data.mapping_start.tag[0] == c'!' - * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): - */ - __pyx_t_5 = (__pyx_v_self->parsed_event.data.mapping_start.tag == NULL); - if (!__pyx_t_5) { - - /* "_yaml.pyx":872 - * implicit = True - * if self.parsed_event.data.mapping_start.tag == NULL \ - * or (self.parsed_event.data.mapping_start.tag[0] == c'!' # <<<<<<<<<<<<<< - * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): - * tag = self.resolve(MappingNode, None, implicit) - */ - __pyx_t_6 = ((__pyx_v_self->parsed_event.data.mapping_start.tag[0]) == '!'); - if (__pyx_t_6) { - - /* "_yaml.pyx":873 - * if self.parsed_event.data.mapping_start.tag == NULL \ - * or (self.parsed_event.data.mapping_start.tag[0] == c'!' - * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): # <<<<<<<<<<<<<< - * tag = self.resolve(MappingNode, None, implicit) - * else: - */ - __pyx_t_7 = ((__pyx_v_self->parsed_event.data.mapping_start.tag[1]) == '\x00'); - __pyx_t_8 = __pyx_t_7; - } else { - __pyx_t_8 = __pyx_t_6; - } - __pyx_t_6 = __pyx_t_8; - } else { - __pyx_t_6 = __pyx_t_5; - } - if (__pyx_t_6) { - - /* "_yaml.pyx":874 - * or (self.parsed_event.data.mapping_start.tag[0] == c'!' 
- * and self.parsed_event.data.mapping_start.tag[1] == c'\0'): - * tag = self.resolve(MappingNode, None, implicit) # <<<<<<<<<<<<<< - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) - */ - __pyx_t_3 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = __Pyx_PyBool_FromLong(__pyx_v_implicit); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(3); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 1, Py_None); - __Pyx_GIVEREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 2, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __pyx_t_4 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 874; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L4; - } - /*else*/ { - - /* "_yaml.pyx":876 - * tag = self.resolve(MappingNode, None, implicit) - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) # <<<<<<<<<<<<<< - * flow_style = None - * if 
self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - */ - __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->parsed_event.data.mapping_start.tag); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 876; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_tag); - __pyx_v_tag = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_L4:; - - /* "_yaml.pyx":877 - * else: - * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) - * flow_style = None # <<<<<<<<<<<<<< - * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True - */ - __Pyx_INCREF(Py_None); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = Py_None; - - /* "_yaml.pyx":878 - * tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) - * flow_style = None - * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: # <<<<<<<<<<<<<< - * flow_style = True - * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":879 - * flow_style = None - * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True # <<<<<<<<<<<<<< - * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 879; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - - /* "_yaml.pyx":880 - * if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - * flow_style = True - * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: # <<<<<<<<<<<<<< - * flow_style = False - 
* value = [] - */ - __pyx_t_6 = (__pyx_v_self->parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE); - if (__pyx_t_6) { - - /* "_yaml.pyx":881 - * flow_style = True - * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False # <<<<<<<<<<<<<< - * value = [] - * node = MappingNode(tag, value, start_mark, None, flow_style) - */ - __pyx_t_2 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 881; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_flow_style); - __pyx_v_flow_style = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":882 - * elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - * flow_style = False - * value = [] # <<<<<<<<<<<<<< - * node = MappingNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: - */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 882; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_DECREF(((PyObject *)__pyx_v_value)); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":883 - * flow_style = False - * value = [] - * node = MappingNode(tag, value, start_mark, None, flow_style) # <<<<<<<<<<<<<< - * if anchor is not None: - * self.anchors[anchor] = node - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = PyTuple_New(5); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_INCREF(__pyx_v_tag); - PyTuple_SET_ITEM(__pyx_t_1, 0, __pyx_v_tag); - __Pyx_GIVEREF(__pyx_v_tag); - __Pyx_INCREF(((PyObject 
*)__pyx_v_value)); - PyTuple_SET_ITEM(__pyx_t_1, 1, ((PyObject *)__pyx_v_value)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_value)); - __Pyx_INCREF(((PyObject *)__pyx_v_start_mark)); - PyTuple_SET_ITEM(__pyx_t_1, 2, ((PyObject *)__pyx_v_start_mark)); - __Pyx_GIVEREF(((PyObject *)__pyx_v_start_mark)); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_1, 3, Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(__pyx_v_flow_style); - PyTuple_SET_ITEM(__pyx_t_1, 4, __pyx_v_flow_style); - __Pyx_GIVEREF(__pyx_v_flow_style); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_1), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 883; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_node); - __pyx_v_node = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":884 - * value = [] - * node = MappingNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: # <<<<<<<<<<<<<< - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_6 = (__pyx_v_anchor != Py_None); - if (__pyx_t_6) { - - /* "_yaml.pyx":885 - * node = MappingNode(tag, value, start_mark, None, flow_style) - * if anchor is not None: - * self.anchors[anchor] = node # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() - */ - if (PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_anchor, __pyx_v_node) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 885; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":886 - * if anchor is not None: - * self.anchors[anchor] = node - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * self._parse_next_event() - * while self.parsed_event.type != YAML_MAPPING_END_EVENT: - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":887 - * self.anchors[anchor] = node - * 
yaml_event_delete(&self.parsed_event) - * self._parse_next_event() # <<<<<<<<<<<<<< - * while self.parsed_event.type != YAML_MAPPING_END_EVENT: - * item_key = self._compose_node(node, None) - */ - __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 887; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":888 - * yaml_event_delete(&self.parsed_event) - * self._parse_next_event() - * while self.parsed_event.type != YAML_MAPPING_END_EVENT: # <<<<<<<<<<<<<< - * item_key = self._compose_node(node, None) - * item_value = self._compose_node(node, item_key) - */ - while (1) { - __pyx_t_6 = (__pyx_v_self->parsed_event.type != YAML_MAPPING_END_EVENT); - if (!__pyx_t_6) break; - - /* "_yaml.pyx":889 - * self._parse_next_event() - * while self.parsed_event.type != YAML_MAPPING_END_EVENT: - * item_key = self._compose_node(node, None) # <<<<<<<<<<<<<< - * item_value = self._compose_node(node, item_key) - * value.append((item_key, item_value)) - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, Py_None); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 889; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_item_key); - __pyx_v_item_key = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":890 - * while self.parsed_event.type != YAML_MAPPING_END_EVENT: - * item_key = self._compose_node(node, None) - * item_value = self._compose_node(node, item_key) # <<<<<<<<<<<<<< - * value.append((item_key, item_value)) - * self._parse_next_event() - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_compose_node(__pyx_v_self, __pyx_v_node, __pyx_v_item_key); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 890; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_item_value); - __pyx_v_item_value = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":891 - * item_key = self._compose_node(node, None) - * item_value = self._compose_node(node, item_key) - * value.append((item_key, item_value)) # <<<<<<<<<<<<<< - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, - */ - if (unlikely(__pyx_v_value == Py_None)) { - PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 891; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_3 = PyTuple_New(2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 891; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_item_key); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_item_key); - __Pyx_GIVEREF(__pyx_v_item_key); - __Pyx_INCREF(__pyx_v_item_value); - PyTuple_SET_ITEM(__pyx_t_3, 1, __pyx_v_item_value); - __Pyx_GIVEREF(__pyx_v_item_value); - __pyx_t_9 = PyList_Append(__pyx_v_value, ((PyObject *)__pyx_t_3)); if (unlikely(__pyx_t_9 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 891; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - - /* "_yaml.pyx":892 - * item_value = self._compose_node(node, item_key) - * value.append((item_key, item_value)) - * self._parse_next_event() # <<<<<<<<<<<<<< - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - */ - __pyx_t_9 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parse_next_event(__pyx_v_self); if (unlikely(__pyx_t_9 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 892; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - - /* "_yaml.pyx":894 - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, # <<<<<<<<<<<<<< - * 
self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - */ - __pyx_t_3 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.index); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 894; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - - /* "_yaml.pyx":895 - * node.end_mark = Mark(self.stream_name, - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.column, - * None, None) - */ - __pyx_t_1 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.line); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 895; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - - /* "_yaml.pyx":896 - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, # <<<<<<<<<<<<<< - * None, None) - * yaml_event_delete(&self.parsed_event) - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->parsed_event.end_mark.column); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 896; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - - /* "_yaml.pyx":897 - * self.parsed_event.end_mark.line, - * self.parsed_event.end_mark.column, - * None, None) # <<<<<<<<<<<<<< - * yaml_event_delete(&self.parsed_event) - * return node - */ - __pyx_t_4 = PyTuple_New(6); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_self->stream_name); - __Pyx_GIVEREF(__pyx_v_self->stream_name); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - PyTuple_SET_ITEM(__pyx_t_4, 2, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_4, 3, __pyx_t_2); - __Pyx_GIVEREF(__pyx_t_2); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 4, 
Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_INCREF(Py_None); - PyTuple_SET_ITEM(__pyx_t_4, 5, Py_None); - __Pyx_GIVEREF(Py_None); - __pyx_t_3 = 0; - __pyx_t_1 = 0; - __pyx_t_2 = 0; - __pyx_t_2 = PyObject_Call(((PyObject *)((PyObject*)__pyx_ptype_5_yaml_Mark)), ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - - /* "_yaml.pyx":893 - * value.append((item_key, item_value)) - * self._parse_next_event() - * node.end_mark = Mark(self.stream_name, # <<<<<<<<<<<<<< - * self.parsed_event.end_mark.index, - * self.parsed_event.end_mark.line, - */ - if (PyObject_SetAttr(__pyx_v_node, __pyx_n_s__end_mark, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 893; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - - /* "_yaml.pyx":898 - * self.parsed_event.end_mark.column, - * None, None) - * yaml_event_delete(&self.parsed_event) # <<<<<<<<<<<<<< - * return node - * - */ - yaml_event_delete((&__pyx_v_self->parsed_event)); - - /* "_yaml.pyx":899 - * None, None) - * yaml_event_delete(&self.parsed_event) - * return node # <<<<<<<<<<<<<< - * - * cdef int _parse_next_event(self) except 0: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_v_node); - __pyx_r = __pyx_v_node; - goto __pyx_L0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.CParser._compose_mapping_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_start_mark); - __Pyx_DECREF(__pyx_v_tag); - __Pyx_DECREF(__pyx_v_flow_style); - __Pyx_DECREF(__pyx_v_value); - __Pyx_DECREF(__pyx_v_node); - __Pyx_DECREF(__pyx_v_item_key); - __Pyx_DECREF(__pyx_v_item_value); - __Pyx_XGIVEREF(__pyx_r); - 
__Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":901 - * return node - * - * cdef int _parse_next_event(self) except 0: # <<<<<<<<<<<<<< - * if self.parsed_event.type == YAML_NO_EVENT: - * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: - */ - -static int __pyx_f_5_yaml_7CParser__parse_next_event(struct __pyx_obj_5_yaml_CParser *__pyx_v_self) { - PyObject *__pyx_v_error; - int __pyx_r; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("_parse_next_event"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":902 - * - * cdef int _parse_next_event(self) except 0: - * if self.parsed_event.type == YAML_NO_EVENT: # <<<<<<<<<<<<<< - * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: - * error = self._parser_error() - */ - __pyx_t_1 = (__pyx_v_self->parsed_event.type == YAML_NO_EVENT); - if (__pyx_t_1) { - - /* "_yaml.pyx":903 - * cdef int _parse_next_event(self) except 0: - * if self.parsed_event.type == YAML_NO_EVENT: - * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: # <<<<<<<<<<<<<< - * error = self._parser_error() - * raise error - */ - __pyx_t_2 = yaml_parser_parse((&__pyx_v_self->parser), (&__pyx_v_self->parsed_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 903; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_2 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":904 - * if self.parsed_event.type == YAML_NO_EVENT: - * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: - * error = self._parser_error() # <<<<<<<<<<<<<< - * raise error - * return 1 - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CParser *)__pyx_v_self->__pyx_vtab)->_parser_error(__pyx_v_self); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 904; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; 
- - /* "_yaml.pyx":905 - * if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: - * error = self._parser_error() - * raise error # <<<<<<<<<<<<<< - * return 1 - * - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 905; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L4; - } - __pyx_L4:; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":906 - * error = self._parser_error() - * raise error - * return 1 # <<<<<<<<<<<<<< - * - * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CParser._parse_next_event"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":908 - * return 1 - * - * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: # <<<<<<<<<<<<<< - * cdef CParser parser - * parser = data - */ - -static int __pyx_f_5_yaml_input_handler(void *__pyx_v_data, char *__pyx_v_buffer, int __pyx_v_size, int *__pyx_v_read) { - struct __pyx_obj_5_yaml_CParser *__pyx_v_parser; - PyObject *__pyx_v_value; - int __pyx_r; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("input_handler"); - __pyx_v_parser = ((struct __pyx_obj_5_yaml_CParser *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":910 - * cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: - * cdef CParser parser - * parser = data # <<<<<<<<<<<<<< - * if parser.stream_cache is None: - * value = parser.stream.read(size) - */ - __Pyx_INCREF(((PyObject *)((struct __pyx_obj_5_yaml_CParser *)__pyx_v_data))); - __Pyx_DECREF(((PyObject *)__pyx_v_parser)); - __pyx_v_parser = ((struct __pyx_obj_5_yaml_CParser *)__pyx_v_data); - - 
/* "_yaml.pyx":911 - * cdef CParser parser - * parser = data - * if parser.stream_cache is None: # <<<<<<<<<<<<<< - * value = parser.stream.read(size) - * if PyUnicode_CheckExact(value) != 0: - */ - __pyx_t_1 = (__pyx_v_parser->stream_cache == Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":912 - * parser = data - * if parser.stream_cache is None: - * value = parser.stream.read(size) # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(value) != 0: - * value = PyUnicode_AsUTF8String(value) - */ - __pyx_t_2 = PyObject_GetAttr(__pyx_v_parser->stream, __pyx_n_s__read); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyInt_FromLong(__pyx_v_size); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_3); - __Pyx_GIVEREF(__pyx_t_3); - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 912; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":913 - * if parser.stream_cache is None: - * value = parser.stream.read(size) - * if PyUnicode_CheckExact(value) != 0: # <<<<<<<<<<<<<< - * value = PyUnicode_AsUTF8String(value) - * parser.unicode_source = 1 - */ - __pyx_t_1 = (PyUnicode_CheckExact(__pyx_v_value) != 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":914 - * value = parser.stream.read(size) - * if PyUnicode_CheckExact(value) != 
0: - * value = PyUnicode_AsUTF8String(value) # <<<<<<<<<<<<<< - * parser.unicode_source = 1 - * if PyString_CheckExact(value) == 0: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_value); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 914; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":915 - * if PyUnicode_CheckExact(value) != 0: - * value = PyUnicode_AsUTF8String(value) - * parser.unicode_source = 1 # <<<<<<<<<<<<<< - * if PyString_CheckExact(value) == 0: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_v_parser->unicode_source = 1; - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":916 - * value = PyUnicode_AsUTF8String(value) - * parser.unicode_source = 1 - * if PyString_CheckExact(value) == 0: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string value is expected") - */ - __pyx_t_1 = (PyString_CheckExact(__pyx_v_value) == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":917 - * parser.unicode_source = 1 - * if PyString_CheckExact(value) == 0: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("a string value is expected") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":918 - * if PyString_CheckExact(value) == 0: - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string value is expected") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"a string value is expected") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_40), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - /*else*/ { - - /* "_yaml.pyx":920 - * raise 
TypeError("a string value is expected") - * else: - * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<< - * parser.stream_cache = value - * parser.stream_cache_pos = 0 - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_41), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L6:; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":921 - * else: - * raise TypeError(u"a string value is expected") - * parser.stream_cache = value # <<<<<<<<<<<<<< - * parser.stream_cache_pos = 0 - * parser.stream_cache_len = PyString_GET_SIZE(value) - */ - __Pyx_INCREF(__pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - __Pyx_GOTREF(__pyx_v_parser->stream_cache); - __Pyx_DECREF(__pyx_v_parser->stream_cache); - __pyx_v_parser->stream_cache = __pyx_v_value; - - /* "_yaml.pyx":922 - * raise TypeError(u"a string value is expected") - * parser.stream_cache = value - * parser.stream_cache_pos = 0 # <<<<<<<<<<<<<< - * parser.stream_cache_len = PyString_GET_SIZE(value) - * if (parser.stream_cache_len - parser.stream_cache_pos) < size: - */ - __pyx_v_parser->stream_cache_pos = 0; - - /* "_yaml.pyx":923 - * parser.stream_cache = value - * parser.stream_cache_pos = 0 - * parser.stream_cache_len = PyString_GET_SIZE(value) # <<<<<<<<<<<<<< - * if (parser.stream_cache_len - parser.stream_cache_pos) < size: - * size = parser.stream_cache_len - parser.stream_cache_pos - */ - __pyx_v_parser->stream_cache_len = PyString_GET_SIZE(__pyx_v_value); - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":924 - * parser.stream_cache_pos = 0 - * parser.stream_cache_len = PyString_GET_SIZE(value) - * if (parser.stream_cache_len - parser.stream_cache_pos) < size: # <<<<<<<<<<<<<< - * 
size = parser.stream_cache_len - parser.stream_cache_pos - * if size > 0: - */ - __pyx_t_1 = ((__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos) < __pyx_v_size); - if (__pyx_t_1) { - - /* "_yaml.pyx":925 - * parser.stream_cache_len = PyString_GET_SIZE(value) - * if (parser.stream_cache_len - parser.stream_cache_pos) < size: - * size = parser.stream_cache_len - parser.stream_cache_pos # <<<<<<<<<<<<<< - * if size > 0: - * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) - */ - __pyx_v_size = (__pyx_v_parser->stream_cache_len - __pyx_v_parser->stream_cache_pos); - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":926 - * if (parser.stream_cache_len - parser.stream_cache_pos) < size: - * size = parser.stream_cache_len - parser.stream_cache_pos - * if size > 0: # <<<<<<<<<<<<<< - * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) - * + parser.stream_cache_pos, size) - */ - __pyx_t_1 = (__pyx_v_size > 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":927 - * size = parser.stream_cache_len - parser.stream_cache_pos - * if size > 0: - * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) # <<<<<<<<<<<<<< - * + parser.stream_cache_pos, size) - * read[0] = size - */ - __pyx_t_3 = __pyx_v_parser->stream_cache; - __Pyx_INCREF(__pyx_t_3); - - /* "_yaml.pyx":928 - * if size > 0: - * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) - * + parser.stream_cache_pos, size) # <<<<<<<<<<<<<< - * read[0] = size - * parser.stream_cache_pos += size - */ - memcpy(__pyx_v_buffer, (PyString_AS_STRING(__pyx_t_3) + __pyx_v_parser->stream_cache_pos), __pyx_v_size); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L8; - } - __pyx_L8:; - - /* "_yaml.pyx":929 - * memcpy(buffer, PyString_AS_STRING(parser.stream_cache) - * + parser.stream_cache_pos, size) - * read[0] = size # <<<<<<<<<<<<<< - * parser.stream_cache_pos += size - * if parser.stream_cache_pos == parser.stream_cache_len: - */ - (__pyx_v_read[0]) = __pyx_v_size; - - /* "_yaml.pyx":930 - * + 
parser.stream_cache_pos, size) - * read[0] = size - * parser.stream_cache_pos += size # <<<<<<<<<<<<<< - * if parser.stream_cache_pos == parser.stream_cache_len: - * parser.stream_cache = None - */ - __pyx_v_parser->stream_cache_pos = (__pyx_v_parser->stream_cache_pos + __pyx_v_size); - - /* "_yaml.pyx":931 - * read[0] = size - * parser.stream_cache_pos += size - * if parser.stream_cache_pos == parser.stream_cache_len: # <<<<<<<<<<<<<< - * parser.stream_cache = None - * return 1 - */ - __pyx_t_1 = (__pyx_v_parser->stream_cache_pos == __pyx_v_parser->stream_cache_len); - if (__pyx_t_1) { - - /* "_yaml.pyx":932 - * parser.stream_cache_pos += size - * if parser.stream_cache_pos == parser.stream_cache_len: - * parser.stream_cache = None # <<<<<<<<<<<<<< - * return 1 - * - */ - __Pyx_INCREF(Py_None); - __Pyx_GIVEREF(Py_None); - __Pyx_GOTREF(__pyx_v_parser->stream_cache); - __Pyx_DECREF(__pyx_v_parser->stream_cache); - __pyx_v_parser->stream_cache = Py_None; - goto __pyx_L9; - } - __pyx_L9:; - - /* "_yaml.pyx":933 - * if parser.stream_cache_pos == parser.stream_cache_len: - * parser.stream_cache = None - * return 1 # <<<<<<<<<<<<<< - * - * cdef class CEmitter: - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.input_handler"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_parser); - __Pyx_DECREF(__pyx_v_value); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":953 - * cdef object use_encoding - * - * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<< - * allow_unicode=None, line_break=None, encoding=None, - * explicit_start=None, explicit_end=None, version=None, tags=None): - */ - -static int __pyx_pf_5_yaml_8CEmitter___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static int 
__pyx_pf_5_yaml_8CEmitter___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_v_stream = 0; - PyObject *__pyx_v_canonical = 0; - PyObject *__pyx_v_indent = 0; - PyObject *__pyx_v_width = 0; - PyObject *__pyx_v_allow_unicode = 0; - PyObject *__pyx_v_line_break = 0; - PyObject *__pyx_v_encoding = 0; - PyObject *__pyx_v_explicit_start = 0; - PyObject *__pyx_v_explicit_end = 0; - PyObject *__pyx_v_version = 0; - PyObject *__pyx_v_tags = 0; - int __pyx_r; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__stream,&__pyx_n_s__canonical,&__pyx_n_s__indent,&__pyx_n_s__width,&__pyx_n_s__allow_unicode,&__pyx_n_s__line_break,&__pyx_n_s__encoding,&__pyx_n_s__explicit_start,&__pyx_n_s__explicit_end,&__pyx_n_s__version,&__pyx_n_s__tags,0}; - __Pyx_RefNannySetupContext("__init__"); - if (unlikely(__pyx_kwds)) { - Py_ssize_t kw_args = PyDict_Size(__pyx_kwds); - PyObject* values[11] = {0,0,0,0,0,0,0,0,0,0,0}; - values[1] = ((PyObject *)Py_None); - values[2] = ((PyObject *)Py_None); - values[3] = ((PyObject *)Py_None); - - /* "_yaml.pyx":954 - * - * def __init__(self, stream, canonical=None, indent=None, width=None, - * allow_unicode=None, line_break=None, encoding=None, # <<<<<<<<<<<<<< - * explicit_start=None, explicit_end=None, version=None, tags=None): - * if yaml_emitter_initialize(&self.emitter) == 0: - */ - values[4] = ((PyObject *)Py_None); - values[5] = ((PyObject *)Py_None); - values[6] = ((PyObject *)Py_None); - - /* "_yaml.pyx":955 - * def __init__(self, stream, canonical=None, indent=None, width=None, - * allow_unicode=None, line_break=None, encoding=None, - * explicit_start=None, explicit_end=None, version=None, tags=None): # <<<<<<<<<<<<<< - * if yaml_emitter_initialize(&self.emitter) == 0: - * raise MemoryError - */ - values[7] = ((PyObject *)Py_None); - values[8] = ((PyObject *)Py_None); - values[9] = ((PyObject *)Py_None); - values[10] = ((PyObject 
*)Py_None); - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 11: values[10] = PyTuple_GET_ITEM(__pyx_args, 10); - case 10: values[9] = PyTuple_GET_ITEM(__pyx_args, 9); - case 9: values[8] = PyTuple_GET_ITEM(__pyx_args, 8); - case 8: values[7] = PyTuple_GET_ITEM(__pyx_args, 7); - case 7: values[6] = PyTuple_GET_ITEM(__pyx_args, 6); - case 6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5); - case 5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4); - case 4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3); - case 3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2); - case 2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1); - case 1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0); - case 0: break; - default: goto __pyx_L5_argtuple_error; - } - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 0: - values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__stream); - if (likely(values[0])) kw_args--; - else goto __pyx_L5_argtuple_error; - case 1: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__canonical); - if (value) { values[1] = value; kw_args--; } - } - case 2: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__indent); - if (value) { values[2] = value; kw_args--; } - } - case 3: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__width); - if (value) { values[3] = value; kw_args--; } - } - case 4: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__allow_unicode); - if (value) { values[4] = value; kw_args--; } - } - case 5: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__line_break); - if (value) { values[5] = value; kw_args--; } - } - case 6: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__encoding); - if (value) { values[6] = value; kw_args--; } - } - case 7: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__explicit_start); - if (value) { values[7] = value; kw_args--; } - } - case 8: - if 
(kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__explicit_end); - if (value) { values[8] = value; kw_args--; } - } - case 9: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__version); - if (value) { values[9] = value; kw_args--; } - } - case 10: - if (kw_args > 0) { - PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__tags); - if (value) { values[10] = value; kw_args--; } - } - } - if (unlikely(kw_args > 0)) { - if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 953; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - } - __pyx_v_stream = values[0]; - __pyx_v_canonical = values[1]; - __pyx_v_indent = values[2]; - __pyx_v_width = values[3]; - __pyx_v_allow_unicode = values[4]; - __pyx_v_line_break = values[5]; - __pyx_v_encoding = values[6]; - __pyx_v_explicit_start = values[7]; - __pyx_v_explicit_end = values[8]; - __pyx_v_version = values[9]; - __pyx_v_tags = values[10]; - } else { - - /* "_yaml.pyx":953 - * cdef object use_encoding - * - * def __init__(self, stream, canonical=None, indent=None, width=None, # <<<<<<<<<<<<<< - * allow_unicode=None, line_break=None, encoding=None, - * explicit_start=None, explicit_end=None, version=None, tags=None): - */ - __pyx_v_canonical = ((PyObject *)Py_None); - __pyx_v_indent = ((PyObject *)Py_None); - __pyx_v_width = ((PyObject *)Py_None); - - /* "_yaml.pyx":954 - * - * def __init__(self, stream, canonical=None, indent=None, width=None, - * allow_unicode=None, line_break=None, encoding=None, # <<<<<<<<<<<<<< - * explicit_start=None, explicit_end=None, version=None, tags=None): - * if yaml_emitter_initialize(&self.emitter) == 0: - */ - __pyx_v_allow_unicode = ((PyObject *)Py_None); - __pyx_v_line_break = ((PyObject *)Py_None); - __pyx_v_encoding = ((PyObject *)Py_None); - - /* "_yaml.pyx":955 - * def __init__(self, stream, canonical=None, 
indent=None, width=None, - * allow_unicode=None, line_break=None, encoding=None, - * explicit_start=None, explicit_end=None, version=None, tags=None): # <<<<<<<<<<<<<< - * if yaml_emitter_initialize(&self.emitter) == 0: - * raise MemoryError - */ - __pyx_v_explicit_start = ((PyObject *)Py_None); - __pyx_v_explicit_end = ((PyObject *)Py_None); - __pyx_v_version = ((PyObject *)Py_None); - __pyx_v_tags = ((PyObject *)Py_None); - switch (PyTuple_GET_SIZE(__pyx_args)) { - case 11: __pyx_v_tags = PyTuple_GET_ITEM(__pyx_args, 10); - case 10: __pyx_v_version = PyTuple_GET_ITEM(__pyx_args, 9); - case 9: __pyx_v_explicit_end = PyTuple_GET_ITEM(__pyx_args, 8); - case 8: __pyx_v_explicit_start = PyTuple_GET_ITEM(__pyx_args, 7); - case 7: __pyx_v_encoding = PyTuple_GET_ITEM(__pyx_args, 6); - case 6: __pyx_v_line_break = PyTuple_GET_ITEM(__pyx_args, 5); - case 5: __pyx_v_allow_unicode = PyTuple_GET_ITEM(__pyx_args, 4); - case 4: __pyx_v_width = PyTuple_GET_ITEM(__pyx_args, 3); - case 3: __pyx_v_indent = PyTuple_GET_ITEM(__pyx_args, 2); - case 2: __pyx_v_canonical = PyTuple_GET_ITEM(__pyx_args, 1); - case 1: __pyx_v_stream = PyTuple_GET_ITEM(__pyx_args, 0); - break; - default: goto __pyx_L5_argtuple_error; - } - } - goto __pyx_L4_argument_unpacking_done; - __pyx_L5_argtuple_error:; - __Pyx_RaiseArgtupleInvalid("__init__", 0, 1, 11, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 953; __pyx_clineno = __LINE__; goto __pyx_L3_error;} - __pyx_L3_error:; - __Pyx_AddTraceback("_yaml.CEmitter.__init__"); - __Pyx_RefNannyFinishContext(); - return -1; - __pyx_L4_argument_unpacking_done:; - - /* "_yaml.pyx":956 - * allow_unicode=None, line_break=None, encoding=None, - * explicit_start=None, explicit_end=None, version=None, tags=None): - * if yaml_emitter_initialize(&self.emitter) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * self.stream = stream - */ - __pyx_t_1 = (yaml_emitter_initialize((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter)) == 0); 
- if (__pyx_t_1) { - - /* "_yaml.pyx":957 - * explicit_start=None, explicit_end=None, version=None, tags=None): - * if yaml_emitter_initialize(&self.emitter) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * self.stream = stream - * self.dump_unicode = 0 - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 957; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":958 - * if yaml_emitter_initialize(&self.emitter) == 0: - * raise MemoryError - * self.stream = stream # <<<<<<<<<<<<<< - * self.dump_unicode = 0 - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_INCREF(__pyx_v_stream); - __Pyx_GIVEREF(__pyx_v_stream); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->stream); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->stream); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->stream = __pyx_v_stream; - - /* "_yaml.pyx":959 - * raise MemoryError - * self.stream = stream - * self.dump_unicode = 0 # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * if getattr3(stream, 'encoding', None): - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->dump_unicode = 0; - - /* "_yaml.pyx":960 - * self.stream = stream - * self.dump_unicode = 0 - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * if getattr3(stream, 'encoding', None): - * self.dump_unicode = 1 - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":961 - * self.dump_unicode = 0 - * if PY_MAJOR_VERSION < 3: - * if getattr3(stream, 'encoding', None): # <<<<<<<<<<<<<< - * self.dump_unicode = 1 - * else: - */ - __pyx_t_2 = __Pyx_GetAttr3(__pyx_v_stream, ((PyObject *)__pyx_n_s__encoding), Py_None); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 961; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 961; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":962 - * if PY_MAJOR_VERSION < 3: - * if getattr3(stream, 'encoding', None): - * self.dump_unicode = 1 # <<<<<<<<<<<<<< - * else: - * if hasattr(stream, u'encoding'): - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->dump_unicode = 1; - goto __pyx_L8; - } - __pyx_L8:; - goto __pyx_L7; - } - /*else*/ { - - /* "_yaml.pyx":964 - * self.dump_unicode = 1 - * else: - * if hasattr(stream, u'encoding'): # <<<<<<<<<<<<<< - * self.dump_unicode = 1 - * self.use_encoding = encoding - */ - __pyx_t_1 = PyObject_HasAttr(__pyx_v_stream, ((PyObject *)__pyx_n_u__encoding)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 964; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":965 - * else: - * if hasattr(stream, u'encoding'): - * self.dump_unicode = 1 # <<<<<<<<<<<<<< - * self.use_encoding = encoding - * yaml_emitter_set_output(&self.emitter, output_handler, self) - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->dump_unicode = 1; - goto __pyx_L9; - } - __pyx_L9:; - } - __pyx_L7:; - - /* "_yaml.pyx":966 - * if hasattr(stream, u'encoding'): - * self.dump_unicode = 1 - * self.use_encoding = encoding # <<<<<<<<<<<<<< - * yaml_emitter_set_output(&self.emitter, output_handler, self) - * if canonical: - */ - __Pyx_INCREF(__pyx_v_encoding); - __Pyx_GIVEREF(__pyx_v_encoding); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding = __pyx_v_encoding; - - /* "_yaml.pyx":967 - * self.dump_unicode = 1 - * self.use_encoding = encoding - * yaml_emitter_set_output(&self.emitter, output_handler, self) # <<<<<<<<<<<<<< - * if canonical: - * yaml_emitter_set_canonical(&self.emitter, 1) - */ - yaml_emitter_set_output((&((struct __pyx_obj_5_yaml_CEmitter 
*)__pyx_v_self)->emitter), __pyx_f_5_yaml_output_handler, ((void *)__pyx_v_self)); - - /* "_yaml.pyx":968 - * self.use_encoding = encoding - * yaml_emitter_set_output(&self.emitter, output_handler, self) - * if canonical: # <<<<<<<<<<<<<< - * yaml_emitter_set_canonical(&self.emitter, 1) - * if indent is not None: - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_canonical); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 968; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":969 - * yaml_emitter_set_output(&self.emitter, output_handler, self) - * if canonical: - * yaml_emitter_set_canonical(&self.emitter, 1) # <<<<<<<<<<<<<< - * if indent is not None: - * yaml_emitter_set_indent(&self.emitter, indent) - */ - yaml_emitter_set_canonical((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), 1); - goto __pyx_L10; - } - __pyx_L10:; - - /* "_yaml.pyx":970 - * if canonical: - * yaml_emitter_set_canonical(&self.emitter, 1) - * if indent is not None: # <<<<<<<<<<<<<< - * yaml_emitter_set_indent(&self.emitter, indent) - * if width is not None: - */ - __pyx_t_1 = (__pyx_v_indent != Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":971 - * yaml_emitter_set_canonical(&self.emitter, 1) - * if indent is not None: - * yaml_emitter_set_indent(&self.emitter, indent) # <<<<<<<<<<<<<< - * if width is not None: - * yaml_emitter_set_width(&self.emitter, width) - */ - __pyx_t_3 = __Pyx_PyInt_AsInt(__pyx_v_indent); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 971; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - yaml_emitter_set_indent((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), __pyx_t_3); - goto __pyx_L11; - } - __pyx_L11:; - - /* "_yaml.pyx":972 - * if indent is not None: - * yaml_emitter_set_indent(&self.emitter, indent) - * if width is not None: # <<<<<<<<<<<<<< - * yaml_emitter_set_width(&self.emitter, width) - * if allow_unicode: - */ - 
__pyx_t_1 = (__pyx_v_width != Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":973 - * yaml_emitter_set_indent(&self.emitter, indent) - * if width is not None: - * yaml_emitter_set_width(&self.emitter, width) # <<<<<<<<<<<<<< - * if allow_unicode: - * yaml_emitter_set_unicode(&self.emitter, 1) - */ - __pyx_t_3 = __Pyx_PyInt_AsInt(__pyx_v_width); if (unlikely((__pyx_t_3 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 973; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - yaml_emitter_set_width((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), __pyx_t_3); - goto __pyx_L12; - } - __pyx_L12:; - - /* "_yaml.pyx":974 - * if width is not None: - * yaml_emitter_set_width(&self.emitter, width) - * if allow_unicode: # <<<<<<<<<<<<<< - * yaml_emitter_set_unicode(&self.emitter, 1) - * if line_break is not None: - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_allow_unicode); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 974; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":975 - * yaml_emitter_set_width(&self.emitter, width) - * if allow_unicode: - * yaml_emitter_set_unicode(&self.emitter, 1) # <<<<<<<<<<<<<< - * if line_break is not None: - * if line_break == '\r': - */ - yaml_emitter_set_unicode((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), 1); - goto __pyx_L13; - } - __pyx_L13:; - - /* "_yaml.pyx":976 - * if allow_unicode: - * yaml_emitter_set_unicode(&self.emitter, 1) - * if line_break is not None: # <<<<<<<<<<<<<< - * if line_break == '\r': - * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) - */ - __pyx_t_1 = (__pyx_v_line_break != Py_None); - if (__pyx_t_1) { - - /* "_yaml.pyx":977 - * yaml_emitter_set_unicode(&self.emitter, 1) - * if line_break is not None: - * if line_break == '\r': # <<<<<<<<<<<<<< - * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) - * elif line_break == '\n': - */ - __pyx_t_2 = 
PyObject_RichCompare(__pyx_v_line_break, ((PyObject *)__pyx_kp_s_42), Py_EQ); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 977; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 977; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":978 - * if line_break is not None: - * if line_break == '\r': - * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) # <<<<<<<<<<<<<< - * elif line_break == '\n': - * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) - */ - yaml_emitter_set_break((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), YAML_CR_BREAK); - goto __pyx_L15; - } - - /* "_yaml.pyx":979 - * if line_break == '\r': - * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) - * elif line_break == '\n': # <<<<<<<<<<<<<< - * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) - * elif line_break == '\r\n': - */ - __pyx_t_2 = PyObject_RichCompare(__pyx_v_line_break, ((PyObject *)__pyx_kp_s_43), Py_EQ); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 979; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 979; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":980 - * yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) - * elif line_break == '\n': - * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) # <<<<<<<<<<<<<< - * elif line_break == '\r\n': - * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) - */ - yaml_emitter_set_break((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), YAML_LN_BREAK); - goto __pyx_L15; - } - - /* "_yaml.pyx":981 - * elif 
line_break == '\n': - * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) - * elif line_break == '\r\n': # <<<<<<<<<<<<<< - * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) - * self.document_start_implicit = 1 - */ - __pyx_t_2 = PyObject_RichCompare(__pyx_v_line_break, ((PyObject *)__pyx_kp_s_44), Py_EQ); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 981; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 981; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":982 - * yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) - * elif line_break == '\r\n': - * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) # <<<<<<<<<<<<<< - * self.document_start_implicit = 1 - * if explicit_start: - */ - yaml_emitter_set_break((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), YAML_CRLN_BREAK); - goto __pyx_L15; - } - __pyx_L15:; - goto __pyx_L14; - } - __pyx_L14:; - - /* "_yaml.pyx":983 - * elif line_break == '\r\n': - * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) - * self.document_start_implicit = 1 # <<<<<<<<<<<<<< - * if explicit_start: - * self.document_start_implicit = 0 - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_start_implicit = 1; - - /* "_yaml.pyx":984 - * yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) - * self.document_start_implicit = 1 - * if explicit_start: # <<<<<<<<<<<<<< - * self.document_start_implicit = 0 - * self.document_end_implicit = 1 - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_start); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 984; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":985 - * self.document_start_implicit = 1 - * if explicit_start: - * 
self.document_start_implicit = 0 # <<<<<<<<<<<<<< - * self.document_end_implicit = 1 - * if explicit_end: - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_start_implicit = 0; - goto __pyx_L16; - } - __pyx_L16:; - - /* "_yaml.pyx":986 - * if explicit_start: - * self.document_start_implicit = 0 - * self.document_end_implicit = 1 # <<<<<<<<<<<<<< - * if explicit_end: - * self.document_end_implicit = 0 - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_end_implicit = 1; - - /* "_yaml.pyx":987 - * self.document_start_implicit = 0 - * self.document_end_implicit = 1 - * if explicit_end: # <<<<<<<<<<<<<< - * self.document_end_implicit = 0 - * self.use_version = version - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_v_explicit_end); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 987; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":988 - * self.document_end_implicit = 1 - * if explicit_end: - * self.document_end_implicit = 0 # <<<<<<<<<<<<<< - * self.use_version = version - * self.use_tags = tags - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_end_implicit = 0; - goto __pyx_L17; - } - __pyx_L17:; - - /* "_yaml.pyx":989 - * if explicit_end: - * self.document_end_implicit = 0 - * self.use_version = version # <<<<<<<<<<<<<< - * self.use_tags = tags - * self.serialized_nodes = {} - */ - __Pyx_INCREF(__pyx_v_version); - __Pyx_GIVEREF(__pyx_v_version); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version = __pyx_v_version; - - /* "_yaml.pyx":990 - * self.document_end_implicit = 0 - * self.use_version = version - * self.use_tags = tags # <<<<<<<<<<<<<< - * self.serialized_nodes = {} - * self.anchors = {} - */ - __Pyx_INCREF(__pyx_v_tags); - __Pyx_GIVEREF(__pyx_v_tags); - 
__Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags = __pyx_v_tags; - - /* "_yaml.pyx":991 - * self.use_version = version - * self.use_tags = tags - * self.serialized_nodes = {} # <<<<<<<<<<<<<< - * self.anchors = {} - * self.last_alias_id = 0 - */ - __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 991; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":992 - * self.use_tags = tags - * self.serialized_nodes = {} - * self.anchors = {} # <<<<<<<<<<<<<< - * self.last_alias_id = 0 - * self.closed = -1 - */ - __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 992; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":993 - * self.serialized_nodes = {} - * self.anchors = {} - * self.last_alias_id = 0 # <<<<<<<<<<<<<< - * self.closed = -1 - * - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->last_alias_id = 0; - - /* "_yaml.pyx":994 - * self.anchors = {} - * self.last_alias_id = 0 - * self.closed = -1 # <<<<<<<<<<<<<< - * - * def __dealloc__(self): - */ - ((struct 
__pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed = -1; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_AddTraceback("_yaml.CEmitter.__init__"); - __pyx_r = -1; - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":996 - * self.closed = -1 - * - * def __dealloc__(self): # <<<<<<<<<<<<<< - * yaml_emitter_delete(&self.emitter) - * - */ - -static void __pyx_pf_5_yaml_8CEmitter_1__dealloc__(PyObject *__pyx_v_self); /*proto*/ -static void __pyx_pf_5_yaml_8CEmitter_1__dealloc__(PyObject *__pyx_v_self) { - __Pyx_RefNannySetupContext("__dealloc__"); - - /* "_yaml.pyx":997 - * - * def __dealloc__(self): - * yaml_emitter_delete(&self.emitter) # <<<<<<<<<<<<<< - * - * def dispose(self): - */ - yaml_emitter_delete((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter)); - - __Pyx_RefNannyFinishContext(); -} - -/* "_yaml.pyx":999 - * yaml_emitter_delete(&self.emitter) - * - * def dispose(self): # <<<<<<<<<<<<<< - * pass - * - */ - -static PyObject *__pyx_pf_5_yaml_8CEmitter_2dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_8CEmitter_2dispose(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - PyObject *__pyx_r = NULL; - __Pyx_RefNannySetupContext("dispose"); - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1002 - * pass - * - * cdef object _emitter_error(self): # <<<<<<<<<<<<<< - * if self.emitter.error == YAML_MEMORY_ERROR: - * return MemoryError - */ - -static PyObject *__pyx_f_5_yaml_8CEmitter__emitter_error(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self) { - PyObject *__pyx_v_problem; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("_emitter_error"); - __pyx_v_problem = Py_None; __Pyx_INCREF(Py_None); - - /* 
"_yaml.pyx":1003 - * - * cdef object _emitter_error(self): - * if self.emitter.error == YAML_MEMORY_ERROR: # <<<<<<<<<<<<<< - * return MemoryError - * elif self.emitter.error == YAML_EMITTER_ERROR: - */ - __pyx_t_1 = (__pyx_v_self->emitter.error == YAML_MEMORY_ERROR); - if (__pyx_t_1) { - - /* "_yaml.pyx":1004 - * cdef object _emitter_error(self): - * if self.emitter.error == YAML_MEMORY_ERROR: - * return MemoryError # <<<<<<<<<<<<<< - * elif self.emitter.error == YAML_EMITTER_ERROR: - * if PY_MAJOR_VERSION < 3: - */ - __Pyx_XDECREF(__pyx_r); - __Pyx_INCREF(__pyx_builtin_MemoryError); - __pyx_r = __pyx_builtin_MemoryError; - goto __pyx_L0; - goto __pyx_L3; - } - - /* "_yaml.pyx":1005 - * if self.emitter.error == YAML_MEMORY_ERROR: - * return MemoryError - * elif self.emitter.error == YAML_EMITTER_ERROR: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * problem = self.emitter.problem - */ - __pyx_t_1 = (__pyx_v_self->emitter.error == YAML_EMITTER_ERROR); - if (__pyx_t_1) { - - /* "_yaml.pyx":1006 - * return MemoryError - * elif self.emitter.error == YAML_EMITTER_ERROR: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * problem = self.emitter.problem - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1007 - * elif self.emitter.error == YAML_EMITTER_ERROR: - * if PY_MAJOR_VERSION < 3: - * problem = self.emitter.problem # <<<<<<<<<<<<<< - * else: - * problem = PyUnicode_FromString(self.emitter.problem) - */ - __pyx_t_2 = PyBytes_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1007; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_DECREF(__pyx_v_problem); - __pyx_v_problem = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - goto __pyx_L4; - } - /*else*/ { - - /* "_yaml.pyx":1009 - * problem = self.emitter.problem - * else: - * problem = PyUnicode_FromString(self.emitter.problem) # <<<<<<<<<<<<<< - * return 
EmitterError(problem) - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_2 = PyUnicode_FromString(__pyx_v_self->emitter.problem); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1009; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_problem); - __pyx_v_problem = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_L4:; - - /* "_yaml.pyx":1010 - * else: - * problem = PyUnicode_FromString(self.emitter.problem) - * return EmitterError(problem) # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ValueError("no emitter error") - */ - __Pyx_XDECREF(__pyx_r); - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__EmitterError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_problem); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_problem); - __Pyx_GIVEREF(__pyx_v_problem); - __pyx_t_4 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1010; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __pyx_r = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L0; - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":1011 - * problem = PyUnicode_FromString(self.emitter.problem) - * return EmitterError(problem) - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ValueError("no emitter error") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1012 - * return EmitterError(problem) - * if PY_MAJOR_VERSION < 3: - * raise ValueError("no emitter error") # <<<<<<<<<<<<<< - * else: - * raise 
ValueError(u"no emitter error") - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_48), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":1014 - * raise ValueError("no emitter error") - * else: - * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<< - * - * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_49), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L5:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.CEmitter._emitter_error"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_problem); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1016 - * raise ValueError(u"no emitter error") - * - * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: # <<<<<<<<<<<<<< - * cdef yaml_encoding_t encoding - * cdef yaml_version_directive_t version_directive_value - */ - -static int __pyx_f_5_yaml_8CEmitter__object_to_event(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_event_object, yaml_event_t *__pyx_v_event) { - yaml_encoding_t __pyx_v_encoding; - 
yaml_version_directive_t __pyx_v_version_directive_value; - yaml_version_directive_t *__pyx_v_version_directive; - yaml_tag_directive_t __pyx_v_tag_directives_value[128]; - yaml_tag_directive_t *__pyx_v_tag_directives_start; - yaml_tag_directive_t *__pyx_v_tag_directives_end; - int __pyx_v_implicit; - int __pyx_v_plain_implicit; - int __pyx_v_quoted_implicit; - char *__pyx_v_anchor; - char *__pyx_v_tag; - char *__pyx_v_value; - int __pyx_v_length; - yaml_scalar_style_t __pyx_v_scalar_style; - yaml_sequence_style_t __pyx_v_sequence_style; - yaml_mapping_style_t __pyx_v_mapping_style; - PyObject *__pyx_v_event_class; - PyObject *__pyx_v_cache; - PyObject *__pyx_v_handle; - PyObject *__pyx_v_prefix; - PyObject *__pyx_v_anchor_object; - PyObject *__pyx_v_tag_object; - PyObject *__pyx_v_value_object; - PyObject *__pyx_v_style_object; - int __pyx_r; - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - int __pyx_t_5; - int __pyx_t_6; - Py_ssize_t __pyx_t_7; - PyObject *__pyx_t_8 = NULL; - __Pyx_RefNannySetupContext("_object_to_event"); - __pyx_v_event_class = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_cache = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_handle = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_prefix = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_anchor_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_tag_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_style_object = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1033 - * cdef yaml_sequence_style_t sequence_style - * cdef yaml_mapping_style_t mapping_style - * event_class = event_object.__class__ # <<<<<<<<<<<<<< - * if event_class is StreamStartEvent: - * encoding = YAML_UTF8_ENCODING - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s____class__); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1033; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_event_class); - __pyx_v_event_class = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1034 - * cdef yaml_mapping_style_t mapping_style - * event_class = event_object.__class__ - * if event_class is StreamStartEvent: # <<<<<<<<<<<<<< - * encoding = YAML_UTF8_ENCODING - * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1034; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = (__pyx_v_event_class == __pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1035 - * event_class = event_object.__class__ - * if event_class is StreamStartEvent: - * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<< - * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING - */ - __pyx_v_encoding = YAML_UTF8_ENCODING; - - /* "_yaml.pyx":1036 - * if event_class is StreamStartEvent: - * encoding = YAML_UTF8_ENCODING - * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': # <<<<<<<<<<<<<< - * encoding = YAML_UTF16LE_ENCODING - * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = __Pyx_PyUnicode_Equals(__pyx_t_1, ((PyObject *)__pyx_kp_u_13), Py_EQ); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (!__pyx_t_2) { - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__encoding); if (unlikely(!__pyx_t_1)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = PyObject_RichCompare(__pyx_t_1, ((PyObject *)__pyx_kp_s_13), Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1036; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_5 = __pyx_t_4; - } else { - __pyx_t_5 = __pyx_t_2; - } - if (__pyx_t_5) { - - /* "_yaml.pyx":1037 - * encoding = YAML_UTF8_ENCODING - * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<< - * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': - * encoding = YAML_UTF16BE_ENCODING - */ - __pyx_v_encoding = YAML_UTF16LE_ENCODING; - goto __pyx_L4; - } - - /* "_yaml.pyx":1038 - * if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING - * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': # <<<<<<<<<<<<<< - * encoding = YAML_UTF16BE_ENCODING - * if event_object.encoding is None: - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__encoding); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = __Pyx_PyUnicode_Equals(__pyx_t_3, ((PyObject *)__pyx_kp_u_14), Py_EQ); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!__pyx_t_5) { - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, 
__pyx_n_s__encoding); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = PyObject_RichCompare(__pyx_t_3, ((PyObject *)__pyx_kp_s_14), Py_EQ); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1038; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_4 = __pyx_t_2; - } else { - __pyx_t_4 = __pyx_t_5; - } - if (__pyx_t_4) { - - /* "_yaml.pyx":1039 - * encoding = YAML_UTF16LE_ENCODING - * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': - * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<< - * if event_object.encoding is None: - * self.dump_unicode = 1 - */ - __pyx_v_encoding = YAML_UTF16BE_ENCODING; - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":1040 - * elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': - * encoding = YAML_UTF16BE_ENCODING - * if event_object.encoding is None: # <<<<<<<<<<<<<< - * self.dump_unicode = 1 - * if self.dump_unicode == 1: - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__encoding); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1040; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = (__pyx_t_1 == Py_None); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1041 - * encoding = YAML_UTF16BE_ENCODING - * if event_object.encoding is None: - * self.dump_unicode = 1 # <<<<<<<<<<<<<< - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING - */ - __pyx_v_self->dump_unicode = 1; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":1042 
- * if event_object.encoding is None: - * self.dump_unicode = 1 - * if self.dump_unicode == 1: # <<<<<<<<<<<<<< - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(event, encoding) - */ - __pyx_t_4 = (__pyx_v_self->dump_unicode == 1); - if (__pyx_t_4) { - - /* "_yaml.pyx":1043 - * self.dump_unicode = 1 - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<< - * yaml_stream_start_event_initialize(event, encoding) - * elif event_class is StreamEndEvent: - */ - __pyx_v_encoding = YAML_UTF8_ENCODING; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":1044 - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(event, encoding) # <<<<<<<<<<<<<< - * elif event_class is StreamEndEvent: - * yaml_stream_end_event_initialize(event) - */ - yaml_stream_start_event_initialize(__pyx_v_event, __pyx_v_encoding); - goto __pyx_L3; - } - - /* "_yaml.pyx":1045 - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(event, encoding) - * elif event_class is StreamEndEvent: # <<<<<<<<<<<<<< - * yaml_stream_end_event_initialize(event) - * elif event_class is DocumentStartEvent: - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__StreamEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1045; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = (__pyx_v_event_class == __pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1046 - * yaml_stream_start_event_initialize(event, encoding) - * elif event_class is StreamEndEvent: - * yaml_stream_end_event_initialize(event) # <<<<<<<<<<<<<< - * elif event_class is DocumentStartEvent: - * version_directive = NULL - */ - yaml_stream_end_event_initialize(__pyx_v_event); - goto __pyx_L3; - } - - /* "_yaml.pyx":1047 - * elif event_class is StreamEndEvent: - * yaml_stream_end_event_initialize(event) - * elif event_class is 
DocumentStartEvent: # <<<<<<<<<<<<<< - * version_directive = NULL - * if event_object.version: - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1047; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = (__pyx_v_event_class == __pyx_t_1); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1048 - * yaml_stream_end_event_initialize(event) - * elif event_class is DocumentStartEvent: - * version_directive = NULL # <<<<<<<<<<<<<< - * if event_object.version: - * version_directive_value.major = event_object.version[0] - */ - __pyx_v_version_directive = NULL; - - /* "_yaml.pyx":1049 - * elif event_class is DocumentStartEvent: - * version_directive = NULL - * if event_object.version: # <<<<<<<<<<<<<< - * version_directive_value.major = event_object.version[0] - * version_directive_value.minor = event_object.version[1] - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__version); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1049; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1049; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1050 - * version_directive = NULL - * if event_object.version: - * version_directive_value.major = event_object.version[0] # <<<<<<<<<<<<<< - * version_directive_value.minor = event_object.version[1] - * version_directive = &version_directive_value - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__version); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_1, 0, 
sizeof(long), PyInt_FromLong); if (!__pyx_t_3) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_6 = __Pyx_PyInt_AsInt(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1050; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_version_directive_value.major = __pyx_t_6; - - /* "_yaml.pyx":1051 - * if event_object.version: - * version_directive_value.major = event_object.version[0] - * version_directive_value.minor = event_object.version[1] # <<<<<<<<<<<<<< - * version_directive = &version_directive_value - * tag_directives_start = NULL - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__version); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = __Pyx_GetItemInt(__pyx_t_3, 1, sizeof(long), PyInt_FromLong); if (!__pyx_t_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_6 = __Pyx_PyInt_AsInt(__pyx_t_1); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1051; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_v_version_directive_value.minor = __pyx_t_6; - - /* "_yaml.pyx":1052 - * version_directive_value.major = event_object.version[0] - * version_directive_value.minor = event_object.version[1] - * version_directive = &version_directive_value # <<<<<<<<<<<<<< - * tag_directives_start = NULL - * tag_directives_end = NULL - */ - __pyx_v_version_directive = (&__pyx_v_version_directive_value); - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":1053 - * 
version_directive_value.minor = event_object.version[1] - * version_directive = &version_directive_value - * tag_directives_start = NULL # <<<<<<<<<<<<<< - * tag_directives_end = NULL - * if event_object.tags: - */ - __pyx_v_tag_directives_start = NULL; - - /* "_yaml.pyx":1054 - * version_directive = &version_directive_value - * tag_directives_start = NULL - * tag_directives_end = NULL # <<<<<<<<<<<<<< - * if event_object.tags: - * if len(event_object.tags) > 128: - */ - __pyx_v_tag_directives_end = NULL; - - /* "_yaml.pyx":1055 - * tag_directives_start = NULL - * tag_directives_end = NULL - * if event_object.tags: # <<<<<<<<<<<<<< - * if len(event_object.tags) > 128: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1055; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1055; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1056 - * tag_directives_end = NULL - * if event_object.tags: - * if len(event_object.tags) > 128: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1056; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_7 = PyObject_Length(__pyx_t_1); if (unlikely(__pyx_t_7 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1056; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_4 = (__pyx_t_7 > 128); - if (__pyx_t_4) { - - /* "_yaml.pyx":1057 - * if event_object.tags: - * if len(event_object.tags) > 128: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< 
- * raise ValueError("too many tags") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1058 - * if len(event_object.tags) > 128: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"too many tags") - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_51), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L10; - } - /*else*/ { - - /* "_yaml.pyx":1060 - * raise ValueError("too many tags") - * else: - * raise ValueError(u"too many tags") # <<<<<<<<<<<<<< - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_52), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L10:; - goto __pyx_L9; - } - __pyx_L9:; - - /* "_yaml.pyx":1061 - * else: - * raise ValueError(u"too many tags") - * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<< - * tag_directives_end = tag_directives_value - * cache = [] - */ - __pyx_v_tag_directives_start = __pyx_v_tag_directives_value; - - /* "_yaml.pyx":1062 - * raise ValueError(u"too many tags") - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<< - * cache = [] - * for handle in event_object.tags: - */ - __pyx_v_tag_directives_end = 
__pyx_v_tag_directives_value; - - /* "_yaml.pyx":1063 - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - * cache = [] # <<<<<<<<<<<<<< - * for handle in event_object.tags: - * prefix = event_object.tags[handle] - */ - __pyx_t_1 = PyList_New(0); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1063; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - __Pyx_DECREF(((PyObject *)__pyx_v_cache)); - __pyx_v_cache = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1064 - * tag_directives_end = tag_directives_value - * cache = [] - * for handle in event_object.tags: # <<<<<<<<<<<<<< - * prefix = event_object.tags[handle] - * if PyUnicode_CheckExact(handle): - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - if (PyList_CheckExact(__pyx_t_1) || PyTuple_CheckExact(__pyx_t_1)) { - __pyx_t_7 = 0; __pyx_t_3 = __pyx_t_1; __Pyx_INCREF(__pyx_t_3); - } else { - __pyx_t_7 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_7 >= PyList_GET_SIZE(__pyx_t_3)) break; - __pyx_t_1 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; - } else if (likely(PyTuple_CheckExact(__pyx_t_3))) { - if (__pyx_t_7 >= PyTuple_GET_SIZE(__pyx_t_3)) break; - __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_7); __Pyx_INCREF(__pyx_t_1); __pyx_t_7++; - } else { - __pyx_t_1 = PyIter_Next(__pyx_t_3); - if (!__pyx_t_1) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1064; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_1); - } - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1065 - * cache = [] - * for handle in event_object.tags: - * prefix = event_object.tags[handle] # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tags); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1065; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_8 = PyObject_GetItem(__pyx_t_1, __pyx_v_handle); if (!__pyx_t_8) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1065; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_8; - __pyx_t_8 = 0; - - /* "_yaml.pyx":1066 - * for handle in event_object.tags: - * prefix = event_object.tags[handle] - * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<< - * handle = PyUnicode_AsUTF8String(handle) - * cache.append(handle) - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_handle); - if (__pyx_t_6) { - - /* "_yaml.pyx":1067 - * prefix = event_object.tags[handle] - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<< - * cache.append(handle) - * if not PyString_CheckExact(handle): - */ - __pyx_t_8 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1067; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_8; - __pyx_t_8 = 0; - - /* "_yaml.pyx":1068 - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) - * cache.append(handle) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - */ - if (unlikely(__pyx_v_cache == Py_None)) { - 
PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1068; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_6 = PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_6 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1068; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L13; - } - __pyx_L13:; - - /* "_yaml.pyx":1069 - * handle = PyUnicode_AsUTF8String(handle) - * cache.append(handle) - * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_handle)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1070 - * cache.append(handle) - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag handle must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1071 - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag handle must be a string") - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_54), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L15; - } - /*else*/ { - - /* "_yaml.pyx":1073 - * raise TypeError("tag handle must be a string") - * else: - * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject 
*)__pyx_k_tuple_55), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L15:; - goto __pyx_L14; - } - __pyx_L14:; - - /* "_yaml.pyx":1074 - * else: - * raise TypeError(u"tag handle must be a string") - * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) - */ - __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle); - - /* "_yaml.pyx":1075 - * raise TypeError(u"tag handle must be a string") - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<< - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_prefix); - if (__pyx_t_6) { - - /* "_yaml.pyx":1076 - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<< - * cache.append(prefix) - * if not PyString_CheckExact(prefix): - */ - __pyx_t_8 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1076; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_8; - __pyx_t_8 = 0; - - /* "_yaml.pyx":1077 - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - */ - if (unlikely(__pyx_v_cache == Py_None)) { - PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 
1077; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_6 = PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_6 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1077; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L16; - } - __pyx_L16:; - - /* "_yaml.pyx":1078 - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) - * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_prefix)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1079 - * cache.append(prefix) - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag prefix must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1080 - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag prefix must be a string") - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_57), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L18; - } - /*else*/ { - - /* "_yaml.pyx":1082 - * raise TypeError("tag prefix must be a string") - * else: - * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_58), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L18:; - goto __pyx_L17; - } - __pyx_L17:; - - /* "_yaml.pyx":1083 - * else: - * raise TypeError(u"tag prefix must be a string") - * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<< - * tag_directives_end = tag_directives_end+1 - * implicit = 1 - */ - __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix); - - /* "_yaml.pyx":1084 - * raise TypeError(u"tag prefix must be a string") - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<< - * implicit = 1 - * if event_object.explicit: - */ - __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1); - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L8; - } - __pyx_L8:; - - /* "_yaml.pyx":1085 - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 - * implicit = 1 # <<<<<<<<<<<<<< - * if event_object.explicit: - * implicit = 0 - */ - __pyx_v_implicit = 1; - - /* "_yaml.pyx":1086 - * tag_directives_end = tag_directives_end+1 - * implicit = 1 - * if event_object.explicit: # <<<<<<<<<<<<<< - * implicit = 0 - * if yaml_document_start_event_initialize(event, version_directive, - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__explicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1086; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1086; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1087 - * implicit = 1 - * if event_object.explicit: - * implicit = 0 # 
<<<<<<<<<<<<<< - * if yaml_document_start_event_initialize(event, version_directive, - * tag_directives_start, tag_directives_end, implicit) == 0: - */ - __pyx_v_implicit = 0; - goto __pyx_L19; - } - __pyx_L19:; - - /* "_yaml.pyx":1089 - * implicit = 0 - * if yaml_document_start_event_initialize(event, version_directive, - * tag_directives_start, tag_directives_end, implicit) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * elif event_class is DocumentEndEvent: - */ - __pyx_t_4 = (yaml_document_start_event_initialize(__pyx_v_event, __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, __pyx_v_implicit) == 0); - if (__pyx_t_4) { - - /* "_yaml.pyx":1090 - * if yaml_document_start_event_initialize(event, version_directive, - * tag_directives_start, tag_directives_end, implicit) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * elif event_class is DocumentEndEvent: - * implicit = 1 - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1090; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L20; - } - __pyx_L20:; - goto __pyx_L3; - } - - /* "_yaml.pyx":1091 - * tag_directives_start, tag_directives_end, implicit) == 0: - * raise MemoryError - * elif event_class is DocumentEndEvent: # <<<<<<<<<<<<<< - * implicit = 1 - * if event_object.explicit: - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__DocumentEndEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1091; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1092 - * raise MemoryError - * elif event_class is DocumentEndEvent: - * implicit = 1 # <<<<<<<<<<<<<< - * if event_object.explicit: - * implicit = 0 - */ - __pyx_v_implicit = 1; - - /* "_yaml.pyx":1093 - * elif event_class is DocumentEndEvent: - * implicit = 1 - * if event_object.explicit: # <<<<<<<<<<<<<< - * implicit = 0 - * 
yaml_document_end_event_initialize(event, implicit) - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__explicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1093; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1093; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1094 - * implicit = 1 - * if event_object.explicit: - * implicit = 0 # <<<<<<<<<<<<<< - * yaml_document_end_event_initialize(event, implicit) - * elif event_class is AliasEvent: - */ - __pyx_v_implicit = 0; - goto __pyx_L21; - } - __pyx_L21:; - - /* "_yaml.pyx":1095 - * if event_object.explicit: - * implicit = 0 - * yaml_document_end_event_initialize(event, implicit) # <<<<<<<<<<<<<< - * elif event_class is AliasEvent: - * anchor = NULL - */ - yaml_document_end_event_initialize(__pyx_v_event, __pyx_v_implicit); - goto __pyx_L3; - } - - /* "_yaml.pyx":1096 - * implicit = 0 - * yaml_document_end_event_initialize(event, implicit) - * elif event_class is AliasEvent: # <<<<<<<<<<<<<< - * anchor = NULL - * anchor_object = event_object.anchor - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__AliasEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1096; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1097 - * yaml_document_end_event_initialize(event, implicit) - * elif event_class is AliasEvent: - * anchor = NULL # <<<<<<<<<<<<<< - * anchor_object = event_object.anchor - * if PyUnicode_CheckExact(anchor_object): - */ - __pyx_v_anchor = NULL; - - /* "_yaml.pyx":1098 - * elif event_class is AliasEvent: - * anchor = NULL - * anchor_object = event_object.anchor # 
<<<<<<<<<<<<<< - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1098; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1099 - * anchor = NULL - * anchor_object = event_object.anchor - * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_anchor_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1100 - * anchor_object = event_object.anchor - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1100; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L22; - } - __pyx_L22:; - - /* "_yaml.pyx":1101 - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_anchor_object)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1102 - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("anchor must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 
3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1103 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_60), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L24; - } - /*else*/ { - - /* "_yaml.pyx":1105 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * if yaml_alias_event_initialize(event, anchor) == 0: - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_61), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L24:; - goto __pyx_L23; - } - __pyx_L23:; - - /* "_yaml.pyx":1106 - * else: - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<< - * if yaml_alias_event_initialize(event, anchor) == 0: - * raise MemoryError - */ - __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object); - - /* "_yaml.pyx":1107 - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) - * if yaml_alias_event_initialize(event, anchor) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * elif event_class is ScalarEvent: - */ - __pyx_t_4 = 
(yaml_alias_event_initialize(__pyx_v_event, __pyx_v_anchor) == 0); - if (__pyx_t_4) { - - /* "_yaml.pyx":1108 - * anchor = PyString_AS_STRING(anchor_object) - * if yaml_alias_event_initialize(event, anchor) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * elif event_class is ScalarEvent: - * anchor = NULL - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1108; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L25; - } - __pyx_L25:; - goto __pyx_L3; - } - - /* "_yaml.pyx":1109 - * if yaml_alias_event_initialize(event, anchor) == 0: - * raise MemoryError - * elif event_class is ScalarEvent: # <<<<<<<<<<<<<< - * anchor = NULL - * anchor_object = event_object.anchor - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1109; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1110 - * raise MemoryError - * elif event_class is ScalarEvent: - * anchor = NULL # <<<<<<<<<<<<<< - * anchor_object = event_object.anchor - * if anchor_object is not None: - */ - __pyx_v_anchor = NULL; - - /* "_yaml.pyx":1111 - * elif event_class is ScalarEvent: - * anchor = NULL - * anchor_object = event_object.anchor # <<<<<<<<<<<<<< - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1111; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1112 - * anchor = NULL - * anchor_object = event_object.anchor - * if anchor_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = 
PyUnicode_AsUTF8String(anchor_object) - */ - __pyx_t_4 = (__pyx_v_anchor_object != Py_None); - if (__pyx_t_4) { - - /* "_yaml.pyx":1113 - * anchor_object = event_object.anchor - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_anchor_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1114 - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1114; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L27; - } - __pyx_L27:; - - /* "_yaml.pyx":1115 - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_anchor_object)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1116 - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("anchor must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1117 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject 
*)__pyx_k_tuple_62), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L29; - } - /*else*/ { - - /* "_yaml.pyx":1119 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_63), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L29:; - goto __pyx_L28; - } - __pyx_L28:; - - /* "_yaml.pyx":1120 - * else: - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<< - * tag = NULL - * tag_object = event_object.tag - */ - __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object); - goto __pyx_L26; - } - __pyx_L26:; - - /* "_yaml.pyx":1121 - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL # <<<<<<<<<<<<<< - * tag_object = event_object.tag - * if tag_object is not None: - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1122 - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - * tag_object = event_object.tag # <<<<<<<<<<<<<< - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tag); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1122; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1123 - * tag = NULL - * tag_object = event_object.tag - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_4 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_4) { - - /* "_yaml.pyx":1124 - * tag_object = event_object.tag - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1125 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1125; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L31; - } - __pyx_L31:; - - /* "_yaml.pyx":1126 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1127 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1128 - * if not 
PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_65), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L33; - } - /*else*/ { - - /* "_yaml.pyx":1130 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * value_object = event_object.value - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_66), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L33:; - goto __pyx_L32; - } - __pyx_L32:; - - /* "_yaml.pyx":1131 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * value_object = event_object.value - * if PyUnicode_CheckExact(value_object): - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L30; - } - __pyx_L30:; - - /* "_yaml.pyx":1132 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * value_object = event_object.value # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__value); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1132; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_value_object); - __pyx_v_value_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1133 - * tag = PyString_AS_STRING(tag_object) - * value_object = event_object.value - * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<< - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_value_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1134 - * value_object = event_object.value - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_value_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1134; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_value_object); - __pyx_v_value_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L34; - } - __pyx_L34:; - - /* "_yaml.pyx":1135 - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("value must be a string") - */ - __pyx_t_4 = (!PyString_CheckExact(__pyx_v_value_object)); - if (__pyx_t_4) { - - /* "_yaml.pyx":1136 - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("value must be a string") - * else: - */ - __pyx_t_4 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_4) { - - /* "_yaml.pyx":1137 - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("value must be a string") # <<<<<<<<<<<<<< - * else: - * 
raise TypeError(u"value must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_68), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L36; - } - /*else*/ { - - /* "_yaml.pyx":1139 - * raise TypeError("value must be a string") - * else: - * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<< - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_69), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L36:; - goto __pyx_L35; - } - __pyx_L35:; - - /* "_yaml.pyx":1140 - * else: - * raise TypeError(u"value must be a string") - * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<< - * length = PyString_GET_SIZE(value_object) - * plain_implicit = 0 - */ - __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object); - - /* "_yaml.pyx":1141 - * raise TypeError(u"value must be a string") - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<< - * plain_implicit = 0 - * quoted_implicit = 0 - */ - __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object); - - /* "_yaml.pyx":1142 - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - * plain_implicit = 0 # <<<<<<<<<<<<<< - * quoted_implicit = 0 - * if event_object.implicit is not None: 
- */ - __pyx_v_plain_implicit = 0; - - /* "_yaml.pyx":1143 - * length = PyString_GET_SIZE(value_object) - * plain_implicit = 0 - * quoted_implicit = 0 # <<<<<<<<<<<<<< - * if event_object.implicit is not None: - * plain_implicit = event_object.implicit[0] - */ - __pyx_v_quoted_implicit = 0; - - /* "_yaml.pyx":1144 - * plain_implicit = 0 - * quoted_implicit = 0 - * if event_object.implicit is not None: # <<<<<<<<<<<<<< - * plain_implicit = event_object.implicit[0] - * quoted_implicit = event_object.implicit[1] - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1144; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = (__pyx_t_3 != Py_None); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_4) { - - /* "_yaml.pyx":1145 - * quoted_implicit = 0 - * if event_object.implicit is not None: - * plain_implicit = event_object.implicit[0] # <<<<<<<<<<<<<< - * quoted_implicit = event_object.implicit[1] - * style_object = event_object.style - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_8 = __Pyx_GetItemInt(__pyx_t_3, 0, sizeof(long), PyInt_FromLong); if (!__pyx_t_8) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_t_6 = __Pyx_PyInt_AsInt(__pyx_t_8); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1145; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_v_plain_implicit = __pyx_t_6; - - /* "_yaml.pyx":1146 - * if event_object.implicit is not None: - * plain_implicit = event_object.implicit[0] - * quoted_implicit = 
event_object.implicit[1] # <<<<<<<<<<<<<< - * style_object = event_object.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE - */ - __pyx_t_8 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__implicit); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = __Pyx_GetItemInt(__pyx_t_8, 1, sizeof(long), PyInt_FromLong); if (!__pyx_t_3) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_6 = __Pyx_PyInt_AsInt(__pyx_t_3); if (unlikely((__pyx_t_6 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1146; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __pyx_v_quoted_implicit = __pyx_t_6; - goto __pyx_L37; - } - __pyx_L37:; - - /* "_yaml.pyx":1147 - * plain_implicit = event_object.implicit[0] - * quoted_implicit = event_object.implicit[1] - * style_object = event_object.style # <<<<<<<<<<<<<< - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == u"'": - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__style); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1147; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_style_object); - __pyx_v_style_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1148 - * quoted_implicit = event_object.implicit[1] - * style_object = event_object.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<< - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE; - - /* "_yaml.pyx":1149 - * style_object = event_object.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == 
u"'": # <<<<<<<<<<<<<< - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": - */ - __pyx_t_3 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_21), Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1149; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1149; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!__pyx_t_4) { - __pyx_t_5 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_21), Py_EQ); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1149; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = __pyx_t_5; - } else { - __pyx_t_2 = __pyx_t_4; - } - if (__pyx_t_2) { - - /* "_yaml.pyx":1150 - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE; - goto __pyx_L38; - } - - /* "_yaml.pyx":1151 - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<< - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == "|" or style_object == u"|": - */ - __pyx_t_3 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_22), Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1151; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1151; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!__pyx_t_2) { - __pyx_t_4 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_22), Py_EQ); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1151; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_5 = __pyx_t_4; - } else { - __pyx_t_5 = __pyx_t_2; - } - if (__pyx_t_5) { - - /* "_yaml.pyx":1152 - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == "|" or style_object == u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; - goto __pyx_L38; - } - - /* "_yaml.pyx":1153 - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<< - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": - */ - __pyx_t_3 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_23), Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1153; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_5 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1153; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!__pyx_t_5) { - __pyx_t_2 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_23), Py_EQ); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1153; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_4 = __pyx_t_2; - } else { - __pyx_t_4 = __pyx_t_5; - } - if (__pyx_t_4) { - - /* "_yaml.pyx":1154 - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == 
"|" or style_object == u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == ">" or style_object == u">": - * scalar_style = YAML_FOLDED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE; - goto __pyx_L38; - } - - /* "_yaml.pyx":1155 - * elif style_object == "|" or style_object == u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<< - * scalar_style = YAML_FOLDED_SCALAR_STYLE - * if yaml_scalar_event_initialize(event, anchor, tag, value, length, - */ - __pyx_t_3 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_24), Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_4 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (!__pyx_t_4) { - __pyx_t_5 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_24), Py_EQ); if (unlikely(__pyx_t_5 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1155; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = __pyx_t_5; - } else { - __pyx_t_2 = __pyx_t_4; - } - if (__pyx_t_2) { - - /* "_yaml.pyx":1156 - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": - * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<< - * if yaml_scalar_event_initialize(event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: - */ - __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE; - goto __pyx_L38; - } - __pyx_L38:; - - /* "_yaml.pyx":1158 - * scalar_style = YAML_FOLDED_SCALAR_STYLE - * if yaml_scalar_event_initialize(event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<< - * 
raise MemoryError - * elif event_class is SequenceStartEvent: - */ - __pyx_t_2 = (yaml_scalar_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1159 - * if yaml_scalar_event_initialize(event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * elif event_class is SequenceStartEvent: - * anchor = NULL - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1159; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L39; - } - __pyx_L39:; - goto __pyx_L3; - } - - /* "_yaml.pyx":1160 - * plain_implicit, quoted_implicit, scalar_style) == 0: - * raise MemoryError - * elif event_class is SequenceStartEvent: # <<<<<<<<<<<<<< - * anchor = NULL - * anchor_object = event_object.anchor - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceStartEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1160; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1161 - * raise MemoryError - * elif event_class is SequenceStartEvent: - * anchor = NULL # <<<<<<<<<<<<<< - * anchor_object = event_object.anchor - * if anchor_object is not None: - */ - __pyx_v_anchor = NULL; - - /* "_yaml.pyx":1162 - * elif event_class is SequenceStartEvent: - * anchor = NULL - * anchor_object = event_object.anchor # <<<<<<<<<<<<<< - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1162; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - 
__pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1163 - * anchor = NULL - * anchor_object = event_object.anchor - * if anchor_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - */ - __pyx_t_2 = (__pyx_v_anchor_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1164 - * anchor_object = event_object.anchor - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_anchor_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1165 - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1165; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L41; - } - __pyx_L41:; - - /* "_yaml.pyx":1166 - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_anchor_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1167 - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("anchor must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1168 - * if not 
PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_70), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L43; - } - /*else*/ { - - /* "_yaml.pyx":1170 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_71), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L43:; - goto __pyx_L42; - } - __pyx_L42:; - - /* "_yaml.pyx":1171 - * else: - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<< - * tag = NULL - * tag_object = event_object.tag - */ - __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object); - goto __pyx_L40; - } - __pyx_L40:; - - /* "_yaml.pyx":1172 - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL # <<<<<<<<<<<<<< - * tag_object = event_object.tag - * if tag_object is not None: - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1173 - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - * tag_object = event_object.tag # 
<<<<<<<<<<<<<< - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tag); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1173; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1174 - * tag = NULL - * tag_object = event_object.tag - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_2 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1175 - * tag_object = event_object.tag - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1176 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1176; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L45; - } - __pyx_L45:; - - /* "_yaml.pyx":1177 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1178 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * 
if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1179 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_72), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L47; - } - /*else*/ { - - /* "_yaml.pyx":1181 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_73), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L47:; - goto __pyx_L46; - } - __pyx_L46:; - - /* "_yaml.pyx":1182 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * implicit = 0 - * if event_object.implicit: - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L44; - } - __pyx_L44:; - - /* "_yaml.pyx":1183 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 # <<<<<<<<<<<<<< - * if 
event_object.implicit: - * implicit = 1 - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":1184 - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - * if event_object.implicit: # <<<<<<<<<<<<<< - * implicit = 1 - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1184; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1185 - * implicit = 0 - * if event_object.implicit: - * implicit = 1 # <<<<<<<<<<<<<< - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if event_object.flow_style: - */ - __pyx_v_implicit = 1; - goto __pyx_L48; - } - __pyx_L48:; - - /* "_yaml.pyx":1186 - * if event_object.implicit: - * implicit = 1 - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<< - * if event_object.flow_style: - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - */ - __pyx_v_sequence_style = YAML_BLOCK_SEQUENCE_STYLE; - - /* "_yaml.pyx":1187 - * implicit = 1 - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if event_object.flow_style: # <<<<<<<<<<<<<< - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - * if yaml_sequence_start_event_initialize(event, anchor, tag, - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__flow_style); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1187; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1188 - * 
sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if event_object.flow_style: - * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<< - * if yaml_sequence_start_event_initialize(event, anchor, tag, - * implicit, sequence_style) == 0: - */ - __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE; - goto __pyx_L49; - } - __pyx_L49:; - - /* "_yaml.pyx":1190 - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - * if yaml_sequence_start_event_initialize(event, anchor, tag, - * implicit, sequence_style) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * elif event_class is MappingStartEvent: - */ - __pyx_t_2 = (yaml_sequence_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1191 - * if yaml_sequence_start_event_initialize(event, anchor, tag, - * implicit, sequence_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * elif event_class is MappingStartEvent: - * anchor = NULL - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1191; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L50; - } - __pyx_L50:; - goto __pyx_L3; - } - - /* "_yaml.pyx":1192 - * implicit, sequence_style) == 0: - * raise MemoryError - * elif event_class is MappingStartEvent: # <<<<<<<<<<<<<< - * anchor = NULL - * anchor_object = event_object.anchor - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingStartEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1192; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1193 - * raise MemoryError - * elif event_class is MappingStartEvent: - * anchor = NULL # <<<<<<<<<<<<<< - * anchor_object = event_object.anchor - * if anchor_object is not None: - */ - __pyx_v_anchor = NULL; - - /* "_yaml.pyx":1194 - * elif event_class is MappingStartEvent: - * anchor 
= NULL - * anchor_object = event_object.anchor # <<<<<<<<<<<<<< - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__anchor); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1194; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1195 - * anchor = NULL - * anchor_object = event_object.anchor - * if anchor_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - */ - __pyx_t_2 = (__pyx_v_anchor_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1196 - * anchor_object = event_object.anchor - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_anchor_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1197 - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1197; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_3; - __pyx_t_3 = 0; - goto __pyx_L52; - } - __pyx_L52:; - - /* "_yaml.pyx":1198 - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") - */ - 
__pyx_t_2 = (!PyString_CheckExact(__pyx_v_anchor_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1199 - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("anchor must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1200 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_74), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L54; - } - /*else*/ { - - /* "_yaml.pyx":1202 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_75), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L54:; - goto __pyx_L53; - } - __pyx_L53:; - - /* "_yaml.pyx":1203 - * else: - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<< - * tag = NULL - * tag_object = event_object.tag - */ - __pyx_v_anchor = 
PyString_AS_STRING(__pyx_v_anchor_object); - goto __pyx_L51; - } - __pyx_L51:; - - /* "_yaml.pyx":1204 - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL # <<<<<<<<<<<<<< - * tag_object = event_object.tag - * if tag_object is not None: - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1205 - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - * tag_object = event_object.tag # <<<<<<<<<<<<<< - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__tag); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1205; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1206 - * tag = NULL - * tag_object = event_object.tag - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_2 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1207 - * tag_object = event_object.tag - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_6 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_6) { - - /* "_yaml.pyx":1208 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1208; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_3; - __pyx_t_3 = 0; - goto 
__pyx_L56; - } - __pyx_L56:; - - /* "_yaml.pyx":1209 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1210 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1211 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_76), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L58; - } - /*else*/ { - - /* "_yaml.pyx":1213 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_77), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L58:; - goto __pyx_L57; - } - __pyx_L57:; - - /* 
"_yaml.pyx":1214 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * implicit = 0 - * if event_object.implicit: - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L55; - } - __pyx_L55:; - - /* "_yaml.pyx":1215 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 # <<<<<<<<<<<<<< - * if event_object.implicit: - * implicit = 1 - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":1216 - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - * if event_object.implicit: # <<<<<<<<<<<<<< - * implicit = 1 - * mapping_style = YAML_BLOCK_MAPPING_STYLE - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__implicit); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1216; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1216; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1217 - * implicit = 0 - * if event_object.implicit: - * implicit = 1 # <<<<<<<<<<<<<< - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if event_object.flow_style: - */ - __pyx_v_implicit = 1; - goto __pyx_L59; - } - __pyx_L59:; - - /* "_yaml.pyx":1218 - * if event_object.implicit: - * implicit = 1 - * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<< - * if event_object.flow_style: - * mapping_style = YAML_FLOW_MAPPING_STYLE - */ - __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE; - - /* "_yaml.pyx":1219 - * implicit = 1 - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if event_object.flow_style: # <<<<<<<<<<<<<< - * mapping_style = YAML_FLOW_MAPPING_STYLE - * if yaml_mapping_start_event_initialize(event, anchor, tag, - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_event_object, __pyx_n_s__flow_style); if 
(unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1219; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1220 - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if event_object.flow_style: - * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<< - * if yaml_mapping_start_event_initialize(event, anchor, tag, - * implicit, mapping_style) == 0: - */ - __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE; - goto __pyx_L60; - } - __pyx_L60:; - - /* "_yaml.pyx":1222 - * mapping_style = YAML_FLOW_MAPPING_STYLE - * if yaml_mapping_start_event_initialize(event, anchor, tag, - * implicit, mapping_style) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * elif event_class is SequenceEndEvent: - */ - __pyx_t_2 = (yaml_mapping_start_event_initialize(__pyx_v_event, __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1223 - * if yaml_mapping_start_event_initialize(event, anchor, tag, - * implicit, mapping_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * elif event_class is SequenceEndEvent: - * yaml_sequence_end_event_initialize(event) - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1223; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L61; - } - __pyx_L61:; - goto __pyx_L3; - } - - /* "_yaml.pyx":1224 - * implicit, mapping_style) == 0: - * raise MemoryError - * elif event_class is SequenceEndEvent: # <<<<<<<<<<<<<< - * yaml_sequence_end_event_initialize(event) - * elif event_class is MappingEndEvent: - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceEndEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1224; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1225 - * raise MemoryError - * elif event_class is SequenceEndEvent: - * yaml_sequence_end_event_initialize(event) # <<<<<<<<<<<<<< - * elif event_class is MappingEndEvent: - * yaml_mapping_end_event_initialize(event) - */ - yaml_sequence_end_event_initialize(__pyx_v_event); - goto __pyx_L3; - } - - /* "_yaml.pyx":1226 - * elif event_class is SequenceEndEvent: - * yaml_sequence_end_event_initialize(event) - * elif event_class is MappingEndEvent: # <<<<<<<<<<<<<< - * yaml_mapping_end_event_initialize(event) - * else: - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingEndEvent); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1226; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = (__pyx_v_event_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1227 - * yaml_sequence_end_event_initialize(event) - * elif event_class is MappingEndEvent: - * yaml_mapping_end_event_initialize(event) # <<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - yaml_mapping_end_event_initialize(__pyx_v_event); - goto __pyx_L3; - } - /*else*/ { - - /* "_yaml.pyx":1229 - * yaml_mapping_end_event_initialize(event) - * else: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("invalid event %s" % event_object) - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1230 - * else: - * if PY_MAJOR_VERSION < 3: - * raise TypeError("invalid event %s" % event_object) # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"invalid event %s" % event_object) - */ - __pyx_t_3 = PyNumber_Remainder(((PyObject *)__pyx_kp_s_78), __pyx_v_event_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_8)); - PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_3)); - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_t_8), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1230; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L62; - } - /*else*/ { - - /* "_yaml.pyx":1232 - * raise TypeError("invalid event %s" % event_object) - * else: - * raise TypeError(u"invalid event %s" % event_object) # <<<<<<<<<<<<<< - * return 1 - * - */ - __pyx_t_3 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_78), __pyx_v_event_object); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __pyx_t_8 = PyTuple_New(1); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_8)); - PyTuple_SET_ITEM(__pyx_t_8, 0, ((PyObject *)__pyx_t_3)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_3)); - __pyx_t_3 = 0; - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_t_8), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 1232; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L62:; - } - __pyx_L3:; - - /* "_yaml.pyx":1233 - * else: - * raise TypeError(u"invalid event %s" % event_object) - * return 1 # <<<<<<<<<<<<<< - * - * def emit(self, event_object): - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("_yaml.CEmitter._object_to_event"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_event_class); - __Pyx_DECREF(__pyx_v_cache); - __Pyx_DECREF(__pyx_v_handle); - __Pyx_DECREF(__pyx_v_prefix); - __Pyx_DECREF(__pyx_v_anchor_object); - __Pyx_DECREF(__pyx_v_tag_object); - __Pyx_DECREF(__pyx_v_value_object); - __Pyx_DECREF(__pyx_v_style_object); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1235 - * return 1 - * - * def emit(self, event_object): # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * self._object_to_event(event_object, &event) - */ - -static PyObject *__pyx_pf_5_yaml_8CEmitter_3emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object); /*proto*/ -static PyObject *__pyx_pf_5_yaml_8CEmitter_3emit(PyObject *__pyx_v_self, PyObject *__pyx_v_event_object) { - yaml_event_t __pyx_v_event; - PyObject *__pyx_v_error; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - int __pyx_t_2; - PyObject *__pyx_t_3 = NULL; - __Pyx_RefNannySetupContext("emit"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1237 - * def emit(self, event_object): - * cdef yaml_event_t event - * self._object_to_event(event_object, &event) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_object_to_event(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), __pyx_v_event_object, 
(&__pyx_v_event)); if (unlikely(__pyx_t_1 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1237; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1238 - * cdef yaml_event_t event - * self._object_to_event(event_object, &event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_1 = yaml_emitter_emit((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1238; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = (__pyx_t_1 == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1239 - * self._object_to_event(event_object, &event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_emitter_error(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1239; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1240 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * - * def open(self): - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1240; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - __pyx_L5:; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CEmitter.emit"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1242 - * raise error - * - * def open(self): 
# <<<<<<<<<<<<<< - * cdef yaml_event_t event - * cdef yaml_encoding_t encoding - */ - -static PyObject *__pyx_pf_5_yaml_8CEmitter_4open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_8CEmitter_4open(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - yaml_event_t __pyx_v_event; - yaml_encoding_t __pyx_v_encoding; - PyObject *__pyx_v_error; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - int __pyx_t_3; - int __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - __Pyx_RefNannySetupContext("open"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1261 - * raise error - * self.closed = 0 - * elif self.closed == 1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") - */ - switch (((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed) { - - /* "_yaml.pyx":1245 - * cdef yaml_event_t event - * cdef yaml_encoding_t encoding - * if self.closed == -1: # <<<<<<<<<<<<<< - * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING - */ - case -1: - - /* "_yaml.pyx":1246 - * cdef yaml_encoding_t encoding - * if self.closed == -1: - * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': # <<<<<<<<<<<<<< - * encoding = YAML_UTF16LE_ENCODING - * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': - */ - __pyx_t_1 = __Pyx_PyUnicode_Equals(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding, ((PyObject *)__pyx_kp_u_13), Py_EQ); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1246; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (!__pyx_t_1) { - __pyx_t_2 = PyObject_RichCompare(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding, ((PyObject *)__pyx_kp_s_13), Py_EQ); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1246; __pyx_clineno = __LINE__; 
goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_3 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1246; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_4 = __pyx_t_3; - } else { - __pyx_t_4 = __pyx_t_1; - } - if (__pyx_t_4) { - - /* "_yaml.pyx":1247 - * if self.closed == -1: - * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING # <<<<<<<<<<<<<< - * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': - * encoding = YAML_UTF16BE_ENCODING - */ - __pyx_v_encoding = YAML_UTF16LE_ENCODING; - goto __pyx_L5; - } - - /* "_yaml.pyx":1248 - * if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': - * encoding = YAML_UTF16LE_ENCODING - * elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': # <<<<<<<<<<<<<< - * encoding = YAML_UTF16BE_ENCODING - * else: - */ - __pyx_t_4 = __Pyx_PyUnicode_Equals(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding, ((PyObject *)__pyx_kp_u_14), Py_EQ); if (unlikely(__pyx_t_4 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1248; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (!__pyx_t_4) { - __pyx_t_2 = PyObject_RichCompare(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding, ((PyObject *)__pyx_kp_s_14), Py_EQ); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1248; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_2); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1248; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_3 = __pyx_t_1; - } else { - __pyx_t_3 = __pyx_t_4; - } - if (__pyx_t_3) { - - /* "_yaml.pyx":1249 - * encoding = YAML_UTF16LE_ENCODING - * elif self.use_encoding == u'utf-16-be' or 
self.use_encoding == 'utf-16-be': - * encoding = YAML_UTF16BE_ENCODING # <<<<<<<<<<<<<< - * else: - * encoding = YAML_UTF8_ENCODING - */ - __pyx_v_encoding = YAML_UTF16BE_ENCODING; - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":1251 - * encoding = YAML_UTF16BE_ENCODING - * else: - * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<< - * if self.use_encoding is None: - * self.dump_unicode = 1 - */ - __pyx_v_encoding = YAML_UTF8_ENCODING; - } - __pyx_L5:; - - /* "_yaml.pyx":1252 - * else: - * encoding = YAML_UTF8_ENCODING - * if self.use_encoding is None: # <<<<<<<<<<<<<< - * self.dump_unicode = 1 - * if self.dump_unicode == 1: - */ - __pyx_t_3 = (((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_encoding == Py_None); - if (__pyx_t_3) { - - /* "_yaml.pyx":1253 - * encoding = YAML_UTF8_ENCODING - * if self.use_encoding is None: - * self.dump_unicode = 1 # <<<<<<<<<<<<<< - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->dump_unicode = 1; - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":1254 - * if self.use_encoding is None: - * self.dump_unicode = 1 - * if self.dump_unicode == 1: # <<<<<<<<<<<<<< - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(&event, encoding) - */ - __pyx_t_3 = (((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->dump_unicode == 1); - if (__pyx_t_3) { - - /* "_yaml.pyx":1255 - * self.dump_unicode = 1 - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING # <<<<<<<<<<<<<< - * yaml_stream_start_event_initialize(&event, encoding) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_v_encoding = YAML_UTF8_ENCODING; - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":1256 - * if self.dump_unicode == 1: - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(&event, encoding) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - 
yaml_stream_start_event_initialize((&__pyx_v_event), __pyx_v_encoding); - - /* "_yaml.pyx":1257 - * encoding = YAML_UTF8_ENCODING - * yaml_stream_start_event_initialize(&event, encoding) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_5 = yaml_emitter_emit((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1257; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_3 = (__pyx_t_5 == 0); - if (__pyx_t_3) { - - /* "_yaml.pyx":1258 - * yaml_stream_start_event_initialize(&event, encoding) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * self.closed = 0 - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_emitter_error(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1258; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":1259 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * self.closed = 0 - * elif self.closed == 1: - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1259; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L8; - } - __pyx_L8:; - - /* "_yaml.pyx":1260 - * error = self._emitter_error() - * raise error - * self.closed = 0 # <<<<<<<<<<<<<< - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed = 0; - break; - - /* "_yaml.pyx":1261 - * raise error - * self.closed = 0 - * elif self.closed == 1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION 
< 3: - * raise SerializerError("serializer is closed") - */ - case 1: - - /* "_yaml.pyx":1262 - * self.closed = 0 - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise SerializerError("serializer is closed") - * else: - */ - __pyx_t_3 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_3) { - - /* "_yaml.pyx":1263 - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is closed") - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_6 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_k_tuple_80), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L9; - } - /*else*/ { - - /* "_yaml.pyx":1265 - * raise SerializerError("serializer is closed") - * else: - * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_2 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_k_tuple_81), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L9:; - break; - default: - - /* "_yaml.pyx":1267 - * raise SerializerError(u"serializer is closed") - * else: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise SerializerError("serializer is already opened") - * else: - */ - __pyx_t_3 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_3) { - - /* "_yaml.pyx":1268 - * else: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is already opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is already opened") - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_6 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_k_tuple_83), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L10; - } - /*else*/ { - - /* "_yaml.pyx":1270 - * raise SerializerError("serializer is already opened") - * else: - * raise SerializerError(u"serializer is already opened") # <<<<<<<<<<<<<< - * - * def close(self): - */ - __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_2 = PyObject_Call(__pyx_t_6, ((PyObject *)__pyx_k_tuple_84), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - 
__Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L10:; - break; - } - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_AddTraceback("_yaml.CEmitter.open"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1272 - * raise SerializerError(u"serializer is already opened") - * - * def close(self): # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * if self.closed == -1: - */ - -static PyObject *__pyx_pf_5_yaml_8CEmitter_5close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/ -static PyObject *__pyx_pf_5_yaml_8CEmitter_5close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) { - yaml_event_t __pyx_v_event; - PyObject *__pyx_v_error; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - __Pyx_RefNannySetupContext("close"); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1279 - * else: - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 0: # <<<<<<<<<<<<<< - * yaml_stream_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - switch (((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed) { - - /* "_yaml.pyx":1274 - * def close(self): - * cdef yaml_event_t event - * if self.closed == -1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") - */ - case -1: - - /* "_yaml.pyx":1275 - * cdef yaml_event_t event - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise SerializerError("serializer is not opened") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* 
"_yaml.pyx":1276 - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is not opened") - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_k_tuple_86), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":1278 - * raise SerializerError("serializer is not opened") - * else: - * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<< - * elif self.closed == 0: - * yaml_stream_end_event_initialize(&event) - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_k_tuple_87), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L5:; - break; - - /* "_yaml.pyx":1279 - * else: - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 0: # <<<<<<<<<<<<<< - * 
yaml_stream_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - case 0: - - /* "_yaml.pyx":1280 - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 0: - * yaml_stream_end_event_initialize(&event) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - yaml_stream_end_event_initialize((&__pyx_v_event)); - - /* "_yaml.pyx":1281 - * elif self.closed == 0: - * yaml_stream_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_4 = yaml_emitter_emit((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1281; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_4 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":1282 - * yaml_stream_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * self.closed = 1 - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_emitter_error(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1282; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":1283 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * self.closed = 1 - * - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1283; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - __pyx_L6:; - - /* "_yaml.pyx":1284 - * error = self._emitter_error() - * raise error - 
* self.closed = 1 # <<<<<<<<<<<<<< - * - * def serialize(self, node): - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed = 1; - break; - } - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CEmitter.close"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1286 - * self.closed = 1 - * - * def serialize(self, node): # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * cdef yaml_version_directive_t version_directive_value - */ - -static PyObject *__pyx_pf_5_yaml_8CEmitter_6serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node); /*proto*/ -static PyObject *__pyx_pf_5_yaml_8CEmitter_6serialize(PyObject *__pyx_v_self, PyObject *__pyx_v_node) { - yaml_event_t __pyx_v_event; - yaml_version_directive_t __pyx_v_version_directive_value; - yaml_version_directive_t *__pyx_v_version_directive; - yaml_tag_directive_t __pyx_v_tag_directives_value[128]; - yaml_tag_directive_t *__pyx_v_tag_directives_start; - yaml_tag_directive_t *__pyx_v_tag_directives_end; - PyObject *__pyx_v_cache; - PyObject *__pyx_v_handle; - PyObject *__pyx_v_prefix; - PyObject *__pyx_v_error; - PyObject *__pyx_r = NULL; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - int __pyx_t_4; - Py_ssize_t __pyx_t_5; - __Pyx_RefNannySetupContext("serialize"); - __pyx_v_cache = ((PyObject*)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_handle = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_prefix = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1298 - * else: - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") - */ - switch (((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->closed) { 
- - /* "_yaml.pyx":1293 - * cdef yaml_tag_directive_t *tag_directives_start - * cdef yaml_tag_directive_t *tag_directives_end - * if self.closed == -1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") - */ - case -1: - - /* "_yaml.pyx":1294 - * cdef yaml_tag_directive_t *tag_directives_end - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise SerializerError("serializer is not opened") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1295 - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is not opened") - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_k_tuple_88), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L5; - } - /*else*/ { - - /* "_yaml.pyx":1297 - * raise SerializerError("serializer is not opened") - * else: - * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<< - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_k_tuple_89), NULL); if 
(unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L5:; - break; - - /* "_yaml.pyx":1298 - * else: - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 1: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") - */ - case 1: - - /* "_yaml.pyx":1299 - * raise SerializerError(u"serializer is not opened") - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise SerializerError("serializer is closed") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1300 - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is closed") - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_k_tuple_90), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - /*else*/ { - - /* "_yaml.pyx":1302 - * raise SerializerError("serializer is closed") - * else: - * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<< - * cache = [] - * version_directive = 
NULL - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_2 = PyObject_Call(__pyx_t_3, ((PyObject *)__pyx_k_tuple_91), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L6:; - break; - } - - /* "_yaml.pyx":1303 - * else: - * raise SerializerError(u"serializer is closed") - * cache = [] # <<<<<<<<<<<<<< - * version_directive = NULL - * if self.use_version: - */ - __pyx_t_2 = PyList_New(0); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1303; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_DECREF(((PyObject *)__pyx_v_cache)); - __pyx_v_cache = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":1304 - * raise SerializerError(u"serializer is closed") - * cache = [] - * version_directive = NULL # <<<<<<<<<<<<<< - * if self.use_version: - * version_directive_value.major = self.use_version[0] - */ - __pyx_v_version_directive = NULL; - - /* "_yaml.pyx":1305 - * cache = [] - * version_directive = NULL - * if self.use_version: # <<<<<<<<<<<<<< - * version_directive_value.major = self.use_version[0] - * version_directive_value.minor = self.use_version[1] - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1305; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":1306 - * version_directive = NULL - * if self.use_version: - * 
version_directive_value.major = self.use_version[0] # <<<<<<<<<<<<<< - * version_directive_value.minor = self.use_version[1] - * version_directive = &version_directive_value - */ - __pyx_t_2 = __Pyx_GetItemInt(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version, 0, sizeof(long), PyInt_FromLong); if (!__pyx_t_2) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1306; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyInt_AsInt(__pyx_t_2); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1306; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v_version_directive_value.major = __pyx_t_4; - - /* "_yaml.pyx":1307 - * if self.use_version: - * version_directive_value.major = self.use_version[0] - * version_directive_value.minor = self.use_version[1] # <<<<<<<<<<<<<< - * version_directive = &version_directive_value - * tag_directives_start = NULL - */ - __pyx_t_2 = __Pyx_GetItemInt(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_version, 1, sizeof(long), PyInt_FromLong); if (!__pyx_t_2) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1307; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_4 = __Pyx_PyInt_AsInt(__pyx_t_2); if (unlikely((__pyx_t_4 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1307; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_v_version_directive_value.minor = __pyx_t_4; - - /* "_yaml.pyx":1308 - * version_directive_value.major = self.use_version[0] - * version_directive_value.minor = self.use_version[1] - * version_directive = &version_directive_value # <<<<<<<<<<<<<< - * tag_directives_start = NULL - * tag_directives_end = NULL - */ - __pyx_v_version_directive = (&__pyx_v_version_directive_value); - goto __pyx_L7; - } - __pyx_L7:; - - /* "_yaml.pyx":1309 - * 
version_directive_value.minor = self.use_version[1] - * version_directive = &version_directive_value - * tag_directives_start = NULL # <<<<<<<<<<<<<< - * tag_directives_end = NULL - * if self.use_tags: - */ - __pyx_v_tag_directives_start = NULL; - - /* "_yaml.pyx":1310 - * version_directive = &version_directive_value - * tag_directives_start = NULL - * tag_directives_end = NULL # <<<<<<<<<<<<<< - * if self.use_tags: - * if len(self.use_tags) > 128: - */ - __pyx_v_tag_directives_end = NULL; - - /* "_yaml.pyx":1311 - * tag_directives_start = NULL - * tag_directives_end = NULL - * if self.use_tags: # <<<<<<<<<<<<<< - * if len(self.use_tags) > 128: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_1 = __Pyx_PyObject_IsTrue(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1311; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":1312 - * tag_directives_end = NULL - * if self.use_tags: - * if len(self.use_tags) > 128: # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") - */ - __pyx_t_2 = ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags; - __Pyx_INCREF(__pyx_t_2); - __pyx_t_5 = PyObject_Length(__pyx_t_2); if (unlikely(__pyx_t_5 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1312; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_1 = (__pyx_t_5 > 128); - if (__pyx_t_1) { - - /* "_yaml.pyx":1313 - * if self.use_tags: - * if len(self.use_tags) > 128: - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise ValueError("too many tags") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1314 - * if len(self.use_tags) > 128: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"too many tags") - */ - __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject 
*)__pyx_k_tuple_92), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L10; - } - /*else*/ { - - /* "_yaml.pyx":1316 - * raise ValueError("too many tags") - * else: - * raise ValueError(u"too many tags") # <<<<<<<<<<<<<< - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - */ - __pyx_t_2 = PyObject_Call(__pyx_builtin_ValueError, ((PyObject *)__pyx_k_tuple_93), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_Raise(__pyx_t_2, 0, 0); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L10:; - goto __pyx_L9; - } - __pyx_L9:; - - /* "_yaml.pyx":1317 - * else: - * raise ValueError(u"too many tags") - * tag_directives_start = tag_directives_value # <<<<<<<<<<<<<< - * tag_directives_end = tag_directives_value - * for handle in self.use_tags: - */ - __pyx_v_tag_directives_start = __pyx_v_tag_directives_value; - - /* "_yaml.pyx":1318 - * raise ValueError(u"too many tags") - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value # <<<<<<<<<<<<<< - * for handle in self.use_tags: - * prefix = self.use_tags[handle] - */ - __pyx_v_tag_directives_end = __pyx_v_tag_directives_value; - - /* "_yaml.pyx":1319 - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - * for handle in self.use_tags: # <<<<<<<<<<<<<< - * prefix = self.use_tags[handle] - * if PyUnicode_CheckExact(handle): - */ - if (PyList_CheckExact(((struct __pyx_obj_5_yaml_CEmitter 
*)__pyx_v_self)->use_tags) || PyTuple_CheckExact(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags)) { - __pyx_t_5 = 0; __pyx_t_2 = ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags; __Pyx_INCREF(__pyx_t_2); - } else { - __pyx_t_5 = -1; __pyx_t_2 = PyObject_GetIter(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - } - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - if (__pyx_t_5 >= PyList_GET_SIZE(__pyx_t_2)) break; - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; - } else if (likely(PyTuple_CheckExact(__pyx_t_2))) { - if (__pyx_t_5 >= PyTuple_GET_SIZE(__pyx_t_2)) break; - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_5); __Pyx_INCREF(__pyx_t_3); __pyx_t_5++; - } else { - __pyx_t_3 = PyIter_Next(__pyx_t_2); - if (!__pyx_t_3) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1319; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1320 - * tag_directives_end = tag_directives_value - * for handle in self.use_tags: - * prefix = self.use_tags[handle] # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) - */ - __pyx_t_3 = PyObject_GetItem(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->use_tags, __pyx_v_handle); if (!__pyx_t_3) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1320; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1321 - * for handle in self.use_tags: - * prefix = self.use_tags[handle] - * if PyUnicode_CheckExact(handle): # <<<<<<<<<<<<<< - * handle = PyUnicode_AsUTF8String(handle) 
- * cache.append(handle) - */ - __pyx_t_4 = PyUnicode_CheckExact(__pyx_v_handle); - if (__pyx_t_4) { - - /* "_yaml.pyx":1322 - * prefix = self.use_tags[handle] - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) # <<<<<<<<<<<<<< - * cache.append(handle) - * if not PyString_CheckExact(handle): - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_handle); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1322; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_handle); - __pyx_v_handle = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1323 - * if PyUnicode_CheckExact(handle): - * handle = PyUnicode_AsUTF8String(handle) - * cache.append(handle) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - */ - if (unlikely(__pyx_v_cache == Py_None)) { - PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1323; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_4 = PyList_Append(__pyx_v_cache, __pyx_v_handle); if (unlikely(__pyx_t_4 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1323; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L13; - } - __pyx_L13:; - - /* "_yaml.pyx":1324 - * handle = PyUnicode_AsUTF8String(handle) - * cache.append(handle) - * if not PyString_CheckExact(handle): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a string") - */ - __pyx_t_1 = (!PyString_CheckExact(__pyx_v_handle)); - if (__pyx_t_1) { - - /* "_yaml.pyx":1325 - * cache.append(handle) - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag handle must be a string") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1326 - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a 
string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag handle must be a string") - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_94), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L15; - } - /*else*/ { - - /* "_yaml.pyx":1328 - * raise TypeError("tag handle must be a string") - * else: - * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_95), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L15:; - goto __pyx_L14; - } - __pyx_L14:; - - /* "_yaml.pyx":1329 - * else: - * raise TypeError(u"tag handle must be a string") - * tag_directives_end.handle = PyString_AS_STRING(handle) # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) - */ - __pyx_v_tag_directives_end->handle = PyString_AS_STRING(__pyx_v_handle); - - /* "_yaml.pyx":1330 - * raise TypeError(u"tag handle must be a string") - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): # <<<<<<<<<<<<<< - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) - */ - __pyx_t_4 = PyUnicode_CheckExact(__pyx_v_prefix); - if (__pyx_t_4) { - - /* "_yaml.pyx":1331 - * tag_directives_end.handle = 
PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) # <<<<<<<<<<<<<< - * cache.append(prefix) - * if not PyString_CheckExact(prefix): - */ - __pyx_t_3 = PyUnicode_AsUTF8String(__pyx_v_prefix); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1331; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_prefix); - __pyx_v_prefix = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1332 - * if PyUnicode_CheckExact(prefix): - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - */ - if (unlikely(__pyx_v_cache == Py_None)) { - PyErr_SetString(PyExc_AttributeError, "'NoneType' object has no attribute 'append'"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1332; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_t_4 = PyList_Append(__pyx_v_cache, __pyx_v_prefix); if (unlikely(__pyx_t_4 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1332; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L16; - } - __pyx_L16:; - - /* "_yaml.pyx":1333 - * prefix = PyUnicode_AsUTF8String(prefix) - * cache.append(prefix) - * if not PyString_CheckExact(prefix): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") - */ - __pyx_t_1 = (!PyString_CheckExact(__pyx_v_prefix)); - if (__pyx_t_1) { - - /* "_yaml.pyx":1334 - * cache.append(prefix) - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag prefix must be a string") - * else: - */ - __pyx_t_1 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_1) { - - /* "_yaml.pyx":1335 - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag prefix must be a string") - */ - __pyx_t_3 = 
PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_96), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L18; - } - /*else*/ { - - /* "_yaml.pyx":1337 - * raise TypeError("tag prefix must be a string") - * else: - * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 - */ - __pyx_t_3 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_97), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_Raise(__pyx_t_3, 0, 0); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L18:; - goto __pyx_L17; - } - __pyx_L17:; - - /* "_yaml.pyx":1338 - * else: - * raise TypeError(u"tag prefix must be a string") - * tag_directives_end.prefix = PyString_AS_STRING(prefix) # <<<<<<<<<<<<<< - * tag_directives_end = tag_directives_end+1 - * if yaml_document_start_event_initialize(&event, version_directive, - */ - __pyx_v_tag_directives_end->prefix = PyString_AS_STRING(__pyx_v_prefix); - - /* "_yaml.pyx":1339 - * raise TypeError(u"tag prefix must be a string") - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 # <<<<<<<<<<<<<< - * if yaml_document_start_event_initialize(&event, version_directive, - * tag_directives_start, tag_directives_end, - */ - __pyx_v_tag_directives_end = (__pyx_v_tag_directives_end + 1); - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L8; - } - 
__pyx_L8:; - - /* "_yaml.pyx":1342 - * if yaml_document_start_event_initialize(&event, version_directive, - * tag_directives_start, tag_directives_end, - * self.document_start_implicit) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_1 = (yaml_document_start_event_initialize((&__pyx_v_event), __pyx_v_version_directive, __pyx_v_tag_directives_start, __pyx_v_tag_directives_end, ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_start_implicit) == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":1343 - * tag_directives_start, tag_directives_end, - * self.document_start_implicit) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1343; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L19; - } - __pyx_L19:; - - /* "_yaml.pyx":1344 - * self.document_start_implicit) == 0: - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_4 = yaml_emitter_emit((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1344; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_4 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":1345 - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * self._anchor_node(node) - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_emitter_error(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1345; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - 
__Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":1346 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * self._anchor_node(node) - * self._serialize_node(node, None, None) - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1346; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L20; - } - __pyx_L20:; - - /* "_yaml.pyx":1347 - * error = self._emitter_error() - * raise error - * self._anchor_node(node) # <<<<<<<<<<<<<< - * self._serialize_node(node, None, None) - * yaml_document_end_event_initialize(&event, self.document_end_implicit) - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_anchor_node(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), __pyx_v_node); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1347; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1348 - * raise error - * self._anchor_node(node) - * self._serialize_node(node, None, None) # <<<<<<<<<<<<<< - * yaml_document_end_event_initialize(&event, self.document_end_implicit) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_serialize_node(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self), __pyx_v_node, Py_None, Py_None); if (unlikely(__pyx_t_4 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1348; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1349 - * self._anchor_node(node) - * self._serialize_node(node, None, None) - * yaml_document_end_event_initialize(&event, self.document_end_implicit) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - yaml_document_end_event_initialize((&__pyx_v_event), 
((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->document_end_implicit); - - /* "_yaml.pyx":1350 - * self._serialize_node(node, None, None) - * yaml_document_end_event_initialize(&event, self.document_end_implicit) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_4 = yaml_emitter_emit((&((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1350; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_1 = (__pyx_t_4 == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":1351 - * yaml_document_end_event_initialize(&event, self.document_end_implicit) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * self.serialized_nodes = {} - */ - __pyx_t_2 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->__pyx_vtab)->_emitter_error(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1351; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_2; - __pyx_t_2 = 0; - - /* "_yaml.pyx":1352 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * self.serialized_nodes = {} - * self.anchors = {} - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1352; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L21; - } - __pyx_L21:; - - /* "_yaml.pyx":1353 - * error = self._emitter_error() - * raise error - * self.serialized_nodes = {} # <<<<<<<<<<<<<< - * self.anchors = {} - * self.last_alias_id = 0 - */ - __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1353; __pyx_clineno = __LINE__; 
goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->serialized_nodes = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":1354 - * raise error - * self.serialized_nodes = {} - * self.anchors = {} # <<<<<<<<<<<<<< - * self.last_alias_id = 0 - * - */ - __pyx_t_2 = PyDict_New(); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1354; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_2)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_2)); - __Pyx_GOTREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors); - __Pyx_DECREF(((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors); - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->anchors = ((PyObject *)__pyx_t_2); - __pyx_t_2 = 0; - - /* "_yaml.pyx":1355 - * self.serialized_nodes = {} - * self.anchors = {} - * self.last_alias_id = 0 # <<<<<<<<<<<<<< - * - * cdef int _anchor_node(self, object node) except 0: - */ - ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_self)->last_alias_id = 0; - - __pyx_r = Py_None; __Pyx_INCREF(Py_None); - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_AddTraceback("_yaml.CEmitter.serialize"); - __pyx_r = NULL; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_cache); - __Pyx_DECREF(__pyx_v_handle); - __Pyx_DECREF(__pyx_v_prefix); - __Pyx_DECREF(__pyx_v_error); - __Pyx_XGIVEREF(__pyx_r); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1357 - * self.last_alias_id = 0 - * - * cdef int _anchor_node(self, object node) except 0: # <<<<<<<<<<<<<< - * if node in self.anchors: - * if self.anchors[node] is None: - */ - -static int __pyx_f_5_yaml_8CEmitter__anchor_node(struct __pyx_obj_5_yaml_CEmitter 
*__pyx_v_self, PyObject *__pyx_v_node) { - PyObject *__pyx_v_node_class; - PyObject *__pyx_v_item; - PyObject *__pyx_v_key; - PyObject *__pyx_v_value; - int __pyx_r; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - Py_ssize_t __pyx_t_4; - int __pyx_t_5; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - __Pyx_RefNannySetupContext("_anchor_node"); - __pyx_v_node_class = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_key = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1358 - * - * cdef int _anchor_node(self, object node) except 0: - * if node in self.anchors: # <<<<<<<<<<<<<< - * if self.anchors[node] is None: - * self.last_alias_id = self.last_alias_id+1 - */ - __pyx_t_1 = ((PySequence_Contains(__pyx_v_self->anchors, __pyx_v_node))); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1358; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_1) { - - /* "_yaml.pyx":1359 - * cdef int _anchor_node(self, object node) except 0: - * if node in self.anchors: - * if self.anchors[node] is None: # <<<<<<<<<<<<<< - * self.last_alias_id = self.last_alias_id+1 - * self.anchors[node] = u"id%03d" % self.last_alias_id - */ - __pyx_t_2 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (!__pyx_t_2) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1359; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = (__pyx_t_2 == Py_None); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":1360 - * if node in self.anchors: - * if self.anchors[node] is None: - * self.last_alias_id = self.last_alias_id+1 # <<<<<<<<<<<<<< - * self.anchors[node] = u"id%03d" % self.last_alias_id - * else: - */ - __pyx_v_self->last_alias_id = (__pyx_v_self->last_alias_id + 1); - - /* "_yaml.pyx":1361 - * if self.anchors[node] is None: - * 
self.last_alias_id = self.last_alias_id+1 - * self.anchors[node] = u"id%03d" % self.last_alias_id # <<<<<<<<<<<<<< - * else: - * self.anchors[node] = None - */ - __pyx_t_2 = PyInt_FromLong(__pyx_v_self->last_alias_id); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyNumber_Remainder(((PyObject *)__pyx_kp_u_98), __pyx_t_2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, ((PyObject *)__pyx_t_3)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1361; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - goto __pyx_L4; - } - __pyx_L4:; - goto __pyx_L3; - } - /*else*/ { - - /* "_yaml.pyx":1363 - * self.anchors[node] = u"id%03d" % self.last_alias_id - * else: - * self.anchors[node] = None # <<<<<<<<<<<<<< - * node_class = node.__class__ - * if node_class is SequenceNode: - */ - if (PyObject_SetItem(__pyx_v_self->anchors, __pyx_v_node, Py_None) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1363; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1364 - * else: - * self.anchors[node] = None - * node_class = node.__class__ # <<<<<<<<<<<<<< - * if node_class is SequenceNode: - * for item in node.value: - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s____class__); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1364; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __Pyx_DECREF(__pyx_v_node_class); - __pyx_v_node_class = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1365 - * self.anchors[node] = None - * node_class = node.__class__ - * if node_class is SequenceNode: # <<<<<<<<<<<<<< - * for item in 
node.value: - * self._anchor_node(item) - */ - __pyx_t_3 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceNode); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1365; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - __pyx_t_1 = (__pyx_v_node_class == __pyx_t_3); - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":1366 - * node_class = node.__class__ - * if node_class is SequenceNode: - * for item in node.value: # <<<<<<<<<<<<<< - * self._anchor_node(item) - * elif node_class is MappingNode: - */ - __pyx_t_3 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - if (PyList_CheckExact(__pyx_t_3) || PyTuple_CheckExact(__pyx_t_3)) { - __pyx_t_4 = 0; __pyx_t_2 = __pyx_t_3; __Pyx_INCREF(__pyx_t_2); - } else { - __pyx_t_4 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_2))) { - if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_2)) break; - __pyx_t_3 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; - } else if (likely(PyTuple_CheckExact(__pyx_t_2))) { - if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_2)) break; - __pyx_t_3 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_4); __Pyx_INCREF(__pyx_t_3); __pyx_t_4++; - } else { - __pyx_t_3 = PyIter_Next(__pyx_t_2); - if (!__pyx_t_3) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1366; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_DECREF(__pyx_v_item); - __pyx_v_item = __pyx_t_3; - __pyx_t_3 = 0; - - /* "_yaml.pyx":1367 - * if node_class is SequenceNode: - * for item in 
node.value: - * self._anchor_node(item) # <<<<<<<<<<<<<< - * elif node_class is MappingNode: - * for key, value in node.value: - */ - __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_item); if (unlikely(__pyx_t_5 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1367; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - goto __pyx_L5; - } - - /* "_yaml.pyx":1368 - * for item in node.value: - * self._anchor_node(item) - * elif node_class is MappingNode: # <<<<<<<<<<<<<< - * for key, value in node.value: - * self._anchor_node(key) - */ - __pyx_t_2 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1368; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_1 = (__pyx_v_node_class == __pyx_t_2); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (__pyx_t_1) { - - /* "_yaml.pyx":1369 - * self._anchor_node(item) - * elif node_class is MappingNode: - * for key, value in node.value: # <<<<<<<<<<<<<< - * self._anchor_node(key) - * self._anchor_node(value) - */ - __pyx_t_2 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - if (PyList_CheckExact(__pyx_t_2) || PyTuple_CheckExact(__pyx_t_2)) { - __pyx_t_4 = 0; __pyx_t_3 = __pyx_t_2; __Pyx_INCREF(__pyx_t_3); - } else { - __pyx_t_4 = -1; __pyx_t_3 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_3); - } - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_3))) { - if (__pyx_t_4 >= PyList_GET_SIZE(__pyx_t_3)) break; - __pyx_t_2 = PyList_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); 
__pyx_t_4++; - } else if (likely(PyTuple_CheckExact(__pyx_t_3))) { - if (__pyx_t_4 >= PyTuple_GET_SIZE(__pyx_t_3)) break; - __pyx_t_2 = PyTuple_GET_ITEM(__pyx_t_3, __pyx_t_4); __Pyx_INCREF(__pyx_t_2); __pyx_t_4++; - } else { - __pyx_t_2 = PyIter_Next(__pyx_t_3); - if (!__pyx_t_2) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_2); - } - if (PyTuple_CheckExact(__pyx_t_2) && likely(PyTuple_GET_SIZE(__pyx_t_2) == 2)) { - PyObject* tuple = __pyx_t_2; - __pyx_t_6 = PyTuple_GET_ITEM(tuple, 0); __Pyx_INCREF(__pyx_t_6); - __pyx_t_7 = PyTuple_GET_ITEM(tuple, 1); __Pyx_INCREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(__pyx_v_key); - __pyx_v_key = __pyx_t_6; - __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_7; - __pyx_t_7 = 0; - } else { - __pyx_t_8 = PyObject_GetIter(__pyx_t_2); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __pyx_t_6 = __Pyx_UnpackItem(__pyx_t_8, 0); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_UnpackItem(__pyx_t_8, 1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - if (__Pyx_EndUnpack(__pyx_t_8, 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1369; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_v_key); - __pyx_v_key = __pyx_t_6; - __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_7; - __pyx_t_7 = 0; - } - - /* "_yaml.pyx":1370 - * elif node_class is MappingNode: - * for key, value in node.value: - * 
self._anchor_node(key) # <<<<<<<<<<<<<< - * self._anchor_node(value) - * return 1 - */ - __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_key); if (unlikely(__pyx_t_5 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1370; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1371 - * for key, value in node.value: - * self._anchor_node(key) - * self._anchor_node(value) # <<<<<<<<<<<<<< - * return 1 - * - */ - __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_anchor_node(__pyx_v_self, __pyx_v_value); if (unlikely(__pyx_t_5 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1371; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0; - goto __pyx_L5; - } - __pyx_L5:; - } - __pyx_L3:; - - /* "_yaml.pyx":1372 - * self._anchor_node(key) - * self._anchor_node(value) - * return 1 # <<<<<<<<<<<<<< - * - * cdef int _serialize_node(self, object node, object parent, object index) except 0: - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("_yaml.CEmitter._anchor_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_node_class); - __Pyx_DECREF(__pyx_v_item); - __Pyx_DECREF(__pyx_v_key); - __Pyx_DECREF(__pyx_v_value); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1374 - * return 1 - * - * cdef int _serialize_node(self, object node, object parent, object index) except 0: # <<<<<<<<<<<<<< - * cdef yaml_event_t event - * cdef int implicit - */ - -static int __pyx_f_5_yaml_8CEmitter__serialize_node(struct __pyx_obj_5_yaml_CEmitter *__pyx_v_self, PyObject *__pyx_v_node, PyObject *__pyx_v_parent, PyObject *__pyx_v_index) { - yaml_event_t __pyx_v_event; - int __pyx_v_implicit; - int 
__pyx_v_plain_implicit; - int __pyx_v_quoted_implicit; - char *__pyx_v_anchor; - char *__pyx_v_tag; - char *__pyx_v_value; - int __pyx_v_length; - int __pyx_v_item_index; - yaml_scalar_style_t __pyx_v_scalar_style; - yaml_sequence_style_t __pyx_v_sequence_style; - yaml_mapping_style_t __pyx_v_mapping_style; - PyObject *__pyx_v_anchor_object; - PyObject *__pyx_v_error; - PyObject *__pyx_v_node_class; - PyObject *__pyx_v_tag_object; - PyObject *__pyx_v_value_object; - PyObject *__pyx_v_style_object; - PyObject *__pyx_v_item; - PyObject *__pyx_v_item_key; - PyObject *__pyx_v_item_value; - int __pyx_r; - PyObject *__pyx_t_1 = NULL; - int __pyx_t_2; - int __pyx_t_3; - PyObject *__pyx_t_4 = NULL; - PyObject *__pyx_t_5 = NULL; - PyObject *__pyx_t_6 = NULL; - PyObject *__pyx_t_7 = NULL; - PyObject *__pyx_t_8 = NULL; - int __pyx_t_9; - int __pyx_t_10; - Py_ssize_t __pyx_t_11; - __Pyx_RefNannySetupContext("_serialize_node"); - __pyx_v_anchor_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_error = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_node_class = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_tag_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_value_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_style_object = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item_key = Py_None; __Pyx_INCREF(Py_None); - __pyx_v_item_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1387 - * cdef yaml_sequence_style_t sequence_style - * cdef yaml_mapping_style_t mapping_style - * anchor_object = self.anchors[node] # <<<<<<<<<<<<<< - * anchor = NULL - * if anchor_object is not None: - */ - __pyx_t_1 = PyObject_GetItem(__pyx_v_self->anchors, __pyx_v_node); if (!__pyx_t_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1387; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1388 - * cdef 
yaml_mapping_style_t mapping_style - * anchor_object = self.anchors[node] - * anchor = NULL # <<<<<<<<<<<<<< - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - */ - __pyx_v_anchor = NULL; - - /* "_yaml.pyx":1389 - * anchor_object = self.anchors[node] - * anchor = NULL - * if anchor_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - */ - __pyx_t_2 = (__pyx_v_anchor_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1390 - * anchor = NULL - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - */ - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_anchor_object); - if (__pyx_t_3) { - - /* "_yaml.pyx":1391 - * if anchor_object is not None: - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_1 = PyUnicode_AsUTF8String(__pyx_v_anchor_object); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1391; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_anchor_object); - __pyx_v_anchor_object = __pyx_t_1; - __pyx_t_1 = 0; - goto __pyx_L4; - } - __pyx_L4:; - - /* "_yaml.pyx":1392 - * if PyUnicode_CheckExact(anchor_object): - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_anchor_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1393 - * anchor_object = PyUnicode_AsUTF8String(anchor_object) - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise 
TypeError("anchor must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1394 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_99), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L6; - } - /*else*/ { - - /* "_yaml.pyx":1396 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * if node in self.serialized_nodes: - */ - __pyx_t_1 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_100), NULL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_Raise(__pyx_t_1, 0, 0); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L6:; - goto __pyx_L5; - } - __pyx_L5:; - - /* "_yaml.pyx":1397 - * else: - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) # <<<<<<<<<<<<<< - * if node in self.serialized_nodes: - * if yaml_alias_event_initialize(&event, anchor) == 0: - */ - __pyx_v_anchor = PyString_AS_STRING(__pyx_v_anchor_object); - goto __pyx_L3; - } - __pyx_L3:; - - /* "_yaml.pyx":1398 - * raise TypeError(u"anchor must be a string") - * anchor = PyString_AS_STRING(anchor_object) - * if node in self.serialized_nodes: # 
<<<<<<<<<<<<<< - * if yaml_alias_event_initialize(&event, anchor) == 0: - * raise MemoryError - */ - __pyx_t_2 = ((PySequence_Contains(__pyx_v_self->serialized_nodes, __pyx_v_node))); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1398; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_t_2) { - - /* "_yaml.pyx":1399 - * anchor = PyString_AS_STRING(anchor_object) - * if node in self.serialized_nodes: - * if yaml_alias_event_initialize(&event, anchor) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_2 = (yaml_alias_event_initialize((&__pyx_v_event), __pyx_v_anchor) == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1400 - * if node in self.serialized_nodes: - * if yaml_alias_event_initialize(&event, anchor) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1400; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L8; - } - __pyx_L8:; - - /* "_yaml.pyx":1401 - * if yaml_alias_event_initialize(&event, anchor) == 0: - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1401; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = (__pyx_t_3 == 0); - if (__pyx_t_2) { - - /* "_yaml.pyx":1402 - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * else: - */ - __pyx_t_1 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1402; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1403 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * else: - * node_class = node.__class__ - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1403; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L9; - } - __pyx_L9:; - goto __pyx_L7; - } - /*else*/ { - - /* "_yaml.pyx":1405 - * raise error - * else: - * node_class = node.__class__ # <<<<<<<<<<<<<< - * self.serialized_nodes[node] = True - * self.descend_resolver(parent, index) - */ - __pyx_t_1 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s____class__); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1405; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_v_node_class); - __pyx_v_node_class = __pyx_t_1; - __pyx_t_1 = 0; - - /* "_yaml.pyx":1406 - * else: - * node_class = node.__class__ - * self.serialized_nodes[node] = True # <<<<<<<<<<<<<< - * self.descend_resolver(parent, index) - * if node_class is ScalarNode: - */ - __pyx_t_1 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1406; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - if (PyObject_SetItem(__pyx_v_self->serialized_nodes, __pyx_v_node, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1406; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":1407 - * node_class = node.__class__ - * self.serialized_nodes[node] = True - * self.descend_resolver(parent, index) # <<<<<<<<<<<<<< - * if node_class is ScalarNode: - * plain_implicit = 0 - */ - __pyx_t_1 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__descend_resolver); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno 
= 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - __Pyx_INCREF(__pyx_v_parent); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_parent); - __Pyx_GIVEREF(__pyx_v_parent); - __Pyx_INCREF(__pyx_v_index); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_v_index); - __Pyx_GIVEREF(__pyx_v_index); - __pyx_t_5 = PyObject_Call(__pyx_t_1, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1407; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - - /* "_yaml.pyx":1408 - * self.serialized_nodes[node] = True - * self.descend_resolver(parent, index) - * if node_class is ScalarNode: # <<<<<<<<<<<<<< - * plain_implicit = 0 - * quoted_implicit = 0 - */ - __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1408; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_2 = (__pyx_v_node_class == __pyx_t_5); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1409 - * self.descend_resolver(parent, index) - * if node_class is ScalarNode: - * plain_implicit = 0 # <<<<<<<<<<<<<< - * quoted_implicit = 0 - * tag_object = node.tag - */ - __pyx_v_plain_implicit = 0; - - /* "_yaml.pyx":1410 - * if node_class is ScalarNode: - * plain_implicit = 0 - * quoted_implicit = 0 # <<<<<<<<<<<<<< - * tag_object = node.tag - * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: - */ - __pyx_v_quoted_implicit = 0; - - /* "_yaml.pyx":1411 - * plain_implicit = 0 - * quoted_implicit = 0 - * tag_object = node.tag # <<<<<<<<<<<<<< - * if 
self.resolve(ScalarNode, node.value, (True, False)) == tag_object: - * plain_implicit = 1 - */ - __pyx_t_5 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1411; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_5; - __pyx_t_5 = 0; - - /* "_yaml.pyx":1412 - * quoted_implicit = 0 - * tag_object = node.tag - * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: # <<<<<<<<<<<<<< - * plain_implicit = 1 - * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: - */ - __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_1 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_6 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_8)); - PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - 
PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_7); - __pyx_t_6 = 0; - __pyx_t_7 = 0; - __pyx_t_7 = PyTuple_New(3); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_7)); - PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_7, 2, ((PyObject *)__pyx_t_8)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_8)); - __pyx_t_4 = 0; - __pyx_t_1 = 0; - __pyx_t_8 = 0; - __pyx_t_8 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_7), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_7)); __pyx_t_7 = 0; - __pyx_t_7 = PyObject_RichCompare(__pyx_t_8, __pyx_v_tag_object, Py_EQ); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_7); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1412; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1413 - * tag_object = node.tag - * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: - * plain_implicit = 1 # <<<<<<<<<<<<<< - * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: - * quoted_implicit = 1 - */ - __pyx_v_plain_implicit = 1; - goto __pyx_L11; - } - __pyx_L11:; - - /* "_yaml.pyx":1414 - * if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: - * plain_implicit = 1 - * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: # <<<<<<<<<<<<<< - * quoted_implicit = 1 - * 
tag = NULL - */ - __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_5 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_1 = __Pyx_PyBool_FromLong(0); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_4 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_1); - __Pyx_GIVEREF(__pyx_t_1); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - __pyx_t_1 = 0; - __pyx_t_4 = 0; - __pyx_t_4 = PyTuple_New(3); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_4)); - PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - PyTuple_SET_ITEM(__pyx_t_4, 2, ((PyObject *)__pyx_t_6)); - __Pyx_GIVEREF(((PyObject *)__pyx_t_6)); - __pyx_t_8 = 0; - __pyx_t_5 = 0; - __pyx_t_6 = 0; - __pyx_t_6 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_t_4), 
NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0; - __pyx_t_4 = PyObject_RichCompare(__pyx_t_6, __pyx_v_tag_object, Py_EQ); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1414; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (__pyx_t_2) { - - /* "_yaml.pyx":1415 - * plain_implicit = 1 - * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: - * quoted_implicit = 1 # <<<<<<<<<<<<<< - * tag = NULL - * if tag_object is not None: - */ - __pyx_v_quoted_implicit = 1; - goto __pyx_L12; - } - __pyx_L12:; - - /* "_yaml.pyx":1416 - * if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: - * quoted_implicit = 1 - * tag = NULL # <<<<<<<<<<<<<< - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1417 - * quoted_implicit = 1 - * tag = NULL - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_2 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_2) { - - /* "_yaml.pyx":1418 - * tag = NULL - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_3) { - - /* "_yaml.pyx":1419 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = 
PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1419; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L14; - } - __pyx_L14:; - - /* "_yaml.pyx":1420 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1421 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1422 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_101), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L16; - } - /*else*/ { - - /* "_yaml.pyx":1424 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * value_object = node.value - */ - __pyx_t_4 = 
PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_102), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L16:; - goto __pyx_L15; - } - __pyx_L15:; - - /* "_yaml.pyx":1425 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * value_object = node.value - * if PyUnicode_CheckExact(value_object): - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L13; - } - __pyx_L13:; - - /* "_yaml.pyx":1426 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * value_object = node.value # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) - */ - __pyx_t_4 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1426; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_value_object); - __pyx_v_value_object = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":1427 - * tag = PyString_AS_STRING(tag_object) - * value_object = node.value - * if PyUnicode_CheckExact(value_object): # <<<<<<<<<<<<<< - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): - */ - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_value_object); - if (__pyx_t_3) { - - /* "_yaml.pyx":1428 - * value_object = node.value - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_4 = PyUnicode_AsUTF8String(__pyx_v_value_object); if 
(unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1428; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_value_object); - __pyx_v_value_object = __pyx_t_4; - __pyx_t_4 = 0; - goto __pyx_L17; - } - __pyx_L17:; - - /* "_yaml.pyx":1429 - * if PyUnicode_CheckExact(value_object): - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("value must be a string") - */ - __pyx_t_2 = (!PyString_CheckExact(__pyx_v_value_object)); - if (__pyx_t_2) { - - /* "_yaml.pyx":1430 - * value_object = PyUnicode_AsUTF8String(value_object) - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("value must be a string") - * else: - */ - __pyx_t_2 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_2) { - - /* "_yaml.pyx":1431 - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("value must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"value must be a string") - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_103), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L19; - } - /*else*/ { - - /* "_yaml.pyx":1433 - * raise TypeError("value must be a string") - * else: - * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<< - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - */ - __pyx_t_4 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_104), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_Raise(__pyx_t_4, 0, 0); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L19:; - goto __pyx_L18; - } - __pyx_L18:; - - /* "_yaml.pyx":1434 - * else: - * raise TypeError(u"value must be a string") - * value = PyString_AS_STRING(value_object) # <<<<<<<<<<<<<< - * length = PyString_GET_SIZE(value_object) - * style_object = node.style - */ - __pyx_v_value = PyString_AS_STRING(__pyx_v_value_object); - - /* "_yaml.pyx":1435 - * raise TypeError(u"value must be a string") - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) # <<<<<<<<<<<<<< - * style_object = node.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE - */ - __pyx_v_length = PyString_GET_SIZE(__pyx_v_value_object); - - /* "_yaml.pyx":1436 - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - * style_object = node.style # <<<<<<<<<<<<<< - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == u"'": - */ - __pyx_t_4 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__style); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1436; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_style_object); - __pyx_v_style_object = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":1437 - * length = PyString_GET_SIZE(value_object) - * style_object = node.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE # <<<<<<<<<<<<<< - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_PLAIN_SCALAR_STYLE; - - /* "_yaml.pyx":1438 - * style_object = node.style - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == u"'": # <<<<<<<<<<<<<< - * scalar_style = 
YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": - */ - __pyx_t_4 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_21), Py_EQ); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (!__pyx_t_2) { - __pyx_t_9 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_21), Py_EQ); if (unlikely(__pyx_t_9 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1438; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = __pyx_t_9; - } else { - __pyx_t_10 = __pyx_t_2; - } - if (__pyx_t_10) { - - /* "_yaml.pyx":1439 - * scalar_style = YAML_PLAIN_SCALAR_STYLE - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE; - goto __pyx_L20; - } - - /* "_yaml.pyx":1440 - * if style_object == "'" or style_object == u"'": - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": # <<<<<<<<<<<<<< - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == "|" or style_object == u"|": - */ - __pyx_t_4 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_22), Py_EQ); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1440; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1440; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (!__pyx_t_10) { - __pyx_t_2 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_22), Py_EQ); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1440; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_9 = __pyx_t_2; - } else { - __pyx_t_9 = __pyx_t_10; - } - if (__pyx_t_9) { - - /* "_yaml.pyx":1441 - * scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == "|" or style_object == u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE; - goto __pyx_L20; - } - - /* "_yaml.pyx":1442 - * elif style_object == "\"" or style_object == u"\"": - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == "|" or style_object == u"|": # <<<<<<<<<<<<<< - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": - */ - __pyx_t_4 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_23), Py_EQ); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_9 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_9 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (!__pyx_t_9) { - __pyx_t_10 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_23), Py_EQ); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1442; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_2 = __pyx_t_10; - } else { - __pyx_t_2 = __pyx_t_9; - } - if (__pyx_t_2) { - - /* "_yaml.pyx":1443 - * scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - * elif style_object == "|" or style_object == 
u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE # <<<<<<<<<<<<<< - * elif style_object == ">" or style_object == u">": - * scalar_style = YAML_FOLDED_SCALAR_STYLE - */ - __pyx_v_scalar_style = YAML_LITERAL_SCALAR_STYLE; - goto __pyx_L20; - } - - /* "_yaml.pyx":1444 - * elif style_object == "|" or style_object == u"|": - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": # <<<<<<<<<<<<<< - * scalar_style = YAML_FOLDED_SCALAR_STYLE - * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, - */ - __pyx_t_4 = PyObject_RichCompare(__pyx_v_style_object, ((PyObject *)__pyx_kp_s_24), Py_EQ); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_4); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (!__pyx_t_2) { - __pyx_t_9 = __Pyx_PyUnicode_Equals(__pyx_v_style_object, ((PyObject *)__pyx_kp_u_24), Py_EQ); if (unlikely(__pyx_t_9 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1444; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = __pyx_t_9; - } else { - __pyx_t_10 = __pyx_t_2; - } - if (__pyx_t_10) { - - /* "_yaml.pyx":1445 - * scalar_style = YAML_LITERAL_SCALAR_STYLE - * elif style_object == ">" or style_object == u">": - * scalar_style = YAML_FOLDED_SCALAR_STYLE # <<<<<<<<<<<<<< - * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: - */ - __pyx_v_scalar_style = YAML_FOLDED_SCALAR_STYLE; - goto __pyx_L20; - } - __pyx_L20:; - - /* "_yaml.pyx":1447 - * scalar_style = YAML_FOLDED_SCALAR_STYLE - * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * 
if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_10 = (yaml_scalar_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_value, __pyx_v_length, __pyx_v_plain_implicit, __pyx_v_quoted_implicit, __pyx_v_scalar_style) == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1448 - * if yaml_scalar_event_initialize(&event, anchor, tag, value, length, - * plain_implicit, quoted_implicit, scalar_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1448; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L21; - } - __pyx_L21:; - - /* "_yaml.pyx":1449 - * plain_implicit, quoted_implicit, scalar_style) == 0: - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1449; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = (__pyx_t_3 == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1450 - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * elif node_class is SequenceNode: - */ - __pyx_t_4 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1450; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":1451 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * elif node_class is SequenceNode: - * implicit = 0 - */ - __Pyx_Raise(__pyx_v_error, 0, 0); 
- {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1451; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L22; - } - __pyx_L22:; - goto __pyx_L10; - } - - /* "_yaml.pyx":1452 - * error = self._emitter_error() - * raise error - * elif node_class is SequenceNode: # <<<<<<<<<<<<<< - * implicit = 0 - * tag_object = node.tag - */ - __pyx_t_4 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceNode); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1452; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_10 = (__pyx_v_node_class == __pyx_t_4); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1453 - * raise error - * elif node_class is SequenceNode: - * implicit = 0 # <<<<<<<<<<<<<< - * tag_object = node.tag - * if self.resolve(SequenceNode, node.value, True) == tag_object: - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":1454 - * elif node_class is SequenceNode: - * implicit = 0 - * tag_object = node.tag # <<<<<<<<<<<<<< - * if self.resolve(SequenceNode, node.value, True) == tag_object: - * implicit = 1 - */ - __pyx_t_4 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__tag); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1454; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_4; - __pyx_t_4 = 0; - - /* "_yaml.pyx":1455 - * implicit = 0 - * tag_object = node.tag - * if self.resolve(SequenceNode, node.value, True) == tag_object: # <<<<<<<<<<<<<< - * implicit = 1 - * tag = NULL - */ - __pyx_t_4 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_6 = __Pyx_GetName(__pyx_m, __pyx_n_s__SequenceNode); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_7 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_5 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_8 = PyTuple_New(3); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_8)); - PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_6); - __Pyx_GIVEREF(__pyx_t_6); - PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_7); - PyTuple_SET_ITEM(__pyx_t_8, 2, __pyx_t_5); - __Pyx_GIVEREF(__pyx_t_5); - __pyx_t_6 = 0; - __pyx_t_7 = 0; - __pyx_t_5 = 0; - __pyx_t_5 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_t_8), NULL); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_8)); __pyx_t_8 = 0; - __pyx_t_8 = PyObject_RichCompare(__pyx_t_5, __pyx_v_tag_object, Py_EQ); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1455; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1456 - * tag_object = node.tag - * if self.resolve(SequenceNode, node.value, True) == tag_object: - * implicit = 1 # <<<<<<<<<<<<<< - * tag = NULL - * if tag_object is not None: - */ - __pyx_v_implicit = 1; - goto __pyx_L23; - } - 
__pyx_L23:; - - /* "_yaml.pyx":1457 - * if self.resolve(SequenceNode, node.value, True) == tag_object: - * implicit = 1 - * tag = NULL # <<<<<<<<<<<<<< - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1458 - * implicit = 1 - * tag = NULL - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_10 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_10) { - - /* "_yaml.pyx":1459 - * tag = NULL - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_3) { - - /* "_yaml.pyx":1460 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_8 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1460; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_8; - __pyx_t_8 = 0; - goto __pyx_L25; - } - __pyx_L25:; - - /* "_yaml.pyx":1461 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_10 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_10) { - - /* "_yaml.pyx":1462 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_10 = (PY_MAJOR_VERSION < 3); 
- if (__pyx_t_10) { - - /* "_yaml.pyx":1463 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_105), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L27; - } - /*else*/ { - - /* "_yaml.pyx":1465 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - */ - __pyx_t_8 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_106), NULL); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_Raise(__pyx_t_8, 0, 0); - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L27:; - goto __pyx_L26; - } - __pyx_L26:; - - /* "_yaml.pyx":1466 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if node.flow_style: - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L24; - } - __pyx_L24:; - - /* "_yaml.pyx":1467 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE # <<<<<<<<<<<<<< - * if node.flow_style: - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - */ - __pyx_v_sequence_style = 
YAML_BLOCK_SEQUENCE_STYLE; - - /* "_yaml.pyx":1468 - * tag = PyString_AS_STRING(tag_object) - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if node.flow_style: # <<<<<<<<<<<<<< - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - * if yaml_sequence_start_event_initialize(&event, anchor, tag, - */ - __pyx_t_8 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__flow_style); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1468; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_8); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1468; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1469 - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - * if node.flow_style: - * sequence_style = YAML_FLOW_SEQUENCE_STYLE # <<<<<<<<<<<<<< - * if yaml_sequence_start_event_initialize(&event, anchor, tag, - * implicit, sequence_style) == 0: - */ - __pyx_v_sequence_style = YAML_FLOW_SEQUENCE_STYLE; - goto __pyx_L28; - } - __pyx_L28:; - - /* "_yaml.pyx":1471 - * sequence_style = YAML_FLOW_SEQUENCE_STYLE - * if yaml_sequence_start_event_initialize(&event, anchor, tag, - * implicit, sequence_style) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_10 = (yaml_sequence_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_sequence_style) == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1472 - * if yaml_sequence_start_event_initialize(&event, anchor, tag, - * implicit, sequence_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1472; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L29; - } - __pyx_L29:; - - /* "_yaml.pyx":1473 - * implicit, 
sequence_style) == 0: - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1473; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = (__pyx_t_3 == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1474 - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * item_index = 0 - */ - __pyx_t_8 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1474; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_8; - __pyx_t_8 = 0; - - /* "_yaml.pyx":1475 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * item_index = 0 - * for item in node.value: - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1475; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L30; - } - __pyx_L30:; - - /* "_yaml.pyx":1476 - * error = self._emitter_error() - * raise error - * item_index = 0 # <<<<<<<<<<<<<< - * for item in node.value: - * self._serialize_node(item, node, item_index) - */ - __pyx_v_item_index = 0; - - /* "_yaml.pyx":1477 - * raise error - * item_index = 0 - * for item in node.value: # <<<<<<<<<<<<<< - * self._serialize_node(item, node, item_index) - * item_index = item_index+1 - */ - __pyx_t_8 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - if (PyList_CheckExact(__pyx_t_8) || 
PyTuple_CheckExact(__pyx_t_8)) { - __pyx_t_11 = 0; __pyx_t_5 = __pyx_t_8; __Pyx_INCREF(__pyx_t_5); - } else { - __pyx_t_11 = -1; __pyx_t_5 = PyObject_GetIter(__pyx_t_8); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - } - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_5))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_5)) break; - __pyx_t_8 = PyList_GET_ITEM(__pyx_t_5, __pyx_t_11); __Pyx_INCREF(__pyx_t_8); __pyx_t_11++; - } else if (likely(PyTuple_CheckExact(__pyx_t_5))) { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_5)) break; - __pyx_t_8 = PyTuple_GET_ITEM(__pyx_t_5, __pyx_t_11); __Pyx_INCREF(__pyx_t_8); __pyx_t_11++; - } else { - __pyx_t_8 = PyIter_Next(__pyx_t_5); - if (!__pyx_t_8) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1477; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_8); - } - __Pyx_DECREF(__pyx_v_item); - __pyx_v_item = __pyx_t_8; - __pyx_t_8 = 0; - - /* "_yaml.pyx":1478 - * item_index = 0 - * for item in node.value: - * self._serialize_node(item, node, item_index) # <<<<<<<<<<<<<< - * item_index = item_index+1 - * yaml_sequence_end_event_initialize(&event) - */ - __pyx_t_8 = PyInt_FromLong(__pyx_v_item_index); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1478; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item, __pyx_v_node, __pyx_t_8); if (unlikely(__pyx_t_3 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1478; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - - /* "_yaml.pyx":1479 - * for item in node.value: - * self._serialize_node(item, node, item_index) - * item_index = item_index+1 # <<<<<<<<<<<<<< - * 
yaml_sequence_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_v_item_index = (__pyx_v_item_index + 1); - } - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - - /* "_yaml.pyx":1480 - * self._serialize_node(item, node, item_index) - * item_index = item_index+1 - * yaml_sequence_end_event_initialize(&event) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - yaml_sequence_end_event_initialize((&__pyx_v_event)); - - /* "_yaml.pyx":1481 - * item_index = item_index+1 - * yaml_sequence_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1481; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = (__pyx_t_3 == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1482 - * yaml_sequence_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * elif node_class is MappingNode: - */ - __pyx_t_5 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1482; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_5; - __pyx_t_5 = 0; - - /* "_yaml.pyx":1483 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * elif node_class is MappingNode: - * implicit = 0 - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1483; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L33; - } - __pyx_L33:; - goto __pyx_L10; - } - - /* "_yaml.pyx":1484 
- * error = self._emitter_error() - * raise error - * elif node_class is MappingNode: # <<<<<<<<<<<<<< - * implicit = 0 - * tag_object = node.tag - */ - __pyx_t_5 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1484; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_10 = (__pyx_v_node_class == __pyx_t_5); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1485 - * raise error - * elif node_class is MappingNode: - * implicit = 0 # <<<<<<<<<<<<<< - * tag_object = node.tag - * if self.resolve(MappingNode, node.value, True) == tag_object: - */ - __pyx_v_implicit = 0; - - /* "_yaml.pyx":1486 - * elif node_class is MappingNode: - * implicit = 0 - * tag_object = node.tag # <<<<<<<<<<<<<< - * if self.resolve(MappingNode, node.value, True) == tag_object: - * implicit = 1 - */ - __pyx_t_5 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__tag); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1486; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_5; - __pyx_t_5 = 0; - - /* "_yaml.pyx":1487 - * implicit = 0 - * tag_object = node.tag - * if self.resolve(MappingNode, node.value, True) == tag_object: # <<<<<<<<<<<<<< - * implicit = 1 - * tag = NULL - */ - __pyx_t_5 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__resolve); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_8 = __Pyx_GetName(__pyx_m, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __pyx_t_4 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __pyx_t_7 = __Pyx_PyBool_FromLong(1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_6 = PyTuple_New(3); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_6)); - PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_8); - __Pyx_GIVEREF(__pyx_t_8); - PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_4); - __Pyx_GIVEREF(__pyx_t_4); - PyTuple_SET_ITEM(__pyx_t_6, 2, __pyx_t_7); - __Pyx_GIVEREF(__pyx_t_7); - __pyx_t_8 = 0; - __pyx_t_4 = 0; - __pyx_t_7 = 0; - __pyx_t_7 = PyObject_Call(__pyx_t_5, ((PyObject *)__pyx_t_6), NULL); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_6)); __pyx_t_6 = 0; - __pyx_t_6 = PyObject_RichCompare(__pyx_t_7, __pyx_v_tag_object, Py_EQ); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1487; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1488 - * tag_object = node.tag - * if self.resolve(MappingNode, node.value, True) == tag_object: - * implicit = 1 # <<<<<<<<<<<<<< - * tag = NULL - * if tag_object is not None: - */ - __pyx_v_implicit = 1; - goto __pyx_L34; - } - __pyx_L34:; - - /* "_yaml.pyx":1489 - * if self.resolve(MappingNode, node.value, True) == tag_object: - * implicit = 1 - * tag = NULL # <<<<<<<<<<<<<< - * if tag_object is not None: - * if 
PyUnicode_CheckExact(tag_object): - */ - __pyx_v_tag = NULL; - - /* "_yaml.pyx":1490 - * implicit = 1 - * tag = NULL - * if tag_object is not None: # <<<<<<<<<<<<<< - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - */ - __pyx_t_10 = (__pyx_v_tag_object != Py_None); - if (__pyx_t_10) { - - /* "_yaml.pyx":1491 - * tag = NULL - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): # <<<<<<<<<<<<<< - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - */ - __pyx_t_3 = PyUnicode_CheckExact(__pyx_v_tag_object); - if (__pyx_t_3) { - - /* "_yaml.pyx":1492 - * if tag_object is not None: - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) # <<<<<<<<<<<<<< - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - */ - __pyx_t_6 = PyUnicode_AsUTF8String(__pyx_v_tag_object); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1492; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_v_tag_object); - __pyx_v_tag_object = __pyx_t_6; - __pyx_t_6 = 0; - goto __pyx_L36; - } - __pyx_L36:; - - /* "_yaml.pyx":1493 - * if PyUnicode_CheckExact(tag_object): - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): # <<<<<<<<<<<<<< - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") - */ - __pyx_t_10 = (!PyString_CheckExact(__pyx_v_tag_object)); - if (__pyx_t_10) { - - /* "_yaml.pyx":1494 - * tag_object = PyUnicode_AsUTF8String(tag_object) - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: # <<<<<<<<<<<<<< - * raise TypeError("tag must be a string") - * else: - */ - __pyx_t_10 = (PY_MAJOR_VERSION < 3); - if (__pyx_t_10) { - - /* "_yaml.pyx":1495 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - 
* raise TypeError(u"tag must be a string") - */ - __pyx_t_6 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_107), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L38; - } - /*else*/ { - - /* "_yaml.pyx":1497 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * mapping_style = YAML_BLOCK_MAPPING_STYLE - */ - __pyx_t_6 = PyObject_Call(__pyx_builtin_TypeError, ((PyObject *)__pyx_k_tuple_108), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_Raise(__pyx_t_6, 0, 0); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __pyx_L38:; - goto __pyx_L37; - } - __pyx_L37:; - - /* "_yaml.pyx":1498 - * else: - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) # <<<<<<<<<<<<<< - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if node.flow_style: - */ - __pyx_v_tag = PyString_AS_STRING(__pyx_v_tag_object); - goto __pyx_L35; - } - __pyx_L35:; - - /* "_yaml.pyx":1499 - * raise TypeError(u"tag must be a string") - * tag = PyString_AS_STRING(tag_object) - * mapping_style = YAML_BLOCK_MAPPING_STYLE # <<<<<<<<<<<<<< - * if node.flow_style: - * mapping_style = YAML_FLOW_MAPPING_STYLE - */ - __pyx_v_mapping_style = YAML_BLOCK_MAPPING_STYLE; - - /* "_yaml.pyx":1500 - * tag = PyString_AS_STRING(tag_object) - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if node.flow_style: # <<<<<<<<<<<<<< - * mapping_style = 
YAML_FLOW_MAPPING_STYLE - * if yaml_mapping_start_event_initialize(&event, anchor, tag, - */ - __pyx_t_6 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__flow_style); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1500; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __pyx_t_10 = __Pyx_PyObject_IsTrue(__pyx_t_6); if (unlikely(__pyx_t_10 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1500; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - if (__pyx_t_10) { - - /* "_yaml.pyx":1501 - * mapping_style = YAML_BLOCK_MAPPING_STYLE - * if node.flow_style: - * mapping_style = YAML_FLOW_MAPPING_STYLE # <<<<<<<<<<<<<< - * if yaml_mapping_start_event_initialize(&event, anchor, tag, - * implicit, mapping_style) == 0: - */ - __pyx_v_mapping_style = YAML_FLOW_MAPPING_STYLE; - goto __pyx_L39; - } - __pyx_L39:; - - /* "_yaml.pyx":1503 - * mapping_style = YAML_FLOW_MAPPING_STYLE - * if yaml_mapping_start_event_initialize(&event, anchor, tag, - * implicit, mapping_style) == 0: # <<<<<<<<<<<<<< - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_10 = (yaml_mapping_start_event_initialize((&__pyx_v_event), __pyx_v_anchor, __pyx_v_tag, __pyx_v_implicit, __pyx_v_mapping_style) == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1504 - * if yaml_mapping_start_event_initialize(&event, anchor, tag, - * implicit, mapping_style) == 0: - * raise MemoryError # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1504; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L40; - } - __pyx_L40:; - - /* "_yaml.pyx":1505 - * implicit, mapping_style) == 0: - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = 
yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1505; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = (__pyx_t_3 == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1506 - * raise MemoryError - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * for item_key, item_value in node.value: - */ - __pyx_t_6 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1506; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_6; - __pyx_t_6 = 0; - - /* "_yaml.pyx":1507 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * for item_key, item_value in node.value: - * self._serialize_node(item_key, node, None) - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1507; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L41; - } - __pyx_L41:; - - /* "_yaml.pyx":1508 - * error = self._emitter_error() - * raise error - * for item_key, item_value in node.value: # <<<<<<<<<<<<<< - * self._serialize_node(item_key, node, None) - * self._serialize_node(item_value, node, item_key) - */ - __pyx_t_6 = PyObject_GetAttr(__pyx_v_node, __pyx_n_s__value); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - if (PyList_CheckExact(__pyx_t_6) || PyTuple_CheckExact(__pyx_t_6)) { - __pyx_t_11 = 0; __pyx_t_7 = __pyx_t_6; __Pyx_INCREF(__pyx_t_7); - } else { - __pyx_t_11 = -1; __pyx_t_7 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - } - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - for (;;) { - if (likely(PyList_CheckExact(__pyx_t_7))) { - if (__pyx_t_11 >= PyList_GET_SIZE(__pyx_t_7)) break; - __pyx_t_6 = PyList_GET_ITEM(__pyx_t_7, __pyx_t_11); __Pyx_INCREF(__pyx_t_6); __pyx_t_11++; - } else if (likely(PyTuple_CheckExact(__pyx_t_7))) { - if (__pyx_t_11 >= PyTuple_GET_SIZE(__pyx_t_7)) break; - __pyx_t_6 = PyTuple_GET_ITEM(__pyx_t_7, __pyx_t_11); __Pyx_INCREF(__pyx_t_6); __pyx_t_11++; - } else { - __pyx_t_6 = PyIter_Next(__pyx_t_7); - if (!__pyx_t_6) { - if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - break; - } - __Pyx_GOTREF(__pyx_t_6); - } - if (PyTuple_CheckExact(__pyx_t_6) && likely(PyTuple_GET_SIZE(__pyx_t_6) == 2)) { - PyObject* tuple = __pyx_t_6; - __pyx_t_5 = PyTuple_GET_ITEM(tuple, 0); __Pyx_INCREF(__pyx_t_5); - __pyx_t_4 = PyTuple_GET_ITEM(tuple, 1); __Pyx_INCREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __Pyx_DECREF(__pyx_v_item_key); - __pyx_v_item_key = __pyx_t_5; - __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_v_item_value); - __pyx_v_item_value = __pyx_t_4; - __pyx_t_4 = 0; - } else { - __pyx_t_8 = PyObject_GetIter(__pyx_t_6); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_8); - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - __pyx_t_5 = __Pyx_UnpackItem(__pyx_t_8, 0); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_5); - __pyx_t_4 = __Pyx_UnpackItem(__pyx_t_8, 1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - if (__Pyx_EndUnpack(__pyx_t_8, 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1508; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_DECREF(__pyx_t_8); __pyx_t_8 = 0; - __Pyx_DECREF(__pyx_v_item_key); - __pyx_v_item_key = __pyx_t_5; - __pyx_t_5 = 0; - __Pyx_DECREF(__pyx_v_item_value); - __pyx_v_item_value = __pyx_t_4; - __pyx_t_4 = 0; - } - - /* "_yaml.pyx":1509 - * raise error - * for item_key, item_value in node.value: - * self._serialize_node(item_key, node, None) # <<<<<<<<<<<<<< - * self._serialize_node(item_value, node, item_key) - * yaml_mapping_end_event_initialize(&event) - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_key, __pyx_v_node, Py_None); if (unlikely(__pyx_t_3 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1509; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - - /* "_yaml.pyx":1510 - * for item_key, item_value in node.value: - * self._serialize_node(item_key, node, None) - * self._serialize_node(item_value, node, item_key) # <<<<<<<<<<<<<< - * yaml_mapping_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - */ - __pyx_t_3 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_serialize_node(__pyx_v_self, __pyx_v_item_value, __pyx_v_node, __pyx_v_item_key); if (unlikely(__pyx_t_3 == 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1510; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - } - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - - /* "_yaml.pyx":1511 - * self._serialize_node(item_key, node, None) - * self._serialize_node(item_value, node, item_key) - * yaml_mapping_end_event_initialize(&event) # <<<<<<<<<<<<<< - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - */ - yaml_mapping_end_event_initialize((&__pyx_v_event)); - - /* "_yaml.pyx":1512 - * self._serialize_node(item_value, node, item_key) - * yaml_mapping_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: # <<<<<<<<<<<<<< - * error = self._emitter_error() - * raise error - */ - __pyx_t_3 = 
yaml_emitter_emit((&__pyx_v_self->emitter), (&__pyx_v_event)); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1512; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_t_10 = (__pyx_t_3 == 0); - if (__pyx_t_10) { - - /* "_yaml.pyx":1513 - * yaml_mapping_end_event_initialize(&event) - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() # <<<<<<<<<<<<<< - * raise error - * self.ascend_resolver() - */ - __pyx_t_7 = ((struct __pyx_vtabstruct_5_yaml_CEmitter *)__pyx_v_self->__pyx_vtab)->_emitter_error(__pyx_v_self); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1513; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __Pyx_DECREF(__pyx_v_error); - __pyx_v_error = __pyx_t_7; - __pyx_t_7 = 0; - - /* "_yaml.pyx":1514 - * if yaml_emitter_emit(&self.emitter, &event) == 0: - * error = self._emitter_error() - * raise error # <<<<<<<<<<<<<< - * self.ascend_resolver() - * return 1 - */ - __Pyx_Raise(__pyx_v_error, 0, 0); - {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1514; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - goto __pyx_L44; - } - __pyx_L44:; - goto __pyx_L10; - } - __pyx_L10:; - - /* "_yaml.pyx":1515 - * error = self._emitter_error() - * raise error - * self.ascend_resolver() # <<<<<<<<<<<<<< - * return 1 - * - */ - __pyx_t_7 = PyObject_GetAttr(((PyObject *)__pyx_v_self), __pyx_n_s__ascend_resolver); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1515; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_7); - __pyx_t_6 = PyObject_Call(__pyx_t_7, ((PyObject *)__pyx_empty_tuple), NULL); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1515; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_6); - __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0; - __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0; - } - __pyx_L7:; - - /* "_yaml.pyx":1516 - * raise error - * self.ascend_resolver() - * 
return 1 # <<<<<<<<<<<<<< - * - * cdef int output_handler(void *data, char *buffer, int size) except 0: - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_XDECREF(__pyx_t_5); - __Pyx_XDECREF(__pyx_t_6); - __Pyx_XDECREF(__pyx_t_7); - __Pyx_XDECREF(__pyx_t_8); - __Pyx_AddTraceback("_yaml.CEmitter._serialize_node"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF(__pyx_v_anchor_object); - __Pyx_DECREF(__pyx_v_error); - __Pyx_DECREF(__pyx_v_node_class); - __Pyx_DECREF(__pyx_v_tag_object); - __Pyx_DECREF(__pyx_v_value_object); - __Pyx_DECREF(__pyx_v_style_object); - __Pyx_DECREF(__pyx_v_item); - __Pyx_DECREF(__pyx_v_item_key); - __Pyx_DECREF(__pyx_v_item_value); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -/* "_yaml.pyx":1518 - * return 1 - * - * cdef int output_handler(void *data, char *buffer, int size) except 0: # <<<<<<<<<<<<<< - * cdef CEmitter emitter - * emitter = data - */ - -static int __pyx_f_5_yaml_output_handler(void *__pyx_v_data, char *__pyx_v_buffer, int __pyx_v_size) { - struct __pyx_obj_5_yaml_CEmitter *__pyx_v_emitter; - PyObject *__pyx_v_value; - int __pyx_r; - int __pyx_t_1; - PyObject *__pyx_t_2 = NULL; - PyObject *__pyx_t_3 = NULL; - PyObject *__pyx_t_4 = NULL; - __Pyx_RefNannySetupContext("output_handler"); - __pyx_v_emitter = ((struct __pyx_obj_5_yaml_CEmitter *)Py_None); __Pyx_INCREF(Py_None); - __pyx_v_value = Py_None; __Pyx_INCREF(Py_None); - - /* "_yaml.pyx":1520 - * cdef int output_handler(void *data, char *buffer, int size) except 0: - * cdef CEmitter emitter - * emitter = data # <<<<<<<<<<<<<< - * if emitter.dump_unicode == 0: - * value = PyString_FromStringAndSize(buffer, size) - */ - __Pyx_INCREF(((PyObject *)((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_data))); - __Pyx_DECREF(((PyObject *)__pyx_v_emitter)); - __pyx_v_emitter = ((struct __pyx_obj_5_yaml_CEmitter *)__pyx_v_data); - - /* "_yaml.pyx":1521 - * cdef CEmitter emitter - 
* emitter = data - * if emitter.dump_unicode == 0: # <<<<<<<<<<<<<< - * value = PyString_FromStringAndSize(buffer, size) - * else: - */ - __pyx_t_1 = (__pyx_v_emitter->dump_unicode == 0); - if (__pyx_t_1) { - - /* "_yaml.pyx":1522 - * emitter = data - * if emitter.dump_unicode == 0: - * value = PyString_FromStringAndSize(buffer, size) # <<<<<<<<<<<<<< - * else: - * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') - */ - __pyx_t_2 = PyString_FromStringAndSize(__pyx_v_buffer, __pyx_v_size); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1522; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - goto __pyx_L3; - } - /*else*/ { - - /* "_yaml.pyx":1524 - * value = PyString_FromStringAndSize(buffer, size) - * else: - * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') # <<<<<<<<<<<<<< - * emitter.stream.write(value) - * return 1 - */ - __pyx_t_2 = PyUnicode_DecodeUTF8(__pyx_v_buffer, __pyx_v_size, __pyx_k__strict); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1524; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_v_value); - __pyx_v_value = __pyx_t_2; - __pyx_t_2 = 0; - } - __pyx_L3:; - - /* "_yaml.pyx":1525 - * else: - * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') - * emitter.stream.write(value) # <<<<<<<<<<<<<< - * return 1 - * - */ - __pyx_t_2 = PyObject_GetAttr(__pyx_v_emitter->stream, __pyx_n_s__write); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_3)); - __Pyx_INCREF(__pyx_v_value); - PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_value); - __Pyx_GIVEREF(__pyx_v_value); - 
__pyx_t_4 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1525; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_4); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0; - __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0; - - /* "_yaml.pyx":1526 - * value = PyUnicode_DecodeUTF8(buffer, size, 'strict') - * emitter.stream.write(value) - * return 1 # <<<<<<<<<<<<<< - * - */ - __pyx_r = 1; - goto __pyx_L0; - - __pyx_r = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_2); - __Pyx_XDECREF(__pyx_t_3); - __Pyx_XDECREF(__pyx_t_4); - __Pyx_AddTraceback("_yaml.output_handler"); - __pyx_r = 0; - __pyx_L0:; - __Pyx_DECREF((PyObject *)__pyx_v_emitter); - __Pyx_DECREF(__pyx_v_value); - __Pyx_RefNannyFinishContext(); - return __pyx_r; -} - -static PyObject *__pyx_tp_new_5_yaml_Mark(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_5_yaml_Mark *p; - PyObject *o = (*t->tp_alloc)(t, 0); - if (!o) return 0; - p = ((struct __pyx_obj_5_yaml_Mark *)o); - p->name = Py_None; Py_INCREF(Py_None); - p->buffer = Py_None; Py_INCREF(Py_None); - p->pointer = Py_None; Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_5_yaml_Mark(PyObject *o) { - struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o; - Py_XDECREF(p->name); - Py_XDECREF(p->buffer); - Py_XDECREF(p->pointer); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_5_yaml_Mark(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o; - if (p->name) { - e = (*v)(p->name, a); if (e) return e; - } - if (p->buffer) { - e = (*v)(p->buffer, a); if (e) return e; - } - if (p->pointer) { - e = (*v)(p->pointer, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_5_yaml_Mark(PyObject *o) { - struct __pyx_obj_5_yaml_Mark *p = (struct __pyx_obj_5_yaml_Mark *)o; - 
PyObject* tmp; - tmp = ((PyObject*)p->name); - p->name = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->buffer); - p->buffer = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->pointer); - p->pointer = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_name(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_4name___get__(o); -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_index(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_5index___get__(o); -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_line(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_4line___get__(o); -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_column(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_6column___get__(o); -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_buffer(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_6buffer___get__(o); -} - -static PyObject *__pyx_getprop_5_yaml_4Mark_pointer(PyObject *o, void *x) { - return __pyx_pf_5_yaml_4Mark_7pointer___get__(o); -} - -static PyMethodDef __pyx_methods_5_yaml_Mark[] = { - {__Pyx_NAMESTR("get_snippet"), (PyCFunction)__pyx_pf_5_yaml_4Mark_1get_snippet, METH_NOARGS, __Pyx_DOCSTR(0)}, - {0, 0, 0, 0} -}; - -static struct PyGetSetDef __pyx_getsets_5_yaml_Mark[] = { - {(char *)"name", __pyx_getprop_5_yaml_4Mark_name, 0, 0, 0}, - {(char *)"index", __pyx_getprop_5_yaml_4Mark_index, 0, 0, 0}, - {(char *)"line", __pyx_getprop_5_yaml_4Mark_line, 0, 0, 0}, - {(char *)"column", __pyx_getprop_5_yaml_4Mark_column, 0, 0, 0}, - {(char *)"buffer", __pyx_getprop_5_yaml_4Mark_buffer, 0, 0, 0}, - {(char *)"pointer", __pyx_getprop_5_yaml_4Mark_pointer, 0, 0, 0}, - {0, 0, 0, 0, 0} -}; - -static PyNumberMethods __pyx_tp_as_number_Mark = { - 0, /*nb_add*/ - 0, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 
0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_nonzero*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - 0, /*nb_and*/ - 0, /*nb_xor*/ - 0, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - 0, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - 0, /*nb_inplace_and*/ - 0, /*nb_inplace_xor*/ - 0, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - #if PY_VERSION_HEX >= 0x02050000 - 0, /*nb_index*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_Mark = { - 0, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - 0, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_Mark = { - 0, /*mp_length*/ - 0, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyBufferProcs __pyx_tp_as_buffer_Mark = { - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getreadbuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getwritebuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getsegcount*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getcharbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_getbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_releasebuffer*/ - #endif -}; - -static PyTypeObject __pyx_type_5_yaml_Mark = { - PyVarObject_HEAD_INIT(0, 0) - __Pyx_NAMESTR("_yaml.Mark"), /*tp_name*/ - sizeof(struct __pyx_obj_5_yaml_Mark), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - 
__pyx_tp_dealloc_5_yaml_Mark, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #else - 0, /*reserved*/ - #endif - 0, /*tp_repr*/ - &__pyx_tp_as_number_Mark, /*tp_as_number*/ - &__pyx_tp_as_sequence_Mark, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_Mark, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - __pyx_pf_5_yaml_4Mark_2__str__, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - &__pyx_tp_as_buffer_Mark, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_5_yaml_Mark, /*tp_traverse*/ - __pyx_tp_clear_5_yaml_Mark, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_5_yaml_Mark, /*tp_methods*/ - 0, /*tp_members*/ - __pyx_getsets_5_yaml_Mark, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pf_5_yaml_4Mark___init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_5_yaml_Mark, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - #if PY_VERSION_HEX >= 0x02060000 - 0, /*tp_version_tag*/ - #endif -}; -static struct __pyx_vtabstruct_5_yaml_CParser __pyx_vtable_5_yaml_CParser; - -static PyObject *__pyx_tp_new_5_yaml_CParser(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_5_yaml_CParser *p; - PyObject *o = (*t->tp_alloc)(t, 0); - if (!o) return 0; - p = ((struct __pyx_obj_5_yaml_CParser *)o); - p->__pyx_vtab = __pyx_vtabptr_5_yaml_CParser; - p->stream = Py_None; Py_INCREF(Py_None); - p->stream_name = Py_None; Py_INCREF(Py_None); - p->current_token = Py_None; Py_INCREF(Py_None); - p->current_event = Py_None; Py_INCREF(Py_None); - p->anchors = Py_None; Py_INCREF(Py_None); - p->stream_cache = Py_None; 
Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_5_yaml_CParser(PyObject *o) { - struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o; - { - PyObject *etype, *eval, *etb; - PyErr_Fetch(&etype, &eval, &etb); - ++Py_REFCNT(o); - __pyx_pf_5_yaml_7CParser_1__dealloc__(o); - if (PyErr_Occurred()) PyErr_WriteUnraisable(o); - --Py_REFCNT(o); - PyErr_Restore(etype, eval, etb); - } - Py_XDECREF(p->stream); - Py_XDECREF(p->stream_name); - Py_XDECREF(p->current_token); - Py_XDECREF(p->current_event); - Py_XDECREF(p->anchors); - Py_XDECREF(p->stream_cache); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_5_yaml_CParser(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o; - if (p->stream) { - e = (*v)(p->stream, a); if (e) return e; - } - if (p->stream_name) { - e = (*v)(p->stream_name, a); if (e) return e; - } - if (p->current_token) { - e = (*v)(p->current_token, a); if (e) return e; - } - if (p->current_event) { - e = (*v)(p->current_event, a); if (e) return e; - } - if (p->anchors) { - e = (*v)(p->anchors, a); if (e) return e; - } - if (p->stream_cache) { - e = (*v)(p->stream_cache, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_5_yaml_CParser(PyObject *o) { - struct __pyx_obj_5_yaml_CParser *p = (struct __pyx_obj_5_yaml_CParser *)o; - PyObject* tmp; - tmp = ((PyObject*)p->stream); - p->stream = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->stream_name); - p->stream_name = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->current_token); - p->current_token = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->current_event); - p->current_event = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->anchors); - p->anchors = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->stream_cache); - p->stream_cache = Py_None; 
Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyMethodDef __pyx_methods_5_yaml_CParser[] = { - {__Pyx_NAMESTR("dispose"), (PyCFunction)__pyx_pf_5_yaml_7CParser_2dispose, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("raw_scan"), (PyCFunction)__pyx_pf_5_yaml_7CParser_3raw_scan, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("get_token"), (PyCFunction)__pyx_pf_5_yaml_7CParser_4get_token, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("peek_token"), (PyCFunction)__pyx_pf_5_yaml_7CParser_5peek_token, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("check_token"), (PyCFunction)__pyx_pf_5_yaml_7CParser_6check_token, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("raw_parse"), (PyCFunction)__pyx_pf_5_yaml_7CParser_7raw_parse, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("get_event"), (PyCFunction)__pyx_pf_5_yaml_7CParser_8get_event, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("peek_event"), (PyCFunction)__pyx_pf_5_yaml_7CParser_9peek_event, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("check_event"), (PyCFunction)__pyx_pf_5_yaml_7CParser_10check_event, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("check_node"), (PyCFunction)__pyx_pf_5_yaml_7CParser_11check_node, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("get_node"), (PyCFunction)__pyx_pf_5_yaml_7CParser_12get_node, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("get_single_node"), (PyCFunction)__pyx_pf_5_yaml_7CParser_13get_single_node, METH_NOARGS, __Pyx_DOCSTR(0)}, - {0, 0, 0, 0} -}; - -static PyNumberMethods __pyx_tp_as_number_CParser = { - 0, /*nb_add*/ - 0, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_nonzero*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - 0, /*nb_and*/ - 0, /*nb_xor*/ - 0, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_coerce*/ - #endif - 
0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - 0, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - 0, /*nb_inplace_and*/ - 0, /*nb_inplace_xor*/ - 0, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - #if PY_VERSION_HEX >= 0x02050000 - 0, /*nb_index*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_CParser = { - 0, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - 0, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_CParser = { - 0, /*mp_length*/ - 0, /*mp_subscript*/ - 0, /*mp_ass_subscript*/ -}; - -static PyBufferProcs __pyx_tp_as_buffer_CParser = { - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getreadbuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getwritebuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getsegcount*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getcharbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_getbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_releasebuffer*/ - #endif -}; - -static PyTypeObject __pyx_type_5_yaml_CParser = { - PyVarObject_HEAD_INIT(0, 0) - __Pyx_NAMESTR("_yaml.CParser"), /*tp_name*/ - sizeof(struct __pyx_obj_5_yaml_CParser), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_5_yaml_CParser, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #else - 0, /*reserved*/ - #endif - 0, /*tp_repr*/ - 
&__pyx_tp_as_number_CParser, /*tp_as_number*/ - &__pyx_tp_as_sequence_CParser, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_CParser, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - &__pyx_tp_as_buffer_CParser, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_5_yaml_CParser, /*tp_traverse*/ - __pyx_tp_clear_5_yaml_CParser, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_5_yaml_CParser, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pf_5_yaml_7CParser___init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_5_yaml_CParser, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - #if PY_VERSION_HEX >= 0x02060000 - 0, /*tp_version_tag*/ - #endif -}; -static struct __pyx_vtabstruct_5_yaml_CEmitter __pyx_vtable_5_yaml_CEmitter; - -static PyObject *__pyx_tp_new_5_yaml_CEmitter(PyTypeObject *t, PyObject *a, PyObject *k) { - struct __pyx_obj_5_yaml_CEmitter *p; - PyObject *o = (*t->tp_alloc)(t, 0); - if (!o) return 0; - p = ((struct __pyx_obj_5_yaml_CEmitter *)o); - p->__pyx_vtab = __pyx_vtabptr_5_yaml_CEmitter; - p->stream = Py_None; Py_INCREF(Py_None); - p->use_version = Py_None; Py_INCREF(Py_None); - p->use_tags = Py_None; Py_INCREF(Py_None); - p->serialized_nodes = Py_None; Py_INCREF(Py_None); - p->anchors = Py_None; Py_INCREF(Py_None); - p->use_encoding = Py_None; Py_INCREF(Py_None); - return o; -} - -static void __pyx_tp_dealloc_5_yaml_CEmitter(PyObject *o) { - struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o; - { - PyObject *etype, *eval, *etb; - PyErr_Fetch(&etype, &eval, 
&etb); - ++Py_REFCNT(o); - __pyx_pf_5_yaml_8CEmitter_1__dealloc__(o); - if (PyErr_Occurred()) PyErr_WriteUnraisable(o); - --Py_REFCNT(o); - PyErr_Restore(etype, eval, etb); - } - Py_XDECREF(p->stream); - Py_XDECREF(p->use_version); - Py_XDECREF(p->use_tags); - Py_XDECREF(p->serialized_nodes); - Py_XDECREF(p->anchors); - Py_XDECREF(p->use_encoding); - (*Py_TYPE(o)->tp_free)(o); -} - -static int __pyx_tp_traverse_5_yaml_CEmitter(PyObject *o, visitproc v, void *a) { - int e; - struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o; - if (p->stream) { - e = (*v)(p->stream, a); if (e) return e; - } - if (p->use_version) { - e = (*v)(p->use_version, a); if (e) return e; - } - if (p->use_tags) { - e = (*v)(p->use_tags, a); if (e) return e; - } - if (p->serialized_nodes) { - e = (*v)(p->serialized_nodes, a); if (e) return e; - } - if (p->anchors) { - e = (*v)(p->anchors, a); if (e) return e; - } - if (p->use_encoding) { - e = (*v)(p->use_encoding, a); if (e) return e; - } - return 0; -} - -static int __pyx_tp_clear_5_yaml_CEmitter(PyObject *o) { - struct __pyx_obj_5_yaml_CEmitter *p = (struct __pyx_obj_5_yaml_CEmitter *)o; - PyObject* tmp; - tmp = ((PyObject*)p->stream); - p->stream = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->use_version); - p->use_version = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->use_tags); - p->use_tags = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->serialized_nodes); - p->serialized_nodes = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->anchors); - p->anchors = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - tmp = ((PyObject*)p->use_encoding); - p->use_encoding = Py_None; Py_INCREF(Py_None); - Py_XDECREF(tmp); - return 0; -} - -static PyMethodDef __pyx_methods_5_yaml_CEmitter[] = { - {__Pyx_NAMESTR("dispose"), (PyCFunction)__pyx_pf_5_yaml_8CEmitter_2dispose, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("emit"), 
(PyCFunction)__pyx_pf_5_yaml_8CEmitter_3emit, METH_O, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("open"), (PyCFunction)__pyx_pf_5_yaml_8CEmitter_4open, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("close"), (PyCFunction)__pyx_pf_5_yaml_8CEmitter_5close, METH_NOARGS, __Pyx_DOCSTR(0)}, - {__Pyx_NAMESTR("serialize"), (PyCFunction)__pyx_pf_5_yaml_8CEmitter_6serialize, METH_O, __Pyx_DOCSTR(0)}, - {0, 0, 0, 0} -}; - -static PyNumberMethods __pyx_tp_as_number_CEmitter = { - 0, /*nb_add*/ - 0, /*nb_subtract*/ - 0, /*nb_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_divide*/ - #endif - 0, /*nb_remainder*/ - 0, /*nb_divmod*/ - 0, /*nb_power*/ - 0, /*nb_negative*/ - 0, /*nb_positive*/ - 0, /*nb_absolute*/ - 0, /*nb_nonzero*/ - 0, /*nb_invert*/ - 0, /*nb_lshift*/ - 0, /*nb_rshift*/ - 0, /*nb_and*/ - 0, /*nb_xor*/ - 0, /*nb_or*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_coerce*/ - #endif - 0, /*nb_int*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_long*/ - #else - 0, /*reserved*/ - #endif - 0, /*nb_float*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_oct*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*nb_hex*/ - #endif - 0, /*nb_inplace_add*/ - 0, /*nb_inplace_subtract*/ - 0, /*nb_inplace_multiply*/ - #if PY_MAJOR_VERSION < 3 - 0, /*nb_inplace_divide*/ - #endif - 0, /*nb_inplace_remainder*/ - 0, /*nb_inplace_power*/ - 0, /*nb_inplace_lshift*/ - 0, /*nb_inplace_rshift*/ - 0, /*nb_inplace_and*/ - 0, /*nb_inplace_xor*/ - 0, /*nb_inplace_or*/ - 0, /*nb_floor_divide*/ - 0, /*nb_true_divide*/ - 0, /*nb_inplace_floor_divide*/ - 0, /*nb_inplace_true_divide*/ - #if PY_VERSION_HEX >= 0x02050000 - 0, /*nb_index*/ - #endif -}; - -static PySequenceMethods __pyx_tp_as_sequence_CEmitter = { - 0, /*sq_length*/ - 0, /*sq_concat*/ - 0, /*sq_repeat*/ - 0, /*sq_item*/ - 0, /*sq_slice*/ - 0, /*sq_ass_item*/ - 0, /*sq_ass_slice*/ - 0, /*sq_contains*/ - 0, /*sq_inplace_concat*/ - 0, /*sq_inplace_repeat*/ -}; - -static PyMappingMethods __pyx_tp_as_mapping_CEmitter = { - 0, /*mp_length*/ - 0, /*mp_subscript*/ - 0, 
/*mp_ass_subscript*/ -}; - -static PyBufferProcs __pyx_tp_as_buffer_CEmitter = { - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getreadbuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getwritebuffer*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getsegcount*/ - #endif - #if PY_MAJOR_VERSION < 3 - 0, /*bf_getcharbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_getbuffer*/ - #endif - #if PY_VERSION_HEX >= 0x02060000 - 0, /*bf_releasebuffer*/ - #endif -}; - -static PyTypeObject __pyx_type_5_yaml_CEmitter = { - PyVarObject_HEAD_INIT(0, 0) - __Pyx_NAMESTR("_yaml.CEmitter"), /*tp_name*/ - sizeof(struct __pyx_obj_5_yaml_CEmitter), /*tp_basicsize*/ - 0, /*tp_itemsize*/ - __pyx_tp_dealloc_5_yaml_CEmitter, /*tp_dealloc*/ - 0, /*tp_print*/ - 0, /*tp_getattr*/ - 0, /*tp_setattr*/ - #if PY_MAJOR_VERSION < 3 - 0, /*tp_compare*/ - #else - 0, /*reserved*/ - #endif - 0, /*tp_repr*/ - &__pyx_tp_as_number_CEmitter, /*tp_as_number*/ - &__pyx_tp_as_sequence_CEmitter, /*tp_as_sequence*/ - &__pyx_tp_as_mapping_CEmitter, /*tp_as_mapping*/ - 0, /*tp_hash*/ - 0, /*tp_call*/ - 0, /*tp_str*/ - 0, /*tp_getattro*/ - 0, /*tp_setattro*/ - &__pyx_tp_as_buffer_CEmitter, /*tp_as_buffer*/ - Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE|Py_TPFLAGS_HAVE_GC, /*tp_flags*/ - 0, /*tp_doc*/ - __pyx_tp_traverse_5_yaml_CEmitter, /*tp_traverse*/ - __pyx_tp_clear_5_yaml_CEmitter, /*tp_clear*/ - 0, /*tp_richcompare*/ - 0, /*tp_weaklistoffset*/ - 0, /*tp_iter*/ - 0, /*tp_iternext*/ - __pyx_methods_5_yaml_CEmitter, /*tp_methods*/ - 0, /*tp_members*/ - 0, /*tp_getset*/ - 0, /*tp_base*/ - 0, /*tp_dict*/ - 0, /*tp_descr_get*/ - 0, /*tp_descr_set*/ - 0, /*tp_dictoffset*/ - __pyx_pf_5_yaml_8CEmitter___init__, /*tp_init*/ - 0, /*tp_alloc*/ - __pyx_tp_new_5_yaml_CEmitter, /*tp_new*/ - 0, /*tp_free*/ - 0, /*tp_is_gc*/ - 0, /*tp_bases*/ - 0, /*tp_mro*/ - 0, /*tp_cache*/ - 0, /*tp_subclasses*/ - 0, /*tp_weaklist*/ - 0, /*tp_del*/ - #if PY_VERSION_HEX >= 0x02060000 - 0, 
/*tp_version_tag*/ - #endif -}; - -static PyMethodDef __pyx_methods[] = { - {0, 0, 0, 0} -}; - -#if PY_MAJOR_VERSION >= 3 -static struct PyModuleDef __pyx_moduledef = { - PyModuleDef_HEAD_INIT, - __Pyx_NAMESTR("_yaml"), - 0, /* m_doc */ - -1, /* m_size */ - __pyx_methods /* m_methods */, - NULL, /* m_reload */ - NULL, /* m_traverse */ - NULL, /* m_clear */ - NULL /* m_free */ -}; -#endif - -static __Pyx_StringTabEntry __pyx_string_tab[] = { - {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0}, - {&__pyx_kp_u_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 1, 0, 0}, - {&__pyx_kp_s_13, __pyx_k_13, sizeof(__pyx_k_13), 0, 0, 1, 0}, - {&__pyx_kp_u_13, __pyx_k_13, sizeof(__pyx_k_13), 0, 1, 0, 0}, - {&__pyx_kp_s_14, __pyx_k_14, sizeof(__pyx_k_14), 0, 0, 1, 0}, - {&__pyx_kp_u_14, __pyx_k_14, sizeof(__pyx_k_14), 0, 1, 0, 0}, - {&__pyx_n_s_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 0, 1, 1}, - {&__pyx_n_s_16, __pyx_k_16, sizeof(__pyx_k_16), 0, 0, 1, 1}, - {&__pyx_n_s_17, __pyx_k_17, sizeof(__pyx_k_17), 0, 0, 1, 1}, - {&__pyx_n_s_18, __pyx_k_18, sizeof(__pyx_k_18), 0, 0, 1, 1}, - {&__pyx_n_s_19, __pyx_k_19, sizeof(__pyx_k_19), 0, 0, 1, 1}, - {&__pyx_kp_s_2, __pyx_k_2, sizeof(__pyx_k_2), 0, 0, 1, 0}, - {&__pyx_kp_u_2, __pyx_k_2, sizeof(__pyx_k_2), 0, 1, 0, 0}, - {&__pyx_kp_u_20, __pyx_k_20, sizeof(__pyx_k_20), 0, 1, 0, 0}, - {&__pyx_kp_s_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 0, 1, 0}, - {&__pyx_kp_u_21, __pyx_k_21, sizeof(__pyx_k_21), 0, 1, 0, 0}, - {&__pyx_kp_s_22, __pyx_k_22, sizeof(__pyx_k_22), 0, 0, 1, 0}, - {&__pyx_kp_u_22, __pyx_k_22, sizeof(__pyx_k_22), 0, 1, 0, 0}, - {&__pyx_kp_s_23, __pyx_k_23, sizeof(__pyx_k_23), 0, 0, 1, 0}, - {&__pyx_kp_u_23, __pyx_k_23, sizeof(__pyx_k_23), 0, 1, 0, 0}, - {&__pyx_kp_s_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 0, 1, 0}, - {&__pyx_kp_u_24, __pyx_k_24, sizeof(__pyx_k_24), 0, 1, 0, 0}, - {&__pyx_kp_s_25, __pyx_k_25, sizeof(__pyx_k_25), 0, 0, 1, 0}, - {&__pyx_kp_u_25, __pyx_k_25, sizeof(__pyx_k_25), 0, 1, 0, 0}, - {&__pyx_kp_s_28, __pyx_k_28, 
sizeof(__pyx_k_28), 0, 0, 1, 0}, - {&__pyx_kp_u_28, __pyx_k_28, sizeof(__pyx_k_28), 0, 1, 0, 0}, - {&__pyx_kp_s_3, __pyx_k_3, sizeof(__pyx_k_3), 0, 0, 1, 0}, - {&__pyx_kp_u_3, __pyx_k_3, sizeof(__pyx_k_3), 0, 1, 0, 0}, - {&__pyx_kp_s_31, __pyx_k_31, sizeof(__pyx_k_31), 0, 0, 1, 0}, - {&__pyx_kp_u_31, __pyx_k_31, sizeof(__pyx_k_31), 0, 1, 0, 0}, - {&__pyx_kp_s_32, __pyx_k_32, sizeof(__pyx_k_32), 0, 0, 1, 0}, - {&__pyx_kp_u_32, __pyx_k_32, sizeof(__pyx_k_32), 0, 1, 0, 0}, - {&__pyx_kp_s_33, __pyx_k_33, sizeof(__pyx_k_33), 0, 0, 1, 0}, - {&__pyx_kp_u_33, __pyx_k_33, sizeof(__pyx_k_33), 0, 1, 0, 0}, - {&__pyx_kp_s_34, __pyx_k_34, sizeof(__pyx_k_34), 0, 0, 1, 0}, - {&__pyx_kp_u_34, __pyx_k_34, sizeof(__pyx_k_34), 0, 1, 0, 0}, - {&__pyx_kp_s_35, __pyx_k_35, sizeof(__pyx_k_35), 0, 0, 1, 0}, - {&__pyx_kp_u_35, __pyx_k_35, sizeof(__pyx_k_35), 0, 1, 0, 0}, - {&__pyx_n_s_36, __pyx_k_36, sizeof(__pyx_k_36), 0, 0, 1, 1}, - {&__pyx_n_s_37, __pyx_k_37, sizeof(__pyx_k_37), 0, 0, 1, 1}, - {&__pyx_n_s_38, __pyx_k_38, sizeof(__pyx_k_38), 0, 0, 1, 1}, - {&__pyx_kp_s_39, __pyx_k_39, sizeof(__pyx_k_39), 0, 0, 1, 0}, - {&__pyx_kp_u_39, __pyx_k_39, sizeof(__pyx_k_39), 0, 1, 0, 0}, - {&__pyx_kp_s_4, __pyx_k_4, sizeof(__pyx_k_4), 0, 0, 1, 0}, - {&__pyx_kp_u_4, __pyx_k_4, sizeof(__pyx_k_4), 0, 1, 0, 0}, - {&__pyx_kp_s_42, __pyx_k_42, sizeof(__pyx_k_42), 0, 0, 1, 0}, - {&__pyx_kp_s_43, __pyx_k_43, sizeof(__pyx_k_43), 0, 0, 1, 0}, - {&__pyx_kp_s_44, __pyx_k_44, sizeof(__pyx_k_44), 0, 0, 1, 0}, - {&__pyx_n_s_45, __pyx_k_45, sizeof(__pyx_k_45), 0, 0, 1, 1}, - {&__pyx_n_s_46, __pyx_k_46, sizeof(__pyx_k_46), 0, 0, 1, 1}, - {&__pyx_kp_s_47, __pyx_k_47, sizeof(__pyx_k_47), 0, 0, 1, 0}, - {&__pyx_kp_u_47, __pyx_k_47, sizeof(__pyx_k_47), 0, 1, 0, 0}, - {&__pyx_kp_s_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 0, 1, 0}, - {&__pyx_kp_u_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 1, 0, 0}, - {&__pyx_kp_s_50, __pyx_k_50, sizeof(__pyx_k_50), 0, 0, 1, 0}, - {&__pyx_kp_u_50, __pyx_k_50, sizeof(__pyx_k_50), 0, 1, 0, 0}, - 
{&__pyx_kp_s_53, __pyx_k_53, sizeof(__pyx_k_53), 0, 0, 1, 0}, - {&__pyx_kp_u_53, __pyx_k_53, sizeof(__pyx_k_53), 0, 1, 0, 0}, - {&__pyx_kp_s_56, __pyx_k_56, sizeof(__pyx_k_56), 0, 0, 1, 0}, - {&__pyx_kp_u_56, __pyx_k_56, sizeof(__pyx_k_56), 0, 1, 0, 0}, - {&__pyx_kp_s_59, __pyx_k_59, sizeof(__pyx_k_59), 0, 0, 1, 0}, - {&__pyx_kp_u_59, __pyx_k_59, sizeof(__pyx_k_59), 0, 1, 0, 0}, - {&__pyx_kp_s_64, __pyx_k_64, sizeof(__pyx_k_64), 0, 0, 1, 0}, - {&__pyx_kp_u_64, __pyx_k_64, sizeof(__pyx_k_64), 0, 1, 0, 0}, - {&__pyx_kp_s_67, __pyx_k_67, sizeof(__pyx_k_67), 0, 0, 1, 0}, - {&__pyx_kp_u_67, __pyx_k_67, sizeof(__pyx_k_67), 0, 1, 0, 0}, - {&__pyx_kp_s_78, __pyx_k_78, sizeof(__pyx_k_78), 0, 0, 1, 0}, - {&__pyx_kp_u_78, __pyx_k_78, sizeof(__pyx_k_78), 0, 1, 0, 0}, - {&__pyx_kp_s_79, __pyx_k_79, sizeof(__pyx_k_79), 0, 0, 1, 0}, - {&__pyx_kp_u_79, __pyx_k_79, sizeof(__pyx_k_79), 0, 1, 0, 0}, - {&__pyx_kp_s_8, __pyx_k_8, sizeof(__pyx_k_8), 0, 0, 1, 0}, - {&__pyx_kp_u_8, __pyx_k_8, sizeof(__pyx_k_8), 0, 1, 0, 0}, - {&__pyx_kp_s_82, __pyx_k_82, sizeof(__pyx_k_82), 0, 0, 1, 0}, - {&__pyx_kp_u_82, __pyx_k_82, sizeof(__pyx_k_82), 0, 1, 0, 0}, - {&__pyx_kp_s_85, __pyx_k_85, sizeof(__pyx_k_85), 0, 0, 1, 0}, - {&__pyx_kp_u_85, __pyx_k_85, sizeof(__pyx_k_85), 0, 1, 0, 0}, - {&__pyx_kp_s_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 0, 1, 0}, - {&__pyx_kp_u_9, __pyx_k_9, sizeof(__pyx_k_9), 0, 1, 0, 0}, - {&__pyx_kp_u_98, __pyx_k_98, sizeof(__pyx_k_98), 0, 1, 0, 0}, - {&__pyx_n_s__AliasEvent, __pyx_k__AliasEvent, sizeof(__pyx_k__AliasEvent), 0, 0, 1, 1}, - {&__pyx_n_s__AliasToken, __pyx_k__AliasToken, sizeof(__pyx_k__AliasToken), 0, 0, 1, 1}, - {&__pyx_n_s__AnchorToken, __pyx_k__AnchorToken, sizeof(__pyx_k__AnchorToken), 0, 0, 1, 1}, - {&__pyx_n_s__AttributeError, __pyx_k__AttributeError, sizeof(__pyx_k__AttributeError), 0, 0, 1, 1}, - {&__pyx_n_s__BlockEndToken, __pyx_k__BlockEndToken, sizeof(__pyx_k__BlockEndToken), 0, 0, 1, 1}, - {&__pyx_n_s__BlockEntryToken, __pyx_k__BlockEntryToken, 
sizeof(__pyx_k__BlockEntryToken), 0, 0, 1, 1}, - {&__pyx_n_s__ComposerError, __pyx_k__ComposerError, sizeof(__pyx_k__ComposerError), 0, 0, 1, 1}, - {&__pyx_n_s__ConstructorError, __pyx_k__ConstructorError, sizeof(__pyx_k__ConstructorError), 0, 0, 1, 1}, - {&__pyx_n_s__DirectiveToken, __pyx_k__DirectiveToken, sizeof(__pyx_k__DirectiveToken), 0, 0, 1, 1}, - {&__pyx_n_s__DocumentEndEvent, __pyx_k__DocumentEndEvent, sizeof(__pyx_k__DocumentEndEvent), 0, 0, 1, 1}, - {&__pyx_n_s__DocumentEndToken, __pyx_k__DocumentEndToken, sizeof(__pyx_k__DocumentEndToken), 0, 0, 1, 1}, - {&__pyx_n_s__DocumentStartEvent, __pyx_k__DocumentStartEvent, sizeof(__pyx_k__DocumentStartEvent), 0, 0, 1, 1}, - {&__pyx_n_s__DocumentStartToken, __pyx_k__DocumentStartToken, sizeof(__pyx_k__DocumentStartToken), 0, 0, 1, 1}, - {&__pyx_n_s__EmitterError, __pyx_k__EmitterError, sizeof(__pyx_k__EmitterError), 0, 0, 1, 1}, - {&__pyx_n_s__FlowEntryToken, __pyx_k__FlowEntryToken, sizeof(__pyx_k__FlowEntryToken), 0, 0, 1, 1}, - {&__pyx_n_s__FlowMappingEndToken, __pyx_k__FlowMappingEndToken, sizeof(__pyx_k__FlowMappingEndToken), 0, 0, 1, 1}, - {&__pyx_n_s__KeyToken, __pyx_k__KeyToken, sizeof(__pyx_k__KeyToken), 0, 0, 1, 1}, - {&__pyx_n_s__MappingEndEvent, __pyx_k__MappingEndEvent, sizeof(__pyx_k__MappingEndEvent), 0, 0, 1, 1}, - {&__pyx_n_s__MappingNode, __pyx_k__MappingNode, sizeof(__pyx_k__MappingNode), 0, 0, 1, 1}, - {&__pyx_n_s__MappingStartEvent, __pyx_k__MappingStartEvent, sizeof(__pyx_k__MappingStartEvent), 0, 0, 1, 1}, - {&__pyx_n_s__MemoryError, __pyx_k__MemoryError, sizeof(__pyx_k__MemoryError), 0, 0, 1, 1}, - {&__pyx_n_s__ParserError, __pyx_k__ParserError, sizeof(__pyx_k__ParserError), 0, 0, 1, 1}, - {&__pyx_n_s__ReaderError, __pyx_k__ReaderError, sizeof(__pyx_k__ReaderError), 0, 0, 1, 1}, - {&__pyx_n_s__RepresenterError, __pyx_k__RepresenterError, sizeof(__pyx_k__RepresenterError), 0, 0, 1, 1}, - {&__pyx_n_s__ScalarEvent, __pyx_k__ScalarEvent, sizeof(__pyx_k__ScalarEvent), 0, 0, 1, 1}, - 
{&__pyx_n_s__ScalarNode, __pyx_k__ScalarNode, sizeof(__pyx_k__ScalarNode), 0, 0, 1, 1}, - {&__pyx_n_s__ScalarToken, __pyx_k__ScalarToken, sizeof(__pyx_k__ScalarToken), 0, 0, 1, 1}, - {&__pyx_n_s__ScannerError, __pyx_k__ScannerError, sizeof(__pyx_k__ScannerError), 0, 0, 1, 1}, - {&__pyx_n_s__SequenceEndEvent, __pyx_k__SequenceEndEvent, sizeof(__pyx_k__SequenceEndEvent), 0, 0, 1, 1}, - {&__pyx_n_s__SequenceNode, __pyx_k__SequenceNode, sizeof(__pyx_k__SequenceNode), 0, 0, 1, 1}, - {&__pyx_n_s__SequenceStartEvent, __pyx_k__SequenceStartEvent, sizeof(__pyx_k__SequenceStartEvent), 0, 0, 1, 1}, - {&__pyx_n_s__SerializerError, __pyx_k__SerializerError, sizeof(__pyx_k__SerializerError), 0, 0, 1, 1}, - {&__pyx_n_s__StreamEndEvent, __pyx_k__StreamEndEvent, sizeof(__pyx_k__StreamEndEvent), 0, 0, 1, 1}, - {&__pyx_n_s__StreamEndToken, __pyx_k__StreamEndToken, sizeof(__pyx_k__StreamEndToken), 0, 0, 1, 1}, - {&__pyx_n_s__StreamStartEvent, __pyx_k__StreamStartEvent, sizeof(__pyx_k__StreamStartEvent), 0, 0, 1, 1}, - {&__pyx_n_s__StreamStartToken, __pyx_k__StreamStartToken, sizeof(__pyx_k__StreamStartToken), 0, 0, 1, 1}, - {&__pyx_n_u__TAG, __pyx_k__TAG, sizeof(__pyx_k__TAG), 0, 1, 0, 1}, - {&__pyx_n_s__TagToken, __pyx_k__TagToken, sizeof(__pyx_k__TagToken), 0, 0, 1, 1}, - {&__pyx_n_s__TypeError, __pyx_k__TypeError, sizeof(__pyx_k__TypeError), 0, 0, 1, 1}, - {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1}, - {&__pyx_n_s__ValueToken, __pyx_k__ValueToken, sizeof(__pyx_k__ValueToken), 0, 0, 1, 1}, - {&__pyx_n_u__YAML, __pyx_k__YAML, sizeof(__pyx_k__YAML), 0, 1, 0, 1}, - {&__pyx_n_s__YAMLError, __pyx_k__YAMLError, sizeof(__pyx_k__YAMLError), 0, 0, 1, 1}, - {&__pyx_n_s____class__, __pyx_k____class__, sizeof(__pyx_k____class__), 0, 0, 1, 1}, - {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1}, - {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1}, - {&__pyx_n_s___anchor_node, 
__pyx_k___anchor_node, sizeof(__pyx_k___anchor_node), 0, 0, 1, 1}, - {&__pyx_n_s___compose_document, __pyx_k___compose_document, sizeof(__pyx_k___compose_document), 0, 0, 1, 1}, - {&__pyx_n_s___compose_node, __pyx_k___compose_node, sizeof(__pyx_k___compose_node), 0, 0, 1, 1}, - {&__pyx_n_s___emitter_error, __pyx_k___emitter_error, sizeof(__pyx_k___emitter_error), 0, 0, 1, 1}, - {&__pyx_n_s___event_to_object, __pyx_k___event_to_object, sizeof(__pyx_k___event_to_object), 0, 0, 1, 1}, - {&__pyx_n_s___object_to_event, __pyx_k___object_to_event, sizeof(__pyx_k___object_to_event), 0, 0, 1, 1}, - {&__pyx_n_s___parse, __pyx_k___parse, sizeof(__pyx_k___parse), 0, 0, 1, 1}, - {&__pyx_n_s___parse_next_event, __pyx_k___parse_next_event, sizeof(__pyx_k___parse_next_event), 0, 0, 1, 1}, - {&__pyx_n_s___parser_error, __pyx_k___parser_error, sizeof(__pyx_k___parser_error), 0, 0, 1, 1}, - {&__pyx_n_s___scan, __pyx_k___scan, sizeof(__pyx_k___scan), 0, 0, 1, 1}, - {&__pyx_n_s___serialize_node, __pyx_k___serialize_node, sizeof(__pyx_k___serialize_node), 0, 0, 1, 1}, - {&__pyx_n_s___token_to_object, __pyx_k___token_to_object, sizeof(__pyx_k___token_to_object), 0, 0, 1, 1}, - {&__pyx_n_s___yaml, __pyx_k___yaml, sizeof(__pyx_k___yaml), 0, 0, 1, 1}, - {&__pyx_n_s__alias, __pyx_k__alias, sizeof(__pyx_k__alias), 0, 0, 1, 1}, - {&__pyx_n_s__allow_unicode, __pyx_k__allow_unicode, sizeof(__pyx_k__allow_unicode), 0, 0, 1, 1}, - {&__pyx_n_s__anchor, __pyx_k__anchor, sizeof(__pyx_k__anchor), 0, 0, 1, 1}, - {&__pyx_n_s__anchors, __pyx_k__anchors, sizeof(__pyx_k__anchors), 0, 0, 1, 1}, - {&__pyx_n_s__append, __pyx_k__append, sizeof(__pyx_k__append), 0, 0, 1, 1}, - {&__pyx_n_s__ascend_resolver, __pyx_k__ascend_resolver, sizeof(__pyx_k__ascend_resolver), 0, 0, 1, 1}, - {&__pyx_n_s__buffer, __pyx_k__buffer, sizeof(__pyx_k__buffer), 0, 0, 1, 1}, - {&__pyx_n_s__canonical, __pyx_k__canonical, sizeof(__pyx_k__canonical), 0, 0, 1, 1}, - {&__pyx_n_s__closed, __pyx_k__closed, sizeof(__pyx_k__closed), 0, 0, 
1, 1}, - {&__pyx_n_s__column, __pyx_k__column, sizeof(__pyx_k__column), 0, 0, 1, 1}, - {&__pyx_n_s__composer, __pyx_k__composer, sizeof(__pyx_k__composer), 0, 0, 1, 1}, - {&__pyx_n_s__constructor, __pyx_k__constructor, sizeof(__pyx_k__constructor), 0, 0, 1, 1}, - {&__pyx_n_s__context, __pyx_k__context, sizeof(__pyx_k__context), 0, 0, 1, 1}, - {&__pyx_n_s__context_mark, __pyx_k__context_mark, sizeof(__pyx_k__context_mark), 0, 0, 1, 1}, - {&__pyx_n_s__current_event, __pyx_k__current_event, sizeof(__pyx_k__current_event), 0, 0, 1, 1}, - {&__pyx_n_s__current_token, __pyx_k__current_token, sizeof(__pyx_k__current_token), 0, 0, 1, 1}, - {&__pyx_n_s__data, __pyx_k__data, sizeof(__pyx_k__data), 0, 0, 1, 1}, - {&__pyx_n_s__descend_resolver, __pyx_k__descend_resolver, sizeof(__pyx_k__descend_resolver), 0, 0, 1, 1}, - {&__pyx_n_s__document_end, __pyx_k__document_end, sizeof(__pyx_k__document_end), 0, 0, 1, 1}, - {&__pyx_n_s__document_start, __pyx_k__document_start, sizeof(__pyx_k__document_start), 0, 0, 1, 1}, - {&__pyx_n_s__dump_unicode, __pyx_k__dump_unicode, sizeof(__pyx_k__dump_unicode), 0, 0, 1, 1}, - {&__pyx_n_s__emitter, __pyx_k__emitter, sizeof(__pyx_k__emitter), 0, 0, 1, 1}, - {&__pyx_n_s__encoding, __pyx_k__encoding, sizeof(__pyx_k__encoding), 0, 0, 1, 1}, - {&__pyx_n_u__encoding, __pyx_k__encoding, sizeof(__pyx_k__encoding), 0, 1, 0, 1}, - {&__pyx_n_s__end, __pyx_k__end, sizeof(__pyx_k__end), 0, 0, 1, 1}, - {&__pyx_n_s__end_mark, __pyx_k__end_mark, sizeof(__pyx_k__end_mark), 0, 0, 1, 1}, - {&__pyx_n_s__error, __pyx_k__error, sizeof(__pyx_k__error), 0, 0, 1, 1}, - {&__pyx_n_s__events, __pyx_k__events, sizeof(__pyx_k__events), 0, 0, 1, 1}, - {&__pyx_n_s__explicit, __pyx_k__explicit, sizeof(__pyx_k__explicit), 0, 0, 1, 1}, - {&__pyx_n_s__explicit_end, __pyx_k__explicit_end, sizeof(__pyx_k__explicit_end), 0, 0, 1, 1}, - {&__pyx_n_s__explicit_start, __pyx_k__explicit_start, sizeof(__pyx_k__explicit_start), 0, 0, 1, 1}, - {&__pyx_n_s__flow_style, __pyx_k__flow_style, 
sizeof(__pyx_k__flow_style), 0, 0, 1, 1}, - {&__pyx_n_s__get_version, __pyx_k__get_version, sizeof(__pyx_k__get_version), 0, 0, 1, 1}, - {&__pyx_n_s__get_version_string, __pyx_k__get_version_string, sizeof(__pyx_k__get_version_string), 0, 0, 1, 1}, - {&__pyx_n_s__handle, __pyx_k__handle, sizeof(__pyx_k__handle), 0, 0, 1, 1}, - {&__pyx_n_s__implicit, __pyx_k__implicit, sizeof(__pyx_k__implicit), 0, 0, 1, 1}, - {&__pyx_n_s__indent, __pyx_k__indent, sizeof(__pyx_k__indent), 0, 0, 1, 1}, - {&__pyx_n_s__index, __pyx_k__index, sizeof(__pyx_k__index), 0, 0, 1, 1}, - {&__pyx_n_s__last_alias_id, __pyx_k__last_alias_id, sizeof(__pyx_k__last_alias_id), 0, 0, 1, 1}, - {&__pyx_n_s__length, __pyx_k__length, sizeof(__pyx_k__length), 0, 0, 1, 1}, - {&__pyx_n_s__line, __pyx_k__line, sizeof(__pyx_k__line), 0, 0, 1, 1}, - {&__pyx_n_s__line_break, __pyx_k__line_break, sizeof(__pyx_k__line_break), 0, 0, 1, 1}, - {&__pyx_n_s__major, __pyx_k__major, sizeof(__pyx_k__major), 0, 0, 1, 1}, - {&__pyx_n_s__mapping_start, __pyx_k__mapping_start, sizeof(__pyx_k__mapping_start), 0, 0, 1, 1}, - {&__pyx_n_s__minor, __pyx_k__minor, sizeof(__pyx_k__minor), 0, 0, 1, 1}, - {&__pyx_n_s__name, __pyx_k__name, sizeof(__pyx_k__name), 0, 0, 1, 1}, - {&__pyx_n_s__nodes, __pyx_k__nodes, sizeof(__pyx_k__nodes), 0, 0, 1, 1}, - {&__pyx_n_s__parsed_event, __pyx_k__parsed_event, sizeof(__pyx_k__parsed_event), 0, 0, 1, 1}, - {&__pyx_n_s__parser, __pyx_k__parser, sizeof(__pyx_k__parser), 0, 0, 1, 1}, - {&__pyx_n_s__plain_implicit, __pyx_k__plain_implicit, sizeof(__pyx_k__plain_implicit), 0, 0, 1, 1}, - {&__pyx_n_s__pointer, __pyx_k__pointer, sizeof(__pyx_k__pointer), 0, 0, 1, 1}, - {&__pyx_n_s__prefix, __pyx_k__prefix, sizeof(__pyx_k__prefix), 0, 0, 1, 1}, - {&__pyx_n_s__problem, __pyx_k__problem, sizeof(__pyx_k__problem), 0, 0, 1, 1}, - {&__pyx_n_s__problem_mark, __pyx_k__problem_mark, sizeof(__pyx_k__problem_mark), 0, 0, 1, 1}, - {&__pyx_n_s__problem_offset, __pyx_k__problem_offset, sizeof(__pyx_k__problem_offset), 
0, 0, 1, 1}, - {&__pyx_n_s__problem_value, __pyx_k__problem_value, sizeof(__pyx_k__problem_value), 0, 0, 1, 1}, - {&__pyx_n_s__quoted_implicit, __pyx_k__quoted_implicit, sizeof(__pyx_k__quoted_implicit), 0, 0, 1, 1}, - {&__pyx_n_s__read, __pyx_k__read, sizeof(__pyx_k__read), 0, 0, 1, 1}, - {&__pyx_n_s__reader, __pyx_k__reader, sizeof(__pyx_k__reader), 0, 0, 1, 1}, - {&__pyx_n_s__representer, __pyx_k__representer, sizeof(__pyx_k__representer), 0, 0, 1, 1}, - {&__pyx_n_s__resolve, __pyx_k__resolve, sizeof(__pyx_k__resolve), 0, 0, 1, 1}, - {&__pyx_n_s__scalar, __pyx_k__scalar, sizeof(__pyx_k__scalar), 0, 0, 1, 1}, - {&__pyx_n_s__scanner, __pyx_k__scanner, sizeof(__pyx_k__scanner), 0, 0, 1, 1}, - {&__pyx_n_s__sequence_start, __pyx_k__sequence_start, sizeof(__pyx_k__sequence_start), 0, 0, 1, 1}, - {&__pyx_n_s__serialized_nodes, __pyx_k__serialized_nodes, sizeof(__pyx_k__serialized_nodes), 0, 0, 1, 1}, - {&__pyx_n_s__serializer, __pyx_k__serializer, sizeof(__pyx_k__serializer), 0, 0, 1, 1}, - {&__pyx_n_s__start, __pyx_k__start, sizeof(__pyx_k__start), 0, 0, 1, 1}, - {&__pyx_n_s__start_mark, __pyx_k__start_mark, sizeof(__pyx_k__start_mark), 0, 0, 1, 1}, - {&__pyx_n_s__stream, __pyx_k__stream, sizeof(__pyx_k__stream), 0, 0, 1, 1}, - {&__pyx_n_s__stream_cache, __pyx_k__stream_cache, sizeof(__pyx_k__stream_cache), 0, 0, 1, 1}, - {&__pyx_n_s__stream_cache_len, __pyx_k__stream_cache_len, sizeof(__pyx_k__stream_cache_len), 0, 0, 1, 1}, - {&__pyx_n_s__stream_cache_pos, __pyx_k__stream_cache_pos, sizeof(__pyx_k__stream_cache_pos), 0, 0, 1, 1}, - {&__pyx_n_s__stream_name, __pyx_k__stream_name, sizeof(__pyx_k__stream_name), 0, 0, 1, 1}, - {&__pyx_n_s__stream_start, __pyx_k__stream_start, sizeof(__pyx_k__stream_start), 0, 0, 1, 1}, - {&__pyx_n_s__style, __pyx_k__style, sizeof(__pyx_k__style), 0, 0, 1, 1}, - {&__pyx_n_s__suffix, __pyx_k__suffix, sizeof(__pyx_k__suffix), 0, 0, 1, 1}, - {&__pyx_n_s__tag, __pyx_k__tag, sizeof(__pyx_k__tag), 0, 0, 1, 1}, - {&__pyx_n_s__tag_directive, 
__pyx_k__tag_directive, sizeof(__pyx_k__tag_directive), 0, 0, 1, 1}, - {&__pyx_n_s__tag_directives, __pyx_k__tag_directives, sizeof(__pyx_k__tag_directives), 0, 0, 1, 1}, - {&__pyx_n_s__tags, __pyx_k__tags, sizeof(__pyx_k__tags), 0, 0, 1, 1}, - {&__pyx_n_s__tokens, __pyx_k__tokens, sizeof(__pyx_k__tokens), 0, 0, 1, 1}, - {&__pyx_n_s__type, __pyx_k__type, sizeof(__pyx_k__type), 0, 0, 1, 1}, - {&__pyx_n_s__unicode_source, __pyx_k__unicode_source, sizeof(__pyx_k__unicode_source), 0, 0, 1, 1}, - {&__pyx_n_s__use_encoding, __pyx_k__use_encoding, sizeof(__pyx_k__use_encoding), 0, 0, 1, 1}, - {&__pyx_n_s__use_tags, __pyx_k__use_tags, sizeof(__pyx_k__use_tags), 0, 0, 1, 1}, - {&__pyx_n_s__use_version, __pyx_k__use_version, sizeof(__pyx_k__use_version), 0, 0, 1, 1}, - {&__pyx_n_s__value, __pyx_k__value, sizeof(__pyx_k__value), 0, 0, 1, 1}, - {&__pyx_n_s__version, __pyx_k__version, sizeof(__pyx_k__version), 0, 0, 1, 1}, - {&__pyx_n_s__version_directive, __pyx_k__version_directive, sizeof(__pyx_k__version_directive), 0, 0, 1, 1}, - {&__pyx_n_s__width, __pyx_k__width, sizeof(__pyx_k__width), 0, 0, 1, 1}, - {&__pyx_n_s__write, __pyx_k__write, sizeof(__pyx_k__write), 0, 0, 1, 1}, - {&__pyx_n_s__yaml, __pyx_k__yaml, sizeof(__pyx_k__yaml), 0, 0, 1, 1}, - {0, 0, 0, 0, 0, 0, 0} -}; -static int __Pyx_InitCachedBuiltins(void) { - __pyx_builtin_MemoryError = __Pyx_GetName(__pyx_b, __pyx_n_s__MemoryError); if (!__pyx_builtin_MemoryError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_builtin_AttributeError = __Pyx_GetName(__pyx_b, __pyx_n_s__AttributeError); if (!__pyx_builtin_AttributeError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_builtin_TypeError = __Pyx_GetName(__pyx_b, __pyx_n_s__TypeError); if (!__pyx_builtin_TypeError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_builtin_ValueError = 
__Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - return 0; - __pyx_L1_error:; - return -1; -} - -static int __Pyx_InitCachedConstants(void) { - __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants"); - - /* "_yaml.pyx":301 - * if PyString_CheckExact(stream) == 0: - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string or stream input is required") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"a string or stream input is required") - */ - __pyx_k_tuple_6 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 301; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_6)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_5)); - PyTuple_SET_ITEM(__pyx_k_tuple_6, 0, ((PyObject *)__pyx_kp_s_5)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_5)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_6)); - - /* "_yaml.pyx":303 - * raise TypeError("a string or stream input is required") - * else: - * raise TypeError(u"a string or stream input is required") # <<<<<<<<<<<<<< - * self.stream = stream - * yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - */ - __pyx_k_tuple_7 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 303; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_7)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_5)); - PyTuple_SET_ITEM(__pyx_k_tuple_7, 0, ((PyObject *)__pyx_kp_u_5)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_5)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_7)); - - /* "_yaml.pyx":356 - * return ParserError(context, context_mark, problem, problem_mark) - * if PY_MAJOR_VERSION < 3: - * raise ValueError("no parser error") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"no parser error") - */ - __pyx_k_tuple_10 = PyTuple_New(1); if 
(unlikely(!__pyx_k_tuple_10)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 356; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_10)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_9)); - PyTuple_SET_ITEM(__pyx_k_tuple_10, 0, ((PyObject *)__pyx_kp_s_9)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_9)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_10)); - - /* "_yaml.pyx":358 - * raise ValueError("no parser error") - * else: - * raise ValueError(u"no parser error") # <<<<<<<<<<<<<< - * - * def raw_scan(self): - */ - __pyx_k_tuple_11 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_11)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 358; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_11)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_9)); - PyTuple_SET_ITEM(__pyx_k_tuple_11, 0, ((PyObject *)__pyx_kp_u_9)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_9)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_11)); - - /* "_yaml.pyx":479 - * else: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("unknown token type") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"unknown token type") - */ - __pyx_k_tuple_26 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_26)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 479; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_26)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_25)); - PyTuple_SET_ITEM(__pyx_k_tuple_26, 0, ((PyObject *)__pyx_kp_s_25)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_25)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_26)); - - /* "_yaml.pyx":481 - * raise ValueError("unknown token type") - * else: - * raise ValueError(u"unknown token type") # <<<<<<<<<<<<<< - * - * def get_token(self): - */ - __pyx_k_tuple_27 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_27)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 481; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_27)); - __Pyx_INCREF(((PyObject 
*)__pyx_kp_u_25)); - PyTuple_SET_ITEM(__pyx_k_tuple_27, 0, ((PyObject *)__pyx_kp_u_25)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_25)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_27)); - - /* "_yaml.pyx":657 - * else: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("unknown event type") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"unknown event type") - */ - __pyx_k_tuple_29 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_29)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 657; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_29)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_28)); - PyTuple_SET_ITEM(__pyx_k_tuple_29, 0, ((PyObject *)__pyx_kp_s_28)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_28)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_29)); - - /* "_yaml.pyx":659 - * raise ValueError("unknown event type") - * else: - * raise ValueError(u"unknown event type") # <<<<<<<<<<<<<< - * - * def get_event(self): - */ - __pyx_k_tuple_30 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_30)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 659; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_30)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_28)); - PyTuple_SET_ITEM(__pyx_k_tuple_30, 0, ((PyObject *)__pyx_kp_u_28)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_28)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_30)); - - /* "_yaml.pyx":918 - * if PyString_CheckExact(value) == 0: - * if PY_MAJOR_VERSION < 3: - * raise TypeError("a string value is expected") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"a string value is expected") - */ - __pyx_k_tuple_40 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_40)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 918; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_40)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_39)); - PyTuple_SET_ITEM(__pyx_k_tuple_40, 0, ((PyObject *)__pyx_kp_s_39)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_39)); - 
__Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_40)); - - /* "_yaml.pyx":920 - * raise TypeError("a string value is expected") - * else: - * raise TypeError(u"a string value is expected") # <<<<<<<<<<<<<< - * parser.stream_cache = value - * parser.stream_cache_pos = 0 - */ - __pyx_k_tuple_41 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_41)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 920; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_41)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_39)); - PyTuple_SET_ITEM(__pyx_k_tuple_41, 0, ((PyObject *)__pyx_kp_u_39)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_39)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_41)); - - /* "_yaml.pyx":1012 - * return EmitterError(problem) - * if PY_MAJOR_VERSION < 3: - * raise ValueError("no emitter error") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"no emitter error") - */ - __pyx_k_tuple_48 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_48)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1012; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_48)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_47)); - PyTuple_SET_ITEM(__pyx_k_tuple_48, 0, ((PyObject *)__pyx_kp_s_47)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_47)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_48)); - - /* "_yaml.pyx":1014 - * raise ValueError("no emitter error") - * else: - * raise ValueError(u"no emitter error") # <<<<<<<<<<<<<< - * - * cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: - */ - __pyx_k_tuple_49 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_49)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1014; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_49)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_47)); - PyTuple_SET_ITEM(__pyx_k_tuple_49, 0, ((PyObject *)__pyx_kp_u_47)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_47)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_49)); - - /* 
"_yaml.pyx":1058 - * if len(event_object.tags) > 128: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"too many tags") - */ - __pyx_k_tuple_51 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_51)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1058; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_51)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_50)); - PyTuple_SET_ITEM(__pyx_k_tuple_51, 0, ((PyObject *)__pyx_kp_s_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_51)); - - /* "_yaml.pyx":1060 - * raise ValueError("too many tags") - * else: - * raise ValueError(u"too many tags") # <<<<<<<<<<<<<< - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - */ - __pyx_k_tuple_52 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_52)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1060; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_52)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_50)); - PyTuple_SET_ITEM(__pyx_k_tuple_52, 0, ((PyObject *)__pyx_kp_u_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_52)); - - /* "_yaml.pyx":1071 - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag handle must be a string") - */ - __pyx_k_tuple_54 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_54)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1071; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_54)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_53)); - PyTuple_SET_ITEM(__pyx_k_tuple_54, 0, ((PyObject *)__pyx_kp_s_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_54)); - - /* "_yaml.pyx":1073 - * raise TypeError("tag handle must be a 
string") - * else: - * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - */ - __pyx_k_tuple_55 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_55)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1073; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_55)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_53)); - PyTuple_SET_ITEM(__pyx_k_tuple_55, 0, ((PyObject *)__pyx_kp_u_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_55)); - - /* "_yaml.pyx":1080 - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag prefix must be a string") - */ - __pyx_k_tuple_57 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_57)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1080; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_57)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_56)); - PyTuple_SET_ITEM(__pyx_k_tuple_57, 0, ((PyObject *)__pyx_kp_s_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_57)); - - /* "_yaml.pyx":1082 - * raise TypeError("tag prefix must be a string") - * else: - * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 - */ - __pyx_k_tuple_58 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_58)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1082; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_58)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_56)); - PyTuple_SET_ITEM(__pyx_k_tuple_58, 0, ((PyObject *)__pyx_kp_u_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_58)); - - /* "_yaml.pyx":1103 - * if not 
PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_k_tuple_60 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_60)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1103; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_60)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_60, 0, ((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_60)); - - /* "_yaml.pyx":1105 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * if yaml_alias_event_initialize(event, anchor) == 0: - */ - __pyx_k_tuple_61 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_61)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1105; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_61)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_61, 0, ((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_61)); - - /* "_yaml.pyx":1117 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_k_tuple_62 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_62)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1117; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_62)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_62, 0, ((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_62)); - - /* "_yaml.pyx":1119 - * raise 
TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_k_tuple_63 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_63)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1119; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_63)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_63, 0, ((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_63)); - - /* "_yaml.pyx":1128 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_65 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_65)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1128; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_65)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_65, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_65)); - - /* "_yaml.pyx":1130 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * value_object = event_object.value - */ - __pyx_k_tuple_66 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_66)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1130; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_66)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_66, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_66)); - - /* "_yaml.pyx":1137 - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 
3: - * raise TypeError("value must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"value must be a string") - */ - __pyx_k_tuple_68 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_68)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1137; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_68)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_67)); - PyTuple_SET_ITEM(__pyx_k_tuple_68, 0, ((PyObject *)__pyx_kp_s_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_68)); - - /* "_yaml.pyx":1139 - * raise TypeError("value must be a string") - * else: - * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<< - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - */ - __pyx_k_tuple_69 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_69)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1139; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_69)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_67)); - PyTuple_SET_ITEM(__pyx_k_tuple_69, 0, ((PyObject *)__pyx_kp_u_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_69)); - - /* "_yaml.pyx":1168 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_k_tuple_70 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_70)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1168; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_70)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_70, 0, ((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_70)); - - /* "_yaml.pyx":1170 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a 
string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_k_tuple_71 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_71)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1170; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_71)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_71, 0, ((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_71)); - - /* "_yaml.pyx":1179 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_72 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_72)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1179; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_72)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_72, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_72)); - - /* "_yaml.pyx":1181 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - */ - __pyx_k_tuple_73 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_73)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1181; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_73)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_73, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_73)); - - /* "_yaml.pyx":1200 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor 
must be a string") - */ - __pyx_k_tuple_74 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_74)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1200; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_74)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_74, 0, ((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_74)); - - /* "_yaml.pyx":1202 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * tag = NULL - */ - __pyx_k_tuple_75 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_75)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1202; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_75)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_75, 0, ((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_75)); - - /* "_yaml.pyx":1211 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_76 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_76)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1211; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_76)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_76, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_76)); - - /* "_yaml.pyx":1213 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * implicit = 0 - */ - __pyx_k_tuple_77 = PyTuple_New(1); if 
(unlikely(!__pyx_k_tuple_77)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1213; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_77)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_77, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_77)); - - /* "_yaml.pyx":1263 - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is closed") - */ - __pyx_k_tuple_80 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_80)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1263; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_80)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_79)); - PyTuple_SET_ITEM(__pyx_k_tuple_80, 0, ((PyObject *)__pyx_kp_s_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_80)); - - /* "_yaml.pyx":1265 - * raise SerializerError("serializer is closed") - * else: - * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<< - * else: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_k_tuple_81 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_81)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1265; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_81)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_79)); - PyTuple_SET_ITEM(__pyx_k_tuple_81, 0, ((PyObject *)__pyx_kp_u_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_81)); - - /* "_yaml.pyx":1268 - * else: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is already opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is already opened") - */ - __pyx_k_tuple_83 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_83)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1268; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_83)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_82)); - PyTuple_SET_ITEM(__pyx_k_tuple_83, 0, ((PyObject *)__pyx_kp_s_82)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_82)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_83)); - - /* "_yaml.pyx":1270 - * raise SerializerError("serializer is already opened") - * else: - * raise SerializerError(u"serializer is already opened") # <<<<<<<<<<<<<< - * - * def close(self): - */ - __pyx_k_tuple_84 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_84)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1270; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_84)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_82)); - PyTuple_SET_ITEM(__pyx_k_tuple_84, 0, ((PyObject *)__pyx_kp_u_82)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_82)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_84)); - - /* "_yaml.pyx":1276 - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is not opened") - */ - __pyx_k_tuple_86 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_86)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1276; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_86)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_85)); - PyTuple_SET_ITEM(__pyx_k_tuple_86, 0, ((PyObject *)__pyx_kp_s_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_86)); - - /* "_yaml.pyx":1278 - * raise SerializerError("serializer is not opened") - * else: - * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<< - * elif self.closed == 0: - * yaml_stream_end_event_initialize(&event) - */ - __pyx_k_tuple_87 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_87)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1278; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject 
*)__pyx_k_tuple_87)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_85)); - PyTuple_SET_ITEM(__pyx_k_tuple_87, 0, ((PyObject *)__pyx_kp_u_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_87)); - - /* "_yaml.pyx":1295 - * if self.closed == -1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is not opened") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is not opened") - */ - __pyx_k_tuple_88 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_88)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1295; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_88)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_85)); - PyTuple_SET_ITEM(__pyx_k_tuple_88, 0, ((PyObject *)__pyx_kp_s_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_88)); - - /* "_yaml.pyx":1297 - * raise SerializerError("serializer is not opened") - * else: - * raise SerializerError(u"serializer is not opened") # <<<<<<<<<<<<<< - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - */ - __pyx_k_tuple_89 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_89)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1297; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_89)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_85)); - PyTuple_SET_ITEM(__pyx_k_tuple_89, 0, ((PyObject *)__pyx_kp_u_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_85)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_89)); - - /* "_yaml.pyx":1300 - * elif self.closed == 1: - * if PY_MAJOR_VERSION < 3: - * raise SerializerError("serializer is closed") # <<<<<<<<<<<<<< - * else: - * raise SerializerError(u"serializer is closed") - */ - __pyx_k_tuple_90 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_90)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1300; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_90)); - __Pyx_INCREF(((PyObject 
*)__pyx_kp_s_79)); - PyTuple_SET_ITEM(__pyx_k_tuple_90, 0, ((PyObject *)__pyx_kp_s_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_90)); - - /* "_yaml.pyx":1302 - * raise SerializerError("serializer is closed") - * else: - * raise SerializerError(u"serializer is closed") # <<<<<<<<<<<<<< - * cache = [] - * version_directive = NULL - */ - __pyx_k_tuple_91 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_91)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1302; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_91)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_79)); - PyTuple_SET_ITEM(__pyx_k_tuple_91, 0, ((PyObject *)__pyx_kp_u_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_79)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_91)); - - /* "_yaml.pyx":1314 - * if len(self.use_tags) > 128: - * if PY_MAJOR_VERSION < 3: - * raise ValueError("too many tags") # <<<<<<<<<<<<<< - * else: - * raise ValueError(u"too many tags") - */ - __pyx_k_tuple_92 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_92)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1314; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_92)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_50)); - PyTuple_SET_ITEM(__pyx_k_tuple_92, 0, ((PyObject *)__pyx_kp_s_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_92)); - - /* "_yaml.pyx":1316 - * raise ValueError("too many tags") - * else: - * raise ValueError(u"too many tags") # <<<<<<<<<<<<<< - * tag_directives_start = tag_directives_value - * tag_directives_end = tag_directives_value - */ - __pyx_k_tuple_93 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_93)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1316; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_93)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_50)); - PyTuple_SET_ITEM(__pyx_k_tuple_93, 0, ((PyObject *)__pyx_kp_u_50)); - 
__Pyx_GIVEREF(((PyObject *)__pyx_kp_u_50)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_93)); - - /* "_yaml.pyx":1326 - * if not PyString_CheckExact(handle): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag handle must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag handle must be a string") - */ - __pyx_k_tuple_94 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_94)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1326; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_94)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_53)); - PyTuple_SET_ITEM(__pyx_k_tuple_94, 0, ((PyObject *)__pyx_kp_s_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_94)); - - /* "_yaml.pyx":1328 - * raise TypeError("tag handle must be a string") - * else: - * raise TypeError(u"tag handle must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.handle = PyString_AS_STRING(handle) - * if PyUnicode_CheckExact(prefix): - */ - __pyx_k_tuple_95 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_95)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1328; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_95)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_53)); - PyTuple_SET_ITEM(__pyx_k_tuple_95, 0, ((PyObject *)__pyx_kp_u_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_53)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_95)); - - /* "_yaml.pyx":1335 - * if not PyString_CheckExact(prefix): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag prefix must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag prefix must be a string") - */ - __pyx_k_tuple_96 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_96)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1335; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_96)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_56)); - PyTuple_SET_ITEM(__pyx_k_tuple_96, 0, ((PyObject *)__pyx_kp_s_56)); - 
__Pyx_GIVEREF(((PyObject *)__pyx_kp_s_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_96)); - - /* "_yaml.pyx":1337 - * raise TypeError("tag prefix must be a string") - * else: - * raise TypeError(u"tag prefix must be a string") # <<<<<<<<<<<<<< - * tag_directives_end.prefix = PyString_AS_STRING(prefix) - * tag_directives_end = tag_directives_end+1 - */ - __pyx_k_tuple_97 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_97)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1337; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_97)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_56)); - PyTuple_SET_ITEM(__pyx_k_tuple_97, 0, ((PyObject *)__pyx_kp_u_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_56)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_97)); - - /* "_yaml.pyx":1394 - * if not PyString_CheckExact(anchor_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("anchor must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"anchor must be a string") - */ - __pyx_k_tuple_99 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_99)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1394; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_99)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_99, 0, ((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_99)); - - /* "_yaml.pyx":1396 - * raise TypeError("anchor must be a string") - * else: - * raise TypeError(u"anchor must be a string") # <<<<<<<<<<<<<< - * anchor = PyString_AS_STRING(anchor_object) - * if node in self.serialized_nodes: - */ - __pyx_k_tuple_100 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_100)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1396; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_100)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_59)); - PyTuple_SET_ITEM(__pyx_k_tuple_100, 0, ((PyObject 
*)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_59)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_100)); - - /* "_yaml.pyx":1422 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_101 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_101)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1422; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_101)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_101, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_101)); - - /* "_yaml.pyx":1424 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * value_object = node.value - */ - __pyx_k_tuple_102 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_102)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1424; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_102)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_102, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_102)); - - /* "_yaml.pyx":1431 - * if not PyString_CheckExact(value_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("value must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"value must be a string") - */ - __pyx_k_tuple_103 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_103)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1431; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_103)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_67)); - PyTuple_SET_ITEM(__pyx_k_tuple_103, 0, ((PyObject *)__pyx_kp_s_67)); - __Pyx_GIVEREF(((PyObject 
*)__pyx_kp_s_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_103)); - - /* "_yaml.pyx":1433 - * raise TypeError("value must be a string") - * else: - * raise TypeError(u"value must be a string") # <<<<<<<<<<<<<< - * value = PyString_AS_STRING(value_object) - * length = PyString_GET_SIZE(value_object) - */ - __pyx_k_tuple_104 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_104)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1433; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_104)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_67)); - PyTuple_SET_ITEM(__pyx_k_tuple_104, 0, ((PyObject *)__pyx_kp_u_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_67)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_104)); - - /* "_yaml.pyx":1463 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_105 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_105)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1463; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_105)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_105, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_105)); - - /* "_yaml.pyx":1465 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * sequence_style = YAML_BLOCK_SEQUENCE_STYLE - */ - __pyx_k_tuple_106 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_106)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1465; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_106)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_106, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject 
*)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_106)); - - /* "_yaml.pyx":1495 - * if not PyString_CheckExact(tag_object): - * if PY_MAJOR_VERSION < 3: - * raise TypeError("tag must be a string") # <<<<<<<<<<<<<< - * else: - * raise TypeError(u"tag must be a string") - */ - __pyx_k_tuple_107 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_107)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1495; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_107)); - __Pyx_INCREF(((PyObject *)__pyx_kp_s_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_107, 0, ((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_107)); - - /* "_yaml.pyx":1497 - * raise TypeError("tag must be a string") - * else: - * raise TypeError(u"tag must be a string") # <<<<<<<<<<<<<< - * tag = PyString_AS_STRING(tag_object) - * mapping_style = YAML_BLOCK_MAPPING_STYLE - */ - __pyx_k_tuple_108 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_108)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1497; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_108)); - __Pyx_INCREF(((PyObject *)__pyx_kp_u_64)); - PyTuple_SET_ITEM(__pyx_k_tuple_108, 0, ((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_kp_u_64)); - __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_108)); - __Pyx_RefNannyFinishContext(); - return 0; - __pyx_L1_error:; - __Pyx_RefNannyFinishContext(); - return -1; -} - -static int __Pyx_InitGlobals(void) { - if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; 
- return 0; - __pyx_L1_error:; - return -1; -} - -#if PY_MAJOR_VERSION < 3 -PyMODINIT_FUNC init_yaml(void); /*proto*/ -PyMODINIT_FUNC init_yaml(void) -#else -PyMODINIT_FUNC PyInit__yaml(void); /*proto*/ -PyMODINIT_FUNC PyInit__yaml(void) -#endif -{ - PyObject *__pyx_t_1 = NULL; - PyObject *__pyx_t_2 = NULL; - #if CYTHON_REFNANNY - void* __pyx_refnanny = NULL; - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny"); - if (!__Pyx_RefNanny) { - PyErr_Clear(); - __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny"); - if (!__Pyx_RefNanny) - Py_FatalError("failed to import 'refnanny' module"); - } - __pyx_refnanny = __Pyx_RefNanny->SetupContext("PyMODINIT_FUNC PyInit__yaml(void)", __LINE__, __FILE__); - #endif - __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - #ifdef __pyx_binding_PyCFunctionType_USED - if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - #endif - /*--- Library function declarations ---*/ - /*--- Threads initialization code ---*/ - #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS - #ifdef WITH_THREAD /* Python build with threading support? 
*/ - PyEval_InitThreads(); - #endif - #endif - /*--- Module creation code ---*/ - #if PY_MAJOR_VERSION < 3 - __pyx_m = Py_InitModule4(__Pyx_NAMESTR("_yaml"), __pyx_methods, 0, 0, PYTHON_API_VERSION); - #else - __pyx_m = PyModule_Create(&__pyx_moduledef); - #endif - if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - #if PY_MAJOR_VERSION < 3 - Py_INCREF(__pyx_m); - #endif - __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME)); - if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - /*--- Initialize various global constants etc. ---*/ - if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__pyx_module_is_main__yaml) { - if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;}; - } - /*--- Builtin init code ---*/ - if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - /*--- Constants init code ---*/ - if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - /*--- Global init code ---*/ - /*--- Function export code ---*/ - /*--- Type init code ---*/ - if (PyType_Ready(&__pyx_type_5_yaml_Mark) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__Pyx_SetAttrString(__pyx_m, "Mark", (PyObject *)&__pyx_type_5_yaml_Mark) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 64; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_ptype_5_yaml_Mark = 
&__pyx_type_5_yaml_Mark; - __pyx_vtabptr_5_yaml_CParser = &__pyx_vtable_5_yaml_CParser; - __pyx_vtable_5_yaml_CParser._parser_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parser_error; - __pyx_vtable_5_yaml_CParser._scan = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__scan; - __pyx_vtable_5_yaml_CParser._token_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_token_t *))__pyx_f_5_yaml_7CParser__token_to_object; - __pyx_vtable_5_yaml_CParser._parse = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse; - __pyx_vtable_5_yaml_CParser._event_to_object = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, yaml_event_t *))__pyx_f_5_yaml_7CParser__event_to_object; - __pyx_vtable_5_yaml_CParser._compose_document = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__compose_document; - __pyx_vtable_5_yaml_CParser._compose_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *, PyObject *))__pyx_f_5_yaml_7CParser__compose_node; - __pyx_vtable_5_yaml_CParser._compose_scalar_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_scalar_node; - __pyx_vtable_5_yaml_CParser._compose_sequence_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_sequence_node; - __pyx_vtable_5_yaml_CParser._compose_mapping_node = (PyObject *(*)(struct __pyx_obj_5_yaml_CParser *, PyObject *))__pyx_f_5_yaml_7CParser__compose_mapping_node; - __pyx_vtable_5_yaml_CParser._parse_next_event = (int (*)(struct __pyx_obj_5_yaml_CParser *))__pyx_f_5_yaml_7CParser__parse_next_event; - if (PyType_Ready(&__pyx_type_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__Pyx_SetVtable(__pyx_type_5_yaml_CParser.tp_dict, __pyx_vtabptr_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__Pyx_SetAttrString(__pyx_m, "CParser", (PyObject *)&__pyx_type_5_yaml_CParser) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_ptype_5_yaml_CParser = &__pyx_type_5_yaml_CParser; - __pyx_vtabptr_5_yaml_CEmitter = &__pyx_vtable_5_yaml_CEmitter; - __pyx_vtable_5_yaml_CEmitter._emitter_error = (PyObject *(*)(struct __pyx_obj_5_yaml_CEmitter *))__pyx_f_5_yaml_8CEmitter__emitter_error; - __pyx_vtable_5_yaml_CEmitter._object_to_event = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, yaml_event_t *))__pyx_f_5_yaml_8CEmitter__object_to_event; - __pyx_vtable_5_yaml_CEmitter._anchor_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *))__pyx_f_5_yaml_8CEmitter__anchor_node; - __pyx_vtable_5_yaml_CEmitter._serialize_node = (int (*)(struct __pyx_obj_5_yaml_CEmitter *, PyObject *, PyObject *, PyObject *))__pyx_f_5_yaml_8CEmitter__serialize_node; - if (PyType_Ready(&__pyx_type_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__Pyx_SetVtable(__pyx_type_5_yaml_CEmitter.tp_dict, __pyx_vtabptr_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - if (__Pyx_SetAttrString(__pyx_m, "CEmitter", (PyObject *)&__pyx_type_5_yaml_CEmitter) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 935; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __pyx_ptype_5_yaml_CEmitter = &__pyx_type_5_yaml_CEmitter; - /*--- Type import code ---*/ - /*--- Function import code ---*/ - /*--- Execution code ---*/ - - /* "_yaml.pyx":2 - * - * import yaml # <<<<<<<<<<<<<< - * - * def get_version_string(): - */ - __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__yaml), 0); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - if 
(PyObject_SetAttr(__pyx_m, __pyx_n_s__yaml, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":4 - * import yaml - * - * def get_version_string(): # <<<<<<<<<<<<<< - * cdef char *value - * value = yaml_get_version_string() - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_get_version_string, NULL, __pyx_n_s___yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__get_version_string, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":12 - * return PyUnicode_FromString(value) - * - * def get_version(): # <<<<<<<<<<<<<< - * cdef int major, minor, patch - * yaml_get_version(&major, &minor, &patch) - */ - __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_5_yaml_1get_version, NULL, __pyx_n_s___yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__get_version, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":18 - * - * #Mark = yaml.error.Mark - * YAMLError = yaml.error.YAMLError # <<<<<<<<<<<<<< - * ReaderError = yaml.reader.ReaderError - * ScannerError = yaml.scanner.ScannerError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__error); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__YAMLError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__YAMLError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 18; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":19 - * #Mark = yaml.error.Mark - * YAMLError = yaml.error.YAMLError - * ReaderError = yaml.reader.ReaderError # <<<<<<<<<<<<<< - * ScannerError = yaml.scanner.ScannerError - * ParserError = yaml.parser.ParserError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__reader); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ReaderError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ReaderError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":20 - * YAMLError = yaml.error.YAMLError - * ReaderError = yaml.reader.ReaderError - * ScannerError = yaml.scanner.ScannerError # <<<<<<<<<<<<<< - * ParserError = yaml.parser.ParserError - * ComposerError = 
yaml.composer.ComposerError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__scanner); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ScannerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ScannerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 20; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":21 - * ReaderError = yaml.reader.ReaderError - * ScannerError = yaml.scanner.ScannerError - * ParserError = yaml.parser.ParserError # <<<<<<<<<<<<<< - * ComposerError = yaml.composer.ComposerError - * ConstructorError = yaml.constructor.ConstructorError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__parser); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ParserError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, 
__pyx_n_s__ParserError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":22 - * ScannerError = yaml.scanner.ScannerError - * ParserError = yaml.parser.ParserError - * ComposerError = yaml.composer.ComposerError # <<<<<<<<<<<<<< - * ConstructorError = yaml.constructor.ConstructorError - * EmitterError = yaml.emitter.EmitterError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__composer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ComposerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ComposerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 22; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":23 - * ParserError = yaml.parser.ParserError - * ComposerError = yaml.composer.ComposerError - * ConstructorError = yaml.constructor.ConstructorError # <<<<<<<<<<<<<< - * EmitterError = yaml.emitter.EmitterError - * SerializerError = yaml.serializer.SerializerError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__constructor); if (unlikely(!__pyx_t_2)) {__pyx_filename 
= __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ConstructorError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ConstructorError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":24 - * ComposerError = yaml.composer.ComposerError - * ConstructorError = yaml.constructor.ConstructorError - * EmitterError = yaml.emitter.EmitterError # <<<<<<<<<<<<<< - * SerializerError = yaml.serializer.SerializerError - * RepresenterError = yaml.representer.RepresenterError - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__emitter); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__EmitterError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EmitterError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":25 - * ConstructorError = yaml.constructor.ConstructorError - * EmitterError = yaml.emitter.EmitterError - * 
SerializerError = yaml.serializer.SerializerError # <<<<<<<<<<<<<< - * RepresenterError = yaml.representer.RepresenterError - * - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__serializer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__SerializerError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__SerializerError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":26 - * EmitterError = yaml.emitter.EmitterError - * SerializerError = yaml.serializer.SerializerError - * RepresenterError = yaml.representer.RepresenterError # <<<<<<<<<<<<<< - * - * StreamStartToken = yaml.tokens.StreamStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__representer); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__RepresenterError); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__RepresenterError, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 26; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":28 - * RepresenterError = yaml.representer.RepresenterError - * - * StreamStartToken = yaml.tokens.StreamStartToken # <<<<<<<<<<<<<< - * StreamEndToken = yaml.tokens.StreamEndToken - * DirectiveToken = yaml.tokens.DirectiveToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__StreamStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__StreamStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":29 - * - * StreamStartToken = yaml.tokens.StreamStartToken - * StreamEndToken = yaml.tokens.StreamEndToken # <<<<<<<<<<<<<< - * DirectiveToken = yaml.tokens.DirectiveToken - * DocumentStartToken = yaml.tokens.DocumentStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if 
(unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__StreamEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__StreamEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":30 - * StreamStartToken = yaml.tokens.StreamStartToken - * StreamEndToken = yaml.tokens.StreamEndToken - * DirectiveToken = yaml.tokens.DirectiveToken # <<<<<<<<<<<<<< - * DocumentStartToken = yaml.tokens.DocumentStartToken - * DocumentEndToken = yaml.tokens.DocumentEndToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__DirectiveToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DirectiveToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 30; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":31 - * StreamEndToken = yaml.tokens.StreamEndToken - * DirectiveToken = 
yaml.tokens.DirectiveToken - * DocumentStartToken = yaml.tokens.DocumentStartToken # <<<<<<<<<<<<<< - * DocumentEndToken = yaml.tokens.DocumentEndToken - * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__DocumentStartToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DocumentStartToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 31; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":32 - * DirectiveToken = yaml.tokens.DirectiveToken - * DocumentStartToken = yaml.tokens.DocumentStartToken - * DocumentEndToken = yaml.tokens.DocumentEndToken # <<<<<<<<<<<<<< - * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken - * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, 
__pyx_n_s__DocumentEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DocumentEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":33 - * DocumentStartToken = yaml.tokens.DocumentStartToken - * DocumentEndToken = yaml.tokens.DocumentEndToken - * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken # <<<<<<<<<<<<<< - * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken - * BlockEndToken = yaml.tokens.BlockEndToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s_15); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_15, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 33; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":34 - * DocumentEndToken = yaml.tokens.DocumentEndToken - * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken - * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken # <<<<<<<<<<<<<< - * BlockEndToken = yaml.tokens.BlockEndToken - * FlowSequenceStartToken = 
yaml.tokens.FlowSequenceStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s_16); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_16, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":35 - * BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken - * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken - * BlockEndToken = yaml.tokens.BlockEndToken # <<<<<<<<<<<<<< - * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken - * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__BlockEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); 
__pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__BlockEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 35; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":36 - * BlockMappingStartToken = yaml.tokens.BlockMappingStartToken - * BlockEndToken = yaml.tokens.BlockEndToken - * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken # <<<<<<<<<<<<<< - * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken - * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s_17); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_17, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 36; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":37 - * BlockEndToken = yaml.tokens.BlockEndToken - * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken - * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken # <<<<<<<<<<<<<< - * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken - * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - 
__Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s_19); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_19, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":38 - * FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken - * FlowMappingStartToken = yaml.tokens.FlowMappingStartToken - * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken # <<<<<<<<<<<<<< - * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken - * KeyToken = yaml.tokens.KeyToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s_18); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s_18, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 38; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":39 - * 
FlowMappingStartToken = yaml.tokens.FlowMappingStartToken - * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken - * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken # <<<<<<<<<<<<<< - * KeyToken = yaml.tokens.KeyToken - * ValueToken = yaml.tokens.ValueToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__FlowMappingEndToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__FlowMappingEndToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 39; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":40 - * FlowSequenceEndToken = yaml.tokens.FlowSequenceEndToken - * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken - * KeyToken = yaml.tokens.KeyToken # <<<<<<<<<<<<<< - * ValueToken = yaml.tokens.ValueToken - * BlockEntryToken = yaml.tokens.BlockEntryToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = 
PyObject_GetAttr(__pyx_t_2, __pyx_n_s__KeyToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__KeyToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 40; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":41 - * FlowMappingEndToken = yaml.tokens.FlowMappingEndToken - * KeyToken = yaml.tokens.KeyToken - * ValueToken = yaml.tokens.ValueToken # <<<<<<<<<<<<<< - * BlockEntryToken = yaml.tokens.BlockEntryToken - * FlowEntryToken = yaml.tokens.FlowEntryToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ValueToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ValueToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":42 - * KeyToken = yaml.tokens.KeyToken - * ValueToken = yaml.tokens.ValueToken - * BlockEntryToken = yaml.tokens.BlockEntryToken # <<<<<<<<<<<<<< - * FlowEntryToken = yaml.tokens.FlowEntryToken - * AliasToken = yaml.tokens.AliasToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__BlockEntryToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__BlockEntryToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":43 - * ValueToken = yaml.tokens.ValueToken - * BlockEntryToken = yaml.tokens.BlockEntryToken - * FlowEntryToken = yaml.tokens.FlowEntryToken # <<<<<<<<<<<<<< - * AliasToken = yaml.tokens.AliasToken - * AnchorToken = yaml.tokens.AnchorToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__FlowEntryToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__FlowEntryToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 43; __pyx_clineno = __LINE__; goto 
__pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":44 - * BlockEntryToken = yaml.tokens.BlockEntryToken - * FlowEntryToken = yaml.tokens.FlowEntryToken - * AliasToken = yaml.tokens.AliasToken # <<<<<<<<<<<<<< - * AnchorToken = yaml.tokens.AnchorToken - * TagToken = yaml.tokens.TagToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__AliasToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__AliasToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 44; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":45 - * FlowEntryToken = yaml.tokens.FlowEntryToken - * AliasToken = yaml.tokens.AliasToken - * AnchorToken = yaml.tokens.AnchorToken # <<<<<<<<<<<<<< - * TagToken = yaml.tokens.TagToken - * ScalarToken = yaml.tokens.ScalarToken - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, 
__pyx_n_s__AnchorToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__AnchorToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":46 - * AliasToken = yaml.tokens.AliasToken - * AnchorToken = yaml.tokens.AnchorToken - * TagToken = yaml.tokens.TagToken # <<<<<<<<<<<<<< - * ScalarToken = yaml.tokens.ScalarToken - * - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__TagToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__TagToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 46; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":47 - * AnchorToken = yaml.tokens.AnchorToken - * TagToken = yaml.tokens.TagToken - * ScalarToken = yaml.tokens.ScalarToken # <<<<<<<<<<<<<< - * - * StreamStartEvent = yaml.events.StreamStartEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = 
PyObject_GetAttr(__pyx_t_1, __pyx_n_s__tokens); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ScalarToken); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ScalarToken, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 47; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":49 - * ScalarToken = yaml.tokens.ScalarToken - * - * StreamStartEvent = yaml.events.StreamStartEvent # <<<<<<<<<<<<<< - * StreamEndEvent = yaml.events.StreamEndEvent - * DocumentStartEvent = yaml.events.DocumentStartEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__StreamStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__StreamStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 49; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":50 - * - * StreamStartEvent = yaml.events.StreamStartEvent - * StreamEndEvent = 
yaml.events.StreamEndEvent # <<<<<<<<<<<<<< - * DocumentStartEvent = yaml.events.DocumentStartEvent - * DocumentEndEvent = yaml.events.DocumentEndEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__StreamEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__StreamEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":51 - * StreamStartEvent = yaml.events.StreamStartEvent - * StreamEndEvent = yaml.events.StreamEndEvent - * DocumentStartEvent = yaml.events.DocumentStartEvent # <<<<<<<<<<<<<< - * DocumentEndEvent = yaml.events.DocumentEndEvent - * AliasEvent = yaml.events.AliasEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__DocumentStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; 
__pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DocumentStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 51; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":52 - * StreamEndEvent = yaml.events.StreamEndEvent - * DocumentStartEvent = yaml.events.DocumentStartEvent - * DocumentEndEvent = yaml.events.DocumentEndEvent # <<<<<<<<<<<<<< - * AliasEvent = yaml.events.AliasEvent - * ScalarEvent = yaml.events.ScalarEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__DocumentEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__DocumentEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":53 - * DocumentStartEvent = yaml.events.DocumentStartEvent - * DocumentEndEvent = yaml.events.DocumentEndEvent - * AliasEvent = yaml.events.AliasEvent # <<<<<<<<<<<<<< - * ScalarEvent = yaml.events.ScalarEvent - * SequenceStartEvent = yaml.events.SequenceStartEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = 
__LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__AliasEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__AliasEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 53; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":54 - * DocumentEndEvent = yaml.events.DocumentEndEvent - * AliasEvent = yaml.events.AliasEvent - * ScalarEvent = yaml.events.ScalarEvent # <<<<<<<<<<<<<< - * SequenceStartEvent = yaml.events.SequenceStartEvent - * SequenceEndEvent = yaml.events.SequenceEndEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ScalarEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ScalarEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - 
/* "_yaml.pyx":55 - * AliasEvent = yaml.events.AliasEvent - * ScalarEvent = yaml.events.ScalarEvent - * SequenceStartEvent = yaml.events.SequenceStartEvent # <<<<<<<<<<<<<< - * SequenceEndEvent = yaml.events.SequenceEndEvent - * MappingStartEvent = yaml.events.MappingStartEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__SequenceStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__SequenceStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 55; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":56 - * ScalarEvent = yaml.events.ScalarEvent - * SequenceStartEvent = yaml.events.SequenceStartEvent - * SequenceEndEvent = yaml.events.SequenceEndEvent # <<<<<<<<<<<<<< - * MappingStartEvent = yaml.events.MappingStartEvent - * MappingEndEvent = yaml.events.MappingEndEvent - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = 
PyObject_GetAttr(__pyx_t_2, __pyx_n_s__SequenceEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__SequenceEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 56; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":57 - * SequenceStartEvent = yaml.events.SequenceStartEvent - * SequenceEndEvent = yaml.events.SequenceEndEvent - * MappingStartEvent = yaml.events.MappingStartEvent # <<<<<<<<<<<<<< - * MappingEndEvent = yaml.events.MappingEndEvent - * - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__MappingStartEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__MappingStartEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 57; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":58 - * SequenceEndEvent = yaml.events.SequenceEndEvent - * MappingStartEvent = yaml.events.MappingStartEvent - * MappingEndEvent = yaml.events.MappingEndEvent # <<<<<<<<<<<<<< - * - * ScalarNode = yaml.nodes.ScalarNode - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) 
{__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__events); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__MappingEndEvent); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__MappingEndEvent, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":60 - * MappingEndEvent = yaml.events.MappingEndEvent - * - * ScalarNode = yaml.nodes.ScalarNode # <<<<<<<<<<<<<< - * SequenceNode = yaml.nodes.SequenceNode - * MappingNode = yaml.nodes.MappingNode - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__ScalarNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ScalarNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 60; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 
0; - - /* "_yaml.pyx":61 - * - * ScalarNode = yaml.nodes.ScalarNode - * SequenceNode = yaml.nodes.SequenceNode # <<<<<<<<<<<<<< - * MappingNode = yaml.nodes.MappingNode - * - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__SequenceNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__SequenceNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 61; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":62 - * ScalarNode = yaml.nodes.ScalarNode - * SequenceNode = yaml.nodes.SequenceNode - * MappingNode = yaml.nodes.MappingNode # <<<<<<<<<<<<<< - * - * cdef class Mark: - */ - __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__yaml); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - __pyx_t_2 = PyObject_GetAttr(__pyx_t_1, __pyx_n_s__nodes); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_2); - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - __pyx_t_1 = PyObject_GetAttr(__pyx_t_2, __pyx_n_s__MappingNode); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(__pyx_t_1); - 
__Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0; - if (PyObject_SetAttr(__pyx_m, __pyx_n_s__MappingNode, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0; - - /* "_yaml.pyx":2 - * - * import yaml # <<<<<<<<<<<<<< - * - * def get_version_string(): - */ - __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_GOTREF(((PyObject *)__pyx_t_1)); - if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 2; __pyx_clineno = __LINE__; goto __pyx_L1_error;} - __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0; - goto __pyx_L0; - __pyx_L1_error:; - __Pyx_XDECREF(__pyx_t_1); - __Pyx_XDECREF(__pyx_t_2); - if (__pyx_m) { - __Pyx_AddTraceback("init _yaml"); - Py_DECREF(__pyx_m); __pyx_m = 0; - } else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_ImportError, "init _yaml"); - } - __pyx_L0:; - __Pyx_RefNannyFinishContext(); - #if PY_MAJOR_VERSION < 3 - return; - #else - return __pyx_m; - #endif -} - -/* Runtime support code */ - -static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) { - PyObject *result; - result = PyObject_GetAttr(dict, name); - if (!result) - PyErr_SetObject(PyExc_NameError, name); - return result; -} - -static void __Pyx_RaiseArgtupleInvalid( - const char* func_name, - int exact, - Py_ssize_t num_min, - Py_ssize_t num_max, - Py_ssize_t num_found) -{ - Py_ssize_t num_expected; - const char *number, *more_or_less; - - if (num_found < num_min) { - num_expected = num_min; - more_or_less = "at least"; - } else { - num_expected = num_max; - more_or_less = "at most"; - } - if (exact) { - more_or_less = "exactly"; - } - number = (num_expected == 1) ? 
"" : "s"; - PyErr_Format(PyExc_TypeError, - #if PY_VERSION_HEX < 0x02050000 - "%s() takes %s %d positional argument%s (%d given)", - #else - "%s() takes %s %zd positional argument%s (%zd given)", - #endif - func_name, more_or_less, num_expected, number, num_found); -} - -static void __Pyx_RaiseDoubleKeywordsError( - const char* func_name, - PyObject* kw_name) -{ - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION >= 3 - "%s() got multiple values for keyword argument '%U'", func_name, kw_name); - #else - "%s() got multiple values for keyword argument '%s'", func_name, - PyString_AS_STRING(kw_name)); - #endif -} - -static int __Pyx_ParseOptionalKeywords( - PyObject *kwds, - PyObject **argnames[], - PyObject *kwds2, - PyObject *values[], - Py_ssize_t num_pos_args, - const char* function_name) -{ - PyObject *key = 0, *value = 0; - Py_ssize_t pos = 0; - PyObject*** name; - PyObject*** first_kw_arg = argnames + num_pos_args; - - while (PyDict_Next(kwds, &pos, &key, &value)) { - name = first_kw_arg; - while (*name && (**name != key)) name++; - if (*name) { - values[name-argnames] = value; - } else { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) { - #else - if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key))) { - #endif - goto invalid_keyword_type; - } else { - for (name = first_kw_arg; *name; name++) { - #if PY_MAJOR_VERSION >= 3 - if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) && - PyUnicode_Compare(**name, key) == 0) break; - #else - if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) && - _PyString_Eq(**name, key)) break; - #endif - } - if (*name) { - values[name-argnames] = value; - } else { - /* unexpected keyword found */ - for (name=argnames; name != first_kw_arg; name++) { - if (**name == key) goto arg_passed_twice; - #if PY_MAJOR_VERSION >= 3 - if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) && - PyUnicode_Compare(**name, key) == 0) goto arg_passed_twice; 
- #else - if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) && - _PyString_Eq(**name, key)) goto arg_passed_twice; - #endif - } - if (kwds2) { - if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad; - } else { - goto invalid_keyword; - } - } - } - } - } - return 0; -arg_passed_twice: - __Pyx_RaiseDoubleKeywordsError(function_name, **name); - goto bad; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%s() keywords must be strings", function_name); - goto bad; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%s() got an unexpected keyword argument '%s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif -bad: - return -1; -} - -static int __Pyx_GetException(PyObject **type, PyObject **value, PyObject **tb) { - PyObject *local_type, *local_value, *local_tb; - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyThreadState *tstate = PyThreadState_GET(); - local_type = tstate->curexc_type; - local_value = tstate->curexc_value; - local_tb = tstate->curexc_traceback; - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; - PyErr_NormalizeException(&local_type, &local_value, &local_tb); - if (unlikely(tstate->curexc_type)) - goto bad; - #if PY_MAJOR_VERSION >= 3 - if (unlikely(PyException_SetTraceback(local_value, local_tb) < 0)) - goto bad; - #endif - *type = local_type; - *value = local_value; - *tb = local_tb; - Py_INCREF(local_type); - Py_INCREF(local_value); - Py_INCREF(local_tb); - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = local_type; - tstate->exc_value = local_value; - tstate->exc_traceback = local_tb; - /* Make sure tstate is in a consistent state when we XDECREF - these objects (XDECREF may run arbitrary code). 
*/ - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); - return 0; -bad: - *type = 0; - *value = 0; - *tb = 0; - Py_XDECREF(local_type); - Py_XDECREF(local_value); - Py_XDECREF(local_tb); - return -1; -} - - -static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyThreadState *tstate = PyThreadState_GET(); - - tmp_type = tstate->curexc_type; - tmp_value = tstate->curexc_value; - tmp_tb = tstate->curexc_traceback; - tstate->curexc_type = type; - tstate->curexc_value = value; - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} - -static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) { - PyThreadState *tstate = PyThreadState_GET(); - *type = tstate->curexc_type; - *value = tstate->curexc_value; - *tb = tstate->curexc_traceback; - - tstate->curexc_type = 0; - tstate->curexc_value = 0; - tstate->curexc_traceback = 0; -} - - -#if PY_MAJOR_VERSION < 3 -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) { - Py_XINCREF(type); - Py_XINCREF(value); - Py_XINCREF(tb); - /* First, check the traceback argument, replacing None with NULL. */ - if (tb == Py_None) { - Py_DECREF(tb); - tb = 0; - } - else if (tb != NULL && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto raise_error; - } - /* Next, replace a missing value with None */ - if (value == NULL) { - value = Py_None; - Py_INCREF(value); - } - #if PY_VERSION_HEX < 0x02050000 - if (!PyClass_Check(type)) - #else - if (!PyType_Check(type)) - #endif - { - /* Raising an instance. The value should be a dummy. 
*/ - if (value != Py_None) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto raise_error; - } - /* Normalize to raise , */ - Py_DECREF(value); - value = type; - #if PY_VERSION_HEX < 0x02050000 - if (PyInstance_Check(type)) { - type = (PyObject*) ((PyInstanceObject*)type)->in_class; - Py_INCREF(type); - } - else { - type = 0; - PyErr_SetString(PyExc_TypeError, - "raise: exception must be an old-style class or instance"); - goto raise_error; - } - #else - type = (PyObject*) Py_TYPE(type); - Py_INCREF(type); - if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto raise_error; - } - #endif - } - - __Pyx_ErrRestore(type, value, tb); - return; -raise_error: - Py_XDECREF(value); - Py_XDECREF(type); - Py_XDECREF(tb); - return; -} - -#else /* Python 3+ */ - -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) { - if (tb == Py_None) { - tb = 0; - } else if (tb && !PyTraceBack_Check(tb)) { - PyErr_SetString(PyExc_TypeError, - "raise: arg 3 must be a traceback or None"); - goto bad; - } - if (value == Py_None) - value = 0; - - if (PyExceptionInstance_Check(type)) { - if (value) { - PyErr_SetString(PyExc_TypeError, - "instance exception may not have a separate value"); - goto bad; - } - value = type; - type = (PyObject*) Py_TYPE(value); - } else if (!PyExceptionClass_Check(type)) { - PyErr_SetString(PyExc_TypeError, - "raise: exception class must be a subclass of BaseException"); - goto bad; - } - - PyErr_SetObject(type, value); - - if (tb) { - PyThreadState *tstate = PyThreadState_GET(); - PyObject* tmp_tb = tstate->curexc_traceback; - if (tb != tmp_tb) { - Py_INCREF(tb); - tstate->curexc_traceback = tb; - Py_XDECREF(tmp_tb); - } - } - -bad: - return; -} -#endif - -static CYTHON_INLINE int __Pyx_CheckKeywordStrings( - PyObject *kwdict, - const char* function_name, - int 
kw_allowed) -{ - PyObject* key = 0; - Py_ssize_t pos = 0; - while (PyDict_Next(kwdict, &pos, &key, 0)) { - #if PY_MAJOR_VERSION < 3 - if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) - #else - if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key))) - #endif - goto invalid_keyword_type; - } - if ((!kw_allowed) && unlikely(key)) - goto invalid_keyword; - return 1; -invalid_keyword_type: - PyErr_Format(PyExc_TypeError, - "%s() keywords must be strings", function_name); - return 0; -invalid_keyword: - PyErr_Format(PyExc_TypeError, - #if PY_MAJOR_VERSION < 3 - "%s() got an unexpected keyword argument '%s'", - function_name, PyString_AsString(key)); - #else - "%s() got an unexpected keyword argument '%U'", - function_name, key); - #endif - return 0; -} - - -static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) { - PyErr_Format(PyExc_ValueError, - #if PY_VERSION_HEX < 0x02050000 - "need more than %d value%s to unpack", (int)index, - #else - "need more than %zd value%s to unpack", index, - #endif - (index == 1) ? 
"" : "s"); -} - -static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) { - PyErr_Format(PyExc_ValueError, - #if PY_VERSION_HEX < 0x02050000 - "too many values to unpack (expected %d)", (int)expected); - #else - "too many values to unpack (expected %zd)", expected); - #endif -} - -static PyObject *__Pyx_UnpackItem(PyObject *iter, Py_ssize_t index) { - PyObject *item; - if (!(item = PyIter_Next(iter))) { - if (!PyErr_Occurred()) { - __Pyx_RaiseNeedMoreValuesError(index); - } - } - return item; -} - -static int __Pyx_EndUnpack(PyObject *iter, Py_ssize_t expected) { - PyObject *item; - if ((item = PyIter_Next(iter))) { - Py_DECREF(item); - __Pyx_RaiseTooManyValuesError(expected); - return -1; - } - else if (!PyErr_Occurred()) - return 0; - else - return -1; -} - -static CYTHON_INLINE void __Pyx_ExceptionSave(PyObject **type, PyObject **value, PyObject **tb) { - PyThreadState *tstate = PyThreadState_GET(); - *type = tstate->exc_type; - *value = tstate->exc_value; - *tb = tstate->exc_traceback; - Py_XINCREF(*type); - Py_XINCREF(*value); - Py_XINCREF(*tb); -} - -static void __Pyx_ExceptionReset(PyObject *type, PyObject *value, PyObject *tb) { - PyObject *tmp_type, *tmp_value, *tmp_tb; - PyThreadState *tstate = PyThreadState_GET(); - tmp_type = tstate->exc_type; - tmp_value = tstate->exc_value; - tmp_tb = tstate->exc_traceback; - tstate->exc_type = type; - tstate->exc_value = value; - tstate->exc_traceback = tb; - Py_XDECREF(tmp_type); - Py_XDECREF(tmp_value); - Py_XDECREF(tmp_tb); -} - -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list) { - PyObject *py_import = 0; - PyObject *empty_list = 0; - PyObject *module = 0; - PyObject *global_dict = 0; - PyObject *empty_dict = 0; - PyObject *list; - py_import = __Pyx_GetAttrString(__pyx_b, "__import__"); - if (!py_import) - goto bad; - if (from_list) - list = from_list; - else { - empty_list = PyList_New(0); - if (!empty_list) - goto bad; - list = empty_list; - } - global_dict = 
PyModule_GetDict(__pyx_m); - if (!global_dict) - goto bad; - empty_dict = PyDict_New(); - if (!empty_dict) - goto bad; - module = PyObject_CallFunctionObjArgs(py_import, - name, global_dict, empty_dict, list, NULL); -bad: - Py_XDECREF(empty_list); - Py_XDECREF(py_import); - Py_XDECREF(empty_dict); - return module; -} - -static CYTHON_INLINE PyObject *__Pyx_GetAttr3(PyObject *o, PyObject *n, PyObject *d) { - PyObject *r = PyObject_GetAttr(o, n); - if (!r) { - if (!PyErr_ExceptionMatches(PyExc_AttributeError)) - goto bad; - PyErr_Clear(); - r = d; - Py_INCREF(d); - } - return r; -bad: - return NULL; -} - -static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) { - if (s1 == s2) { /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */ - return (equals == Py_EQ); - } else if (PyUnicode_CheckExact(s1) & PyUnicode_CheckExact(s2)) { - if (PyUnicode_GET_SIZE(s1) != PyUnicode_GET_SIZE(s2)) { - return (equals == Py_NE); - } else if (PyUnicode_GET_SIZE(s1) == 1) { - if (equals == Py_EQ) - return (PyUnicode_AS_UNICODE(s1)[0] == PyUnicode_AS_UNICODE(s2)[0]); - else - return (PyUnicode_AS_UNICODE(s1)[0] != PyUnicode_AS_UNICODE(s2)[0]); - } else { - int result = PyUnicode_Compare(s1, s2); - if ((result == -1) && unlikely(PyErr_Occurred())) - return -1; - return (equals == Py_EQ) ? 
(result == 0) : (result != 0); - } - } else if ((s1 == Py_None) & (s2 == Py_None)) { - return (equals == Py_EQ); - } else if ((s1 == Py_None) & PyUnicode_CheckExact(s2)) { - return (equals == Py_NE); - } else if ((s2 == Py_None) & PyUnicode_CheckExact(s1)) { - return (equals == Py_NE); - } else { - int result; - PyObject* py_result = PyObject_RichCompare(s1, s2, equals); - if (!py_result) - return -1; - result = __Pyx_PyObject_IsTrue(py_result); - Py_DECREF(py_result); - return result; - } -} - -static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) { - const unsigned char neg_one = (unsigned char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned char" : - "value too large to convert to unsigned char"); - } - return (unsigned char)-1; - } - return (unsigned char)val; - } - return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) { - const unsigned short neg_one = (unsigned short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to unsigned short" : - "value too large to convert to unsigned short"); - } - return (unsigned short)-1; - } - return (unsigned short)val; - } - return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) { - const unsigned int neg_one = (unsigned int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(unsigned int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(unsigned int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to unsigned int" : - "value too large to convert to unsigned int"); - } - return (unsigned int)-1; - } - return (unsigned int)val; - } - return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x); -} - -static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) { - const char neg_one = (char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to char" : - "value too large to convert to char"); - } - return (char)-1; - } - return (char)val; - } - return (char)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) { - const short neg_one = (short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to short" : - "value too large to convert to short"); - } - return (short)-1; - } - return (short)val; - } - return (short)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to int" : - "value too large to convert to int"); - } - return (int)-1; - } - return (int)val; - } - return (int)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) { - const signed char neg_one = (signed char)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed char) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed char)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed char" : - "value too large to convert to signed char"); - } - return (signed char)-1; - } - return (signed char)val; - } - return (signed char)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) { - const signed short neg_one = (signed short)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed short) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed short)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to signed short" : - "value too large to convert to signed short"); - } - return (signed short)-1; - } - return (signed short)val; - } - return (signed short)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) { - const signed int neg_one = (signed int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(signed int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(signed int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? - "can't convert negative value to signed int" : - "value too large to convert to signed int"); - } - return (signed int)-1; - } - return (signed int)val; - } - return (signed int)__Pyx_PyInt_AsSignedLong(x); -} - -static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) { - const int neg_one = (int)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; - if (sizeof(int) < sizeof(long)) { - long val = __Pyx_PyInt_AsLong(x); - if (unlikely(val != (long)(int)val)) { - if (!unlikely(val == -1 && PyErr_Occurred())) { - PyErr_SetString(PyExc_OverflowError, - (is_unsigned && unlikely(val < 0)) ? 
- "can't convert negative value to int" : - "value too large to convert to int"); - } - return (int)-1; - } - return (int)val; - } - return (int)__Pyx_PyInt_AsLong(x); -} - -static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) { - const unsigned long neg_one = (unsigned long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; - } - return (unsigned long)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned long"); - return (unsigned long)-1; - } - return PyLong_AsUnsignedLong(x); - } else { - return PyLong_AsLong(x); - } - } else { - unsigned long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned long)-1; - val = __Pyx_PyInt_AsUnsignedLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) { - const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return (unsigned PY_LONG_LONG)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to unsigned PY_LONG_LONG"); - return (unsigned PY_LONG_LONG)-1; - } - return PyLong_AsUnsignedLongLong(x); - } else { - return 
PyLong_AsLongLong(x); - } - } else { - unsigned PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (unsigned PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsUnsignedLongLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) { - const long neg_one = (long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; - } - return (long)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to long"); - return (long)-1; - } - return PyLong_AsUnsignedLong(x); - } else { - return PyLong_AsLong(x); - } - } else { - long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (long)-1; - val = __Pyx_PyInt_AsLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) { - const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; - } - return (PY_LONG_LONG)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to PY_LONG_LONG"); - return (PY_LONG_LONG)-1; - } - return PyLong_AsUnsignedLongLong(x); - } else { - return PyLong_AsLongLong(x); - } - } else { - PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return 
(PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsLongLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) { - const signed long neg_one = (signed long)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; - } - return (signed long)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed long"); - return (signed long)-1; - } - return PyLong_AsUnsignedLong(x); - } else { - return PyLong_AsLong(x); - } - } else { - signed long val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return (signed long)-1; - val = __Pyx_PyInt_AsSignedLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) { - const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0; - const int is_unsigned = neg_one > const_zero; -#if PY_VERSION_HEX < 0x03000000 - if (likely(PyInt_Check(x))) { - long val = PyInt_AS_LONG(x); - if (is_unsigned && unlikely(val < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; - } - return (signed PY_LONG_LONG)val; - } else -#endif - if (likely(PyLong_Check(x))) { - if (is_unsigned) { - if (unlikely(Py_SIZE(x) < 0)) { - PyErr_SetString(PyExc_OverflowError, - "can't convert negative value to signed PY_LONG_LONG"); - return (signed PY_LONG_LONG)-1; - } - return PyLong_AsUnsignedLongLong(x); - } else { - return PyLong_AsLongLong(x); - } - } else { - signed PY_LONG_LONG val; - PyObject *tmp = __Pyx_PyNumber_Int(x); - if (!tmp) return 
(signed PY_LONG_LONG)-1; - val = __Pyx_PyInt_AsSignedLongLong(tmp); - Py_DECREF(tmp); - return val; - } -} - -static int __Pyx_SetVtable(PyObject *dict, void *vtable) { -#if PY_VERSION_HEX >= 0x02070000 && !(PY_MAJOR_VERSION==3&&PY_MINOR_VERSION==0) - PyObject *ob = PyCapsule_New(vtable, 0, 0); -#else - PyObject *ob = PyCObject_FromVoidPtr(vtable, 0); -#endif - if (!ob) - goto bad; - if (PyDict_SetItemString(dict, "__pyx_vtable__", ob) < 0) - goto bad; - Py_DECREF(ob); - return 0; -bad: - Py_XDECREF(ob); - return -1; -} - -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" - -static void __Pyx_AddTraceback(const char *funcname) { - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - PyObject *py_globals = 0; - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - - #if PY_MAJOR_VERSION < 3 - py_srcfile = PyString_FromString(__pyx_filename); - #else - py_srcfile = PyUnicode_FromString(__pyx_filename); - #endif - if (!py_srcfile) goto bad; - if (__pyx_clineno) { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #else - py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno); - #endif - } - else { - #if PY_MAJOR_VERSION < 3 - py_funcname = PyString_FromString(funcname); - #else - py_funcname = PyUnicode_FromString(funcname); - #endif - } - if (!py_funcname) goto bad; - py_globals = PyModule_GetDict(__pyx_m); - if (!py_globals) goto bad; - py_code = PyCode_New( - 0, /*int argcount,*/ - #if PY_MAJOR_VERSION >= 3 - 0, /*int kwonlyargcount,*/ - #endif - 0, /*int nlocals,*/ - 0, /*int stacksize,*/ - 0, /*int flags,*/ - __pyx_empty_bytes, /*PyObject *code,*/ - __pyx_empty_tuple, /*PyObject *consts,*/ - __pyx_empty_tuple, /*PyObject *names,*/ - __pyx_empty_tuple, /*PyObject *varnames,*/ - __pyx_empty_tuple, /*PyObject *freevars,*/ - __pyx_empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject 
*name,*/ - __pyx_lineno, /*int firstlineno,*/ - __pyx_empty_bytes /*PyObject *lnotab*/ - ); - if (!py_code) goto bad; - py_frame = PyFrame_New( - PyThreadState_GET(), /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - py_globals, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - py_frame->f_lineno = __pyx_lineno; - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} - -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) { - while (t->p) { - #if PY_MAJOR_VERSION < 3 - if (t->is_unicode) { - *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL); - } else if (t->intern) { - *t->p = PyString_InternFromString(t->s); - } else { - *t->p = PyString_FromStringAndSize(t->s, t->n - 1); - } - #else /* Python 3+ has unicode identifiers */ - if (t->is_unicode | t->is_str) { - if (t->intern) { - *t->p = PyUnicode_InternFromString(t->s); - } else if (t->encoding) { - *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL); - } else { - *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1); - } - } else { - *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1); - } - #endif - if (!*t->p) - return -1; - ++t; - } - return 0; -} - -/* Type Conversion Functions */ - -static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) { - int is_true = x == Py_True; - if (is_true | (x == Py_False) | (x == Py_None)) return is_true; - else return PyObject_IsTrue(x); -} - -static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) { - PyNumberMethods *m; - const char *name = NULL; - PyObject *res = NULL; -#if PY_VERSION_HEX < 0x03000000 - if (PyInt_Check(x) || PyLong_Check(x)) -#else - if (PyLong_Check(x)) -#endif - return Py_INCREF(x), x; - m = Py_TYPE(x)->tp_as_number; -#if PY_VERSION_HEX < 0x03000000 - if (m && m->nb_int) { - name = "int"; - res = PyNumber_Int(x); - } - else if (m && m->nb_long) { - name = "long"; - res = PyNumber_Long(x); - } -#else - if (m && 
m->nb_int) { - name = "int"; - res = PyNumber_Long(x); - } -#endif - if (res) { -#if PY_VERSION_HEX < 0x03000000 - if (!PyInt_Check(res) && !PyLong_Check(res)) { -#else - if (!PyLong_Check(res)) { -#endif - PyErr_Format(PyExc_TypeError, - "__%s__ returned non-%s (type %.200s)", - name, name, Py_TYPE(res)->tp_name); - Py_DECREF(res); - return NULL; - } - } - else if (!PyErr_Occurred()) { - PyErr_SetString(PyExc_TypeError, - "an integer is required"); - } - return res; -} - -static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) { - Py_ssize_t ival; - PyObject* x = PyNumber_Index(b); - if (!x) return -1; - ival = PyInt_AsSsize_t(x); - Py_DECREF(x); - return ival; -} - -static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) { -#if PY_VERSION_HEX < 0x02050000 - if (ival <= LONG_MAX) - return PyInt_FromLong((long)ival); - else { - unsigned char *bytes = (unsigned char *) &ival; - int one = 1; int little = (int)*(unsigned char*)&one; - return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0); - } -#else - return PyInt_FromSize_t(ival); -#endif -} - -static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) { - unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x); - if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) { - return (size_t)-1; - } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) { - PyErr_SetString(PyExc_OverflowError, - "value too large to convert to size_t"); - return (size_t)-1; - } - return (size_t)val; -} - - -#endif /* Py_PYTHON_H */ diff --git a/libs/PyYAML-3.10/ext/_yaml.h b/libs/PyYAML-3.10/ext/_yaml.h deleted file mode 100644 index 21fd6a9..0000000 --- a/libs/PyYAML-3.10/ext/_yaml.h +++ /dev/null @@ -1,23 +0,0 @@ - -#include - -#if PY_MAJOR_VERSION < 3 - -#define PyUnicode_FromString(s) PyUnicode_DecodeUTF8((s), strlen(s), "strict") - -#else - -#define PyString_CheckExact PyBytes_CheckExact -#define PyString_AS_STRING PyBytes_AS_STRING -#define PyString_GET_SIZE 
PyBytes_GET_SIZE -#define PyString_FromStringAndSize PyBytes_FromStringAndSize - -#endif - -#ifdef _MSC_VER /* MS Visual C++ 6.0 */ -#if _MSC_VER == 1200 - -#define PyLong_FromUnsignedLongLong(z) PyInt_FromLong(i) - -#endif -#endif diff --git a/libs/PyYAML-3.10/ext/_yaml.pxd b/libs/PyYAML-3.10/ext/_yaml.pxd deleted file mode 100644 index f47f459..0000000 --- a/libs/PyYAML-3.10/ext/_yaml.pxd +++ /dev/null @@ -1,251 +0,0 @@ - -cdef extern from "_yaml.h": - - void malloc(int l) - void memcpy(char *d, char *s, int l) - int strlen(char *s) - int PyString_CheckExact(object o) - int PyUnicode_CheckExact(object o) - char *PyString_AS_STRING(object o) - int PyString_GET_SIZE(object o) - object PyString_FromStringAndSize(char *v, int l) - object PyUnicode_FromString(char *u) - object PyUnicode_DecodeUTF8(char *u, int s, char *e) - object PyUnicode_AsUTF8String(object o) - int PY_MAJOR_VERSION - - ctypedef enum: - SIZEOF_VOID_P - ctypedef enum yaml_encoding_t: - YAML_ANY_ENCODING - YAML_UTF8_ENCODING - YAML_UTF16LE_ENCODING - YAML_UTF16BE_ENCODING - ctypedef enum yaml_break_t: - YAML_ANY_BREAK - YAML_CR_BREAK - YAML_LN_BREAK - YAML_CRLN_BREAK - ctypedef enum yaml_error_type_t: - YAML_NO_ERROR - YAML_MEMORY_ERROR - YAML_READER_ERROR - YAML_SCANNER_ERROR - YAML_PARSER_ERROR - YAML_WRITER_ERROR - YAML_EMITTER_ERROR - ctypedef enum yaml_scalar_style_t: - YAML_ANY_SCALAR_STYLE - YAML_PLAIN_SCALAR_STYLE - YAML_SINGLE_QUOTED_SCALAR_STYLE - YAML_DOUBLE_QUOTED_SCALAR_STYLE - YAML_LITERAL_SCALAR_STYLE - YAML_FOLDED_SCALAR_STYLE - ctypedef enum yaml_sequence_style_t: - YAML_ANY_SEQUENCE_STYLE - YAML_BLOCK_SEQUENCE_STYLE - YAML_FLOW_SEQUENCE_STYLE - ctypedef enum yaml_mapping_style_t: - YAML_ANY_MAPPING_STYLE - YAML_BLOCK_MAPPING_STYLE - YAML_FLOW_MAPPING_STYLE - ctypedef enum yaml_token_type_t: - YAML_NO_TOKEN - YAML_STREAM_START_TOKEN - YAML_STREAM_END_TOKEN - YAML_VERSION_DIRECTIVE_TOKEN - YAML_TAG_DIRECTIVE_TOKEN - YAML_DOCUMENT_START_TOKEN - YAML_DOCUMENT_END_TOKEN - 
YAML_BLOCK_SEQUENCE_START_TOKEN - YAML_BLOCK_MAPPING_START_TOKEN - YAML_BLOCK_END_TOKEN - YAML_FLOW_SEQUENCE_START_TOKEN - YAML_FLOW_SEQUENCE_END_TOKEN - YAML_FLOW_MAPPING_START_TOKEN - YAML_FLOW_MAPPING_END_TOKEN - YAML_BLOCK_ENTRY_TOKEN - YAML_FLOW_ENTRY_TOKEN - YAML_KEY_TOKEN - YAML_VALUE_TOKEN - YAML_ALIAS_TOKEN - YAML_ANCHOR_TOKEN - YAML_TAG_TOKEN - YAML_SCALAR_TOKEN - ctypedef enum yaml_event_type_t: - YAML_NO_EVENT - YAML_STREAM_START_EVENT - YAML_STREAM_END_EVENT - YAML_DOCUMENT_START_EVENT - YAML_DOCUMENT_END_EVENT - YAML_ALIAS_EVENT - YAML_SCALAR_EVENT - YAML_SEQUENCE_START_EVENT - YAML_SEQUENCE_END_EVENT - YAML_MAPPING_START_EVENT - YAML_MAPPING_END_EVENT - - ctypedef int yaml_read_handler_t(void *data, char *buffer, - int size, int *size_read) except 0 - - ctypedef int yaml_write_handler_t(void *data, char *buffer, - int size) except 0 - - ctypedef struct yaml_mark_t: - int index - int line - int column - ctypedef struct yaml_version_directive_t: - int major - int minor - ctypedef struct yaml_tag_directive_t: - char *handle - char *prefix - - ctypedef struct _yaml_token_stream_start_data_t: - yaml_encoding_t encoding - ctypedef struct _yaml_token_alias_data_t: - char *value - ctypedef struct _yaml_token_anchor_data_t: - char *value - ctypedef struct _yaml_token_tag_data_t: - char *handle - char *suffix - ctypedef struct _yaml_token_scalar_data_t: - char *value - int length - yaml_scalar_style_t style - ctypedef struct _yaml_token_version_directive_data_t: - int major - int minor - ctypedef struct _yaml_token_tag_directive_data_t: - char *handle - char *prefix - ctypedef union _yaml_token_data_t: - _yaml_token_stream_start_data_t stream_start - _yaml_token_alias_data_t alias - _yaml_token_anchor_data_t anchor - _yaml_token_tag_data_t tag - _yaml_token_scalar_data_t scalar - _yaml_token_version_directive_data_t version_directive - _yaml_token_tag_directive_data_t tag_directive - ctypedef struct yaml_token_t: - yaml_token_type_t type - _yaml_token_data_t 
data - yaml_mark_t start_mark - yaml_mark_t end_mark - - ctypedef struct _yaml_event_stream_start_data_t: - yaml_encoding_t encoding - ctypedef struct _yaml_event_document_start_data_tag_directives_t: - yaml_tag_directive_t *start - yaml_tag_directive_t *end - ctypedef struct _yaml_event_document_start_data_t: - yaml_version_directive_t *version_directive - _yaml_event_document_start_data_tag_directives_t tag_directives - int implicit - ctypedef struct _yaml_event_document_end_data_t: - int implicit - ctypedef struct _yaml_event_alias_data_t: - char *anchor - ctypedef struct _yaml_event_scalar_data_t: - char *anchor - char *tag - char *value - int length - int plain_implicit - int quoted_implicit - yaml_scalar_style_t style - ctypedef struct _yaml_event_sequence_start_data_t: - char *anchor - char *tag - int implicit - yaml_sequence_style_t style - ctypedef struct _yaml_event_mapping_start_data_t: - char *anchor - char *tag - int implicit - yaml_mapping_style_t style - ctypedef union _yaml_event_data_t: - _yaml_event_stream_start_data_t stream_start - _yaml_event_document_start_data_t document_start - _yaml_event_document_end_data_t document_end - _yaml_event_alias_data_t alias - _yaml_event_scalar_data_t scalar - _yaml_event_sequence_start_data_t sequence_start - _yaml_event_mapping_start_data_t mapping_start - ctypedef struct yaml_event_t: - yaml_event_type_t type - _yaml_event_data_t data - yaml_mark_t start_mark - yaml_mark_t end_mark - - ctypedef struct yaml_parser_t: - yaml_error_type_t error - char *problem - int problem_offset - int problem_value - yaml_mark_t problem_mark - char *context - yaml_mark_t context_mark - - ctypedef struct yaml_emitter_t: - yaml_error_type_t error - char *problem - - char *yaml_get_version_string() - void yaml_get_version(int *major, int *minor, int *patch) - - void yaml_token_delete(yaml_token_t *token) - - int yaml_stream_start_event_initialize(yaml_event_t *event, - yaml_encoding_t encoding) - int 
yaml_stream_end_event_initialize(yaml_event_t *event) - int yaml_document_start_event_initialize(yaml_event_t *event, - yaml_version_directive_t *version_directive, - yaml_tag_directive_t *tag_directives_start, - yaml_tag_directive_t *tag_directives_end, - int implicit) - int yaml_document_end_event_initialize(yaml_event_t *event, - int implicit) - int yaml_alias_event_initialize(yaml_event_t *event, char *anchor) - int yaml_scalar_event_initialize(yaml_event_t *event, - char *anchor, char *tag, char *value, int length, - int plain_implicit, int quoted_implicit, - yaml_scalar_style_t style) - int yaml_sequence_start_event_initialize(yaml_event_t *event, - char *anchor, char *tag, int implicit, yaml_sequence_style_t style) - int yaml_sequence_end_event_initialize(yaml_event_t *event) - int yaml_mapping_start_event_initialize(yaml_event_t *event, - char *anchor, char *tag, int implicit, yaml_mapping_style_t style) - int yaml_mapping_end_event_initialize(yaml_event_t *event) - void yaml_event_delete(yaml_event_t *event) - - int yaml_parser_initialize(yaml_parser_t *parser) - void yaml_parser_delete(yaml_parser_t *parser) - void yaml_parser_set_input_string(yaml_parser_t *parser, - char *input, int size) - void yaml_parser_set_input(yaml_parser_t *parser, - yaml_read_handler_t *handler, void *data) - void yaml_parser_set_encoding(yaml_parser_t *parser, - yaml_encoding_t encoding) - int yaml_parser_scan(yaml_parser_t *parser, yaml_token_t *token) except * - int yaml_parser_parse(yaml_parser_t *parser, yaml_event_t *event) except * - - int yaml_emitter_initialize(yaml_emitter_t *emitter) - void yaml_emitter_delete(yaml_emitter_t *emitter) - void yaml_emitter_set_output_string(yaml_emitter_t *emitter, - char *output, int size, int *size_written) - void yaml_emitter_set_output(yaml_emitter_t *emitter, - yaml_write_handler_t *handler, void *data) - void yaml_emitter_set_encoding(yaml_emitter_t *emitter, - yaml_encoding_t encoding) - void 
yaml_emitter_set_canonical(yaml_emitter_t *emitter, int canonical) - void yaml_emitter_set_indent(yaml_emitter_t *emitter, int indent) - void yaml_emitter_set_width(yaml_emitter_t *emitter, int width) - void yaml_emitter_set_unicode(yaml_emitter_t *emitter, int unicode) - void yaml_emitter_set_break(yaml_emitter_t *emitter, - yaml_break_t line_break) - int yaml_emitter_emit(yaml_emitter_t *emitter, yaml_event_t *event) except * - int yaml_emitter_flush(yaml_emitter_t *emitter) - diff --git a/libs/PyYAML-3.10/ext/_yaml.pyx b/libs/PyYAML-3.10/ext/_yaml.pyx deleted file mode 100644 index 5158fb4..0000000 --- a/libs/PyYAML-3.10/ext/_yaml.pyx +++ /dev/null @@ -1,1527 +0,0 @@ - -import yaml - -def get_version_string(): - cdef char *value - value = yaml_get_version_string() - if PY_MAJOR_VERSION < 3: - return value - else: - return PyUnicode_FromString(value) - -def get_version(): - cdef int major, minor, patch - yaml_get_version(&major, &minor, &patch) - return (major, minor, patch) - -#Mark = yaml.error.Mark -YAMLError = yaml.error.YAMLError -ReaderError = yaml.reader.ReaderError -ScannerError = yaml.scanner.ScannerError -ParserError = yaml.parser.ParserError -ComposerError = yaml.composer.ComposerError -ConstructorError = yaml.constructor.ConstructorError -EmitterError = yaml.emitter.EmitterError -SerializerError = yaml.serializer.SerializerError -RepresenterError = yaml.representer.RepresenterError - -StreamStartToken = yaml.tokens.StreamStartToken -StreamEndToken = yaml.tokens.StreamEndToken -DirectiveToken = yaml.tokens.DirectiveToken -DocumentStartToken = yaml.tokens.DocumentStartToken -DocumentEndToken = yaml.tokens.DocumentEndToken -BlockSequenceStartToken = yaml.tokens.BlockSequenceStartToken -BlockMappingStartToken = yaml.tokens.BlockMappingStartToken -BlockEndToken = yaml.tokens.BlockEndToken -FlowSequenceStartToken = yaml.tokens.FlowSequenceStartToken -FlowMappingStartToken = yaml.tokens.FlowMappingStartToken -FlowSequenceEndToken = 
yaml.tokens.FlowSequenceEndToken -FlowMappingEndToken = yaml.tokens.FlowMappingEndToken -KeyToken = yaml.tokens.KeyToken -ValueToken = yaml.tokens.ValueToken -BlockEntryToken = yaml.tokens.BlockEntryToken -FlowEntryToken = yaml.tokens.FlowEntryToken -AliasToken = yaml.tokens.AliasToken -AnchorToken = yaml.tokens.AnchorToken -TagToken = yaml.tokens.TagToken -ScalarToken = yaml.tokens.ScalarToken - -StreamStartEvent = yaml.events.StreamStartEvent -StreamEndEvent = yaml.events.StreamEndEvent -DocumentStartEvent = yaml.events.DocumentStartEvent -DocumentEndEvent = yaml.events.DocumentEndEvent -AliasEvent = yaml.events.AliasEvent -ScalarEvent = yaml.events.ScalarEvent -SequenceStartEvent = yaml.events.SequenceStartEvent -SequenceEndEvent = yaml.events.SequenceEndEvent -MappingStartEvent = yaml.events.MappingStartEvent -MappingEndEvent = yaml.events.MappingEndEvent - -ScalarNode = yaml.nodes.ScalarNode -SequenceNode = yaml.nodes.SequenceNode -MappingNode = yaml.nodes.MappingNode - -cdef class Mark: - cdef readonly object name - cdef readonly int index - cdef readonly int line - cdef readonly int column - cdef readonly buffer - cdef readonly pointer - - def __init__(self, object name, int index, int line, int column, - object buffer, object pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self): - return None - - def __str__(self): - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - return where - -#class YAMLError(Exception): -# pass -# -#class MarkedYAMLError(YAMLError): -# -# def __init__(self, context=None, context_mark=None, -# problem=None, problem_mark=None, note=None): -# self.context = context -# self.context_mark = context_mark -# self.problem = problem -# self.problem_mark = problem_mark -# self.note = note -# -# def __str__(self): -# lines = [] -# if self.context is not None: -# lines.append(self.context) -# if 
self.context_mark is not None \ -# and (self.problem is None or self.problem_mark is None -# or self.context_mark.name != self.problem_mark.name -# or self.context_mark.line != self.problem_mark.line -# or self.context_mark.column != self.problem_mark.column): -# lines.append(str(self.context_mark)) -# if self.problem is not None: -# lines.append(self.problem) -# if self.problem_mark is not None: -# lines.append(str(self.problem_mark)) -# if self.note is not None: -# lines.append(self.note) -# return '\n'.join(lines) -# -#class ReaderError(YAMLError): -# -# def __init__(self, name, position, character, encoding, reason): -# self.name = name -# self.character = character -# self.position = position -# self.encoding = encoding -# self.reason = reason -# -# def __str__(self): -# if isinstance(self.character, str): -# return "'%s' codec can't decode byte #x%02x: %s\n" \ -# " in \"%s\", position %d" \ -# % (self.encoding, ord(self.character), self.reason, -# self.name, self.position) -# else: -# return "unacceptable character #x%04x: %s\n" \ -# " in \"%s\", position %d" \ -# % (ord(self.character), self.reason, -# self.name, self.position) -# -#class ScannerError(MarkedYAMLError): -# pass -# -#class ParserError(MarkedYAMLError): -# pass -# -#class EmitterError(YAMLError): -# pass -# -#cdef class Token: -# cdef readonly Mark start_mark -# cdef readonly Mark end_mark -# def __init__(self, Mark start_mark, Mark end_mark): -# self.start_mark = start_mark -# self.end_mark = end_mark -# -#cdef class StreamStartToken(Token): -# cdef readonly object encoding -# def __init__(self, Mark start_mark, Mark end_mark, encoding): -# self.start_mark = start_mark -# self.end_mark = end_mark -# self.encoding = encoding -# -#cdef class StreamEndToken(Token): -# pass -# -#cdef class DirectiveToken(Token): -# cdef readonly object name -# cdef readonly object value -# def __init__(self, name, value, Mark start_mark, Mark end_mark): -# self.name = name -# self.value = value -# self.start_mark 
= start_mark -# self.end_mark = end_mark -# -#cdef class DocumentStartToken(Token): -# pass -# -#cdef class DocumentEndToken(Token): -# pass -# -#cdef class BlockSequenceStartToken(Token): -# pass -# -#cdef class BlockMappingStartToken(Token): -# pass -# -#cdef class BlockEndToken(Token): -# pass -# -#cdef class FlowSequenceStartToken(Token): -# pass -# -#cdef class FlowMappingStartToken(Token): -# pass -# -#cdef class FlowSequenceEndToken(Token): -# pass -# -#cdef class FlowMappingEndToken(Token): -# pass -# -#cdef class KeyToken(Token): -# pass -# -#cdef class ValueToken(Token): -# pass -# -#cdef class BlockEntryToken(Token): -# pass -# -#cdef class FlowEntryToken(Token): -# pass -# -#cdef class AliasToken(Token): -# cdef readonly object value -# def __init__(self, value, Mark start_mark, Mark end_mark): -# self.value = value -# self.start_mark = start_mark -# self.end_mark = end_mark -# -#cdef class AnchorToken(Token): -# cdef readonly object value -# def __init__(self, value, Mark start_mark, Mark end_mark): -# self.value = value -# self.start_mark = start_mark -# self.end_mark = end_mark -# -#cdef class TagToken(Token): -# cdef readonly object value -# def __init__(self, value, Mark start_mark, Mark end_mark): -# self.value = value -# self.start_mark = start_mark -# self.end_mark = end_mark -# -#cdef class ScalarToken(Token): -# cdef readonly object value -# cdef readonly object plain -# cdef readonly object style -# def __init__(self, value, plain, Mark start_mark, Mark end_mark, style=None): -# self.value = value -# self.plain = plain -# self.start_mark = start_mark -# self.end_mark = end_mark -# self.style = style - -cdef class CParser: - - cdef yaml_parser_t parser - cdef yaml_event_t parsed_event - - cdef object stream - cdef object stream_name - cdef object current_token - cdef object current_event - cdef object anchors - cdef object stream_cache - cdef int stream_cache_len - cdef int stream_cache_pos - cdef int unicode_source - - def __init__(self, 
stream): - cdef is_readable - if yaml_parser_initialize(&self.parser) == 0: - raise MemoryError - self.parsed_event.type = YAML_NO_EVENT - is_readable = 1 - try: - stream.read - except AttributeError: - is_readable = 0 - self.unicode_source = 0 - if is_readable: - self.stream = stream - try: - self.stream_name = stream.name - except AttributeError: - if PY_MAJOR_VERSION < 3: - self.stream_name = '' - else: - self.stream_name = u'' - self.stream_cache = None - self.stream_cache_len = 0 - self.stream_cache_pos = 0 - yaml_parser_set_input(&self.parser, input_handler, self) - else: - if PyUnicode_CheckExact(stream) != 0: - stream = PyUnicode_AsUTF8String(stream) - if PY_MAJOR_VERSION < 3: - self.stream_name = '' - else: - self.stream_name = u'' - self.unicode_source = 1 - else: - if PY_MAJOR_VERSION < 3: - self.stream_name = '' - else: - self.stream_name = u'' - if PyString_CheckExact(stream) == 0: - if PY_MAJOR_VERSION < 3: - raise TypeError("a string or stream input is required") - else: - raise TypeError(u"a string or stream input is required") - self.stream = stream - yaml_parser_set_input_string(&self.parser, PyString_AS_STRING(stream), PyString_GET_SIZE(stream)) - self.current_token = None - self.current_event = None - self.anchors = {} - - def __dealloc__(self): - yaml_parser_delete(&self.parser) - yaml_event_delete(&self.parsed_event) - - def dispose(self): - pass - - cdef object _parser_error(self): - if self.parser.error == YAML_MEMORY_ERROR: - return MemoryError - elif self.parser.error == YAML_READER_ERROR: - if PY_MAJOR_VERSION < 3: - return ReaderError(self.stream_name, self.parser.problem_offset, - self.parser.problem_value, '?', self.parser.problem) - else: - return ReaderError(self.stream_name, self.parser.problem_offset, - self.parser.problem_value, u'?', PyUnicode_FromString(self.parser.problem)) - elif self.parser.error == YAML_SCANNER_ERROR \ - or self.parser.error == YAML_PARSER_ERROR: - context_mark = None - problem_mark = None - if 
self.parser.context != NULL: - context_mark = Mark(self.stream_name, - self.parser.context_mark.index, - self.parser.context_mark.line, - self.parser.context_mark.column, None, None) - if self.parser.problem != NULL: - problem_mark = Mark(self.stream_name, - self.parser.problem_mark.index, - self.parser.problem_mark.line, - self.parser.problem_mark.column, None, None) - context = None - if self.parser.context != NULL: - if PY_MAJOR_VERSION < 3: - context = self.parser.context - else: - context = PyUnicode_FromString(self.parser.context) - if PY_MAJOR_VERSION < 3: - problem = self.parser.problem - else: - problem = PyUnicode_FromString(self.parser.problem) - if self.parser.error == YAML_SCANNER_ERROR: - return ScannerError(context, context_mark, problem, problem_mark) - else: - return ParserError(context, context_mark, problem, problem_mark) - if PY_MAJOR_VERSION < 3: - raise ValueError("no parser error") - else: - raise ValueError(u"no parser error") - - def raw_scan(self): - cdef yaml_token_t token - cdef int done - cdef int count - count = 0 - done = 0 - while done == 0: - if yaml_parser_scan(&self.parser, &token) == 0: - error = self._parser_error() - raise error - if token.type == YAML_NO_TOKEN: - done = 1 - else: - count = count+1 - yaml_token_delete(&token) - return count - - cdef object _scan(self): - cdef yaml_token_t token - if yaml_parser_scan(&self.parser, &token) == 0: - error = self._parser_error() - raise error - token_object = self._token_to_object(&token) - yaml_token_delete(&token) - return token_object - - cdef object _token_to_object(self, yaml_token_t *token): - start_mark = Mark(self.stream_name, - token.start_mark.index, - token.start_mark.line, - token.start_mark.column, - None, None) - end_mark = Mark(self.stream_name, - token.end_mark.index, - token.end_mark.line, - token.end_mark.column, - None, None) - if token.type == YAML_NO_TOKEN: - return None - elif token.type == YAML_STREAM_START_TOKEN: - encoding = None - if 
token.data.stream_start.encoding == YAML_UTF8_ENCODING: - if self.unicode_source == 0: - encoding = u"utf-8" - elif token.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - encoding = u"utf-16-le" - elif token.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - encoding = u"utf-16-be" - return StreamStartToken(start_mark, end_mark, encoding) - elif token.type == YAML_STREAM_END_TOKEN: - return StreamEndToken(start_mark, end_mark) - elif token.type == YAML_VERSION_DIRECTIVE_TOKEN: - return DirectiveToken(u"YAML", - (token.data.version_directive.major, - token.data.version_directive.minor), - start_mark, end_mark) - elif token.type == YAML_TAG_DIRECTIVE_TOKEN: - handle = PyUnicode_FromString(token.data.tag_directive.handle) - prefix = PyUnicode_FromString(token.data.tag_directive.prefix) - return DirectiveToken(u"TAG", (handle, prefix), - start_mark, end_mark) - elif token.type == YAML_DOCUMENT_START_TOKEN: - return DocumentStartToken(start_mark, end_mark) - elif token.type == YAML_DOCUMENT_END_TOKEN: - return DocumentEndToken(start_mark, end_mark) - elif token.type == YAML_BLOCK_SEQUENCE_START_TOKEN: - return BlockSequenceStartToken(start_mark, end_mark) - elif token.type == YAML_BLOCK_MAPPING_START_TOKEN: - return BlockMappingStartToken(start_mark, end_mark) - elif token.type == YAML_BLOCK_END_TOKEN: - return BlockEndToken(start_mark, end_mark) - elif token.type == YAML_FLOW_SEQUENCE_START_TOKEN: - return FlowSequenceStartToken(start_mark, end_mark) - elif token.type == YAML_FLOW_SEQUENCE_END_TOKEN: - return FlowSequenceEndToken(start_mark, end_mark) - elif token.type == YAML_FLOW_MAPPING_START_TOKEN: - return FlowMappingStartToken(start_mark, end_mark) - elif token.type == YAML_FLOW_MAPPING_END_TOKEN: - return FlowMappingEndToken(start_mark, end_mark) - elif token.type == YAML_BLOCK_ENTRY_TOKEN: - return BlockEntryToken(start_mark, end_mark) - elif token.type == YAML_FLOW_ENTRY_TOKEN: - return FlowEntryToken(start_mark, end_mark) - elif token.type == 
YAML_KEY_TOKEN: - return KeyToken(start_mark, end_mark) - elif token.type == YAML_VALUE_TOKEN: - return ValueToken(start_mark, end_mark) - elif token.type == YAML_ALIAS_TOKEN: - value = PyUnicode_FromString(token.data.alias.value) - return AliasToken(value, start_mark, end_mark) - elif token.type == YAML_ANCHOR_TOKEN: - value = PyUnicode_FromString(token.data.anchor.value) - return AnchorToken(value, start_mark, end_mark) - elif token.type == YAML_TAG_TOKEN: - handle = PyUnicode_FromString(token.data.tag.handle) - suffix = PyUnicode_FromString(token.data.tag.suffix) - if not handle: - handle = None - return TagToken((handle, suffix), start_mark, end_mark) - elif token.type == YAML_SCALAR_TOKEN: - value = PyUnicode_DecodeUTF8(token.data.scalar.value, - token.data.scalar.length, 'strict') - plain = False - style = None - if token.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - plain = True - style = u'' - elif token.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - style = u'\'' - elif token.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - style = u'"' - elif token.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - style = u'|' - elif token.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - style = u'>' - return ScalarToken(value, plain, - start_mark, end_mark, style) - else: - if PY_MAJOR_VERSION < 3: - raise ValueError("unknown token type") - else: - raise ValueError(u"unknown token type") - - def get_token(self): - if self.current_token is not None: - value = self.current_token - self.current_token = None - else: - value = self._scan() - return value - - def peek_token(self): - if self.current_token is None: - self.current_token = self._scan() - return self.current_token - - def check_token(self, *choices): - if self.current_token is None: - self.current_token = self._scan() - if self.current_token is None: - return False - if not choices: - return True - token_class = self.current_token.__class__ - for choice in choices: - if token_class is choice: - 
return True - return False - - def raw_parse(self): - cdef yaml_event_t event - cdef int done - cdef int count - count = 0 - done = 0 - while done == 0: - if yaml_parser_parse(&self.parser, &event) == 0: - error = self._parser_error() - raise error - if event.type == YAML_NO_EVENT: - done = 1 - else: - count = count+1 - yaml_event_delete(&event) - return count - - cdef object _parse(self): - cdef yaml_event_t event - if yaml_parser_parse(&self.parser, &event) == 0: - error = self._parser_error() - raise error - event_object = self._event_to_object(&event) - yaml_event_delete(&event) - return event_object - - cdef object _event_to_object(self, yaml_event_t *event): - cdef yaml_tag_directive_t *tag_directive - start_mark = Mark(self.stream_name, - event.start_mark.index, - event.start_mark.line, - event.start_mark.column, - None, None) - end_mark = Mark(self.stream_name, - event.end_mark.index, - event.end_mark.line, - event.end_mark.column, - None, None) - if event.type == YAML_NO_EVENT: - return None - elif event.type == YAML_STREAM_START_EVENT: - encoding = None - if event.data.stream_start.encoding == YAML_UTF8_ENCODING: - if self.unicode_source == 0: - encoding = u"utf-8" - elif event.data.stream_start.encoding == YAML_UTF16LE_ENCODING: - encoding = u"utf-16-le" - elif event.data.stream_start.encoding == YAML_UTF16BE_ENCODING: - encoding = u"utf-16-be" - return StreamStartEvent(start_mark, end_mark, encoding) - elif event.type == YAML_STREAM_END_EVENT: - return StreamEndEvent(start_mark, end_mark) - elif event.type == YAML_DOCUMENT_START_EVENT: - explicit = False - if event.data.document_start.implicit == 0: - explicit = True - version = None - if event.data.document_start.version_directive != NULL: - version = (event.data.document_start.version_directive.major, - event.data.document_start.version_directive.minor) - tags = None - if event.data.document_start.tag_directives.start != NULL: - tags = {} - tag_directive = 
event.data.document_start.tag_directives.start - while tag_directive != event.data.document_start.tag_directives.end: - handle = PyUnicode_FromString(tag_directive.handle) - prefix = PyUnicode_FromString(tag_directive.prefix) - tags[handle] = prefix - tag_directive = tag_directive+1 - return DocumentStartEvent(start_mark, end_mark, - explicit, version, tags) - elif event.type == YAML_DOCUMENT_END_EVENT: - explicit = False - if event.data.document_end.implicit == 0: - explicit = True - return DocumentEndEvent(start_mark, end_mark, explicit) - elif event.type == YAML_ALIAS_EVENT: - anchor = PyUnicode_FromString(event.data.alias.anchor) - return AliasEvent(anchor, start_mark, end_mark) - elif event.type == YAML_SCALAR_EVENT: - anchor = None - if event.data.scalar.anchor != NULL: - anchor = PyUnicode_FromString(event.data.scalar.anchor) - tag = None - if event.data.scalar.tag != NULL: - tag = PyUnicode_FromString(event.data.scalar.tag) - value = PyUnicode_DecodeUTF8(event.data.scalar.value, - event.data.scalar.length, 'strict') - plain_implicit = False - if event.data.scalar.plain_implicit == 1: - plain_implicit = True - quoted_implicit = False - if event.data.scalar.quoted_implicit == 1: - quoted_implicit = True - style = None - if event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - style = u'' - elif event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - style = u'\'' - elif event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - style = u'"' - elif event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - style = u'|' - elif event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - style = u'>' - return ScalarEvent(anchor, tag, - (plain_implicit, quoted_implicit), - value, start_mark, end_mark, style) - elif event.type == YAML_SEQUENCE_START_EVENT: - anchor = None - if event.data.sequence_start.anchor != NULL: - anchor = PyUnicode_FromString(event.data.sequence_start.anchor) - tag = None - if event.data.sequence_start.tag != NULL: - tag = 
PyUnicode_FromString(event.data.sequence_start.tag) - implicit = False - if event.data.sequence_start.implicit == 1: - implicit = True - flow_style = None - if event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - flow_style = True - elif event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - flow_style = False - return SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style) - elif event.type == YAML_MAPPING_START_EVENT: - anchor = None - if event.data.mapping_start.anchor != NULL: - anchor = PyUnicode_FromString(event.data.mapping_start.anchor) - tag = None - if event.data.mapping_start.tag != NULL: - tag = PyUnicode_FromString(event.data.mapping_start.tag) - implicit = False - if event.data.mapping_start.implicit == 1: - implicit = True - flow_style = None - if event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - flow_style = True - elif event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - flow_style = False - return MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style) - elif event.type == YAML_SEQUENCE_END_EVENT: - return SequenceEndEvent(start_mark, end_mark) - elif event.type == YAML_MAPPING_END_EVENT: - return MappingEndEvent(start_mark, end_mark) - else: - if PY_MAJOR_VERSION < 3: - raise ValueError("unknown event type") - else: - raise ValueError(u"unknown event type") - - def get_event(self): - if self.current_event is not None: - value = self.current_event - self.current_event = None - else: - value = self._parse() - return value - - def peek_event(self): - if self.current_event is None: - self.current_event = self._parse() - return self.current_event - - def check_event(self, *choices): - if self.current_event is None: - self.current_event = self._parse() - if self.current_event is None: - return False - if not choices: - return True - event_class = self.current_event.__class__ - for choice in choices: - if event_class is choice: - return True - return False - - def 
check_node(self): - self._parse_next_event() - if self.parsed_event.type == YAML_STREAM_START_EVENT: - yaml_event_delete(&self.parsed_event) - self._parse_next_event() - if self.parsed_event.type != YAML_STREAM_END_EVENT: - return True - return False - - def get_node(self): - self._parse_next_event() - if self.parsed_event.type != YAML_STREAM_END_EVENT: - return self._compose_document() - - def get_single_node(self): - self._parse_next_event() - yaml_event_delete(&self.parsed_event) - self._parse_next_event() - document = None - if self.parsed_event.type != YAML_STREAM_END_EVENT: - document = self._compose_document() - self._parse_next_event() - if self.parsed_event.type != YAML_STREAM_END_EVENT: - mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - if PY_MAJOR_VERSION < 3: - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", mark) - else: - raise ComposerError(u"expected a single document in the stream", - document.start_mark, u"but found another document", mark) - return document - - cdef object _compose_document(self): - yaml_event_delete(&self.parsed_event) - node = self._compose_node(None, None) - self._parse_next_event() - yaml_event_delete(&self.parsed_event) - self.anchors = {} - return node - - cdef object _compose_node(self, object parent, object index): - self._parse_next_event() - if self.parsed_event.type == YAML_ALIAS_EVENT: - anchor = PyUnicode_FromString(self.parsed_event.data.alias.anchor) - if anchor not in self.anchors: - mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - if PY_MAJOR_VERSION < 3: - raise ComposerError(None, None, "found undefined alias", mark) - else: - raise ComposerError(None, None, u"found undefined alias", mark) - 
yaml_event_delete(&self.parsed_event) - return self.anchors[anchor] - anchor = None - if self.parsed_event.type == YAML_SCALAR_EVENT \ - and self.parsed_event.data.scalar.anchor != NULL: - anchor = PyUnicode_FromString(self.parsed_event.data.scalar.anchor) - elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT \ - and self.parsed_event.data.sequence_start.anchor != NULL: - anchor = PyUnicode_FromString(self.parsed_event.data.sequence_start.anchor) - elif self.parsed_event.type == YAML_MAPPING_START_EVENT \ - and self.parsed_event.data.mapping_start.anchor != NULL: - anchor = PyUnicode_FromString(self.parsed_event.data.mapping_start.anchor) - if anchor is not None: - if anchor in self.anchors: - mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - if PY_MAJOR_VERSION < 3: - raise ComposerError("found duplicate anchor; first occurence", - self.anchors[anchor].start_mark, "second occurence", mark) - else: - raise ComposerError(u"found duplicate anchor; first occurence", - self.anchors[anchor].start_mark, u"second occurence", mark) - self.descend_resolver(parent, index) - if self.parsed_event.type == YAML_SCALAR_EVENT: - node = self._compose_scalar_node(anchor) - elif self.parsed_event.type == YAML_SEQUENCE_START_EVENT: - node = self._compose_sequence_node(anchor) - elif self.parsed_event.type == YAML_MAPPING_START_EVENT: - node = self._compose_mapping_node(anchor) - self.ascend_resolver() - return node - - cdef _compose_scalar_node(self, object anchor): - start_mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - end_mark = Mark(self.stream_name, - self.parsed_event.end_mark.index, - self.parsed_event.end_mark.line, - self.parsed_event.end_mark.column, - None, None) - value = PyUnicode_DecodeUTF8(self.parsed_event.data.scalar.value, - 
self.parsed_event.data.scalar.length, 'strict') - plain_implicit = False - if self.parsed_event.data.scalar.plain_implicit == 1: - plain_implicit = True - quoted_implicit = False - if self.parsed_event.data.scalar.quoted_implicit == 1: - quoted_implicit = True - if self.parsed_event.data.scalar.tag == NULL \ - or (self.parsed_event.data.scalar.tag[0] == c'!' - and self.parsed_event.data.scalar.tag[1] == c'\0'): - tag = self.resolve(ScalarNode, value, (plain_implicit, quoted_implicit)) - else: - tag = PyUnicode_FromString(self.parsed_event.data.scalar.tag) - style = None - if self.parsed_event.data.scalar.style == YAML_PLAIN_SCALAR_STYLE: - style = u'' - elif self.parsed_event.data.scalar.style == YAML_SINGLE_QUOTED_SCALAR_STYLE: - style = u'\'' - elif self.parsed_event.data.scalar.style == YAML_DOUBLE_QUOTED_SCALAR_STYLE: - style = u'"' - elif self.parsed_event.data.scalar.style == YAML_LITERAL_SCALAR_STYLE: - style = u'|' - elif self.parsed_event.data.scalar.style == YAML_FOLDED_SCALAR_STYLE: - style = u'>' - node = ScalarNode(tag, value, start_mark, end_mark, style) - if anchor is not None: - self.anchors[anchor] = node - yaml_event_delete(&self.parsed_event) - return node - - cdef _compose_sequence_node(self, object anchor): - cdef int index - start_mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - implicit = False - if self.parsed_event.data.sequence_start.implicit == 1: - implicit = True - if self.parsed_event.data.sequence_start.tag == NULL \ - or (self.parsed_event.data.sequence_start.tag[0] == c'!' 
- and self.parsed_event.data.sequence_start.tag[1] == c'\0'): - tag = self.resolve(SequenceNode, None, implicit) - else: - tag = PyUnicode_FromString(self.parsed_event.data.sequence_start.tag) - flow_style = None - if self.parsed_event.data.sequence_start.style == YAML_FLOW_SEQUENCE_STYLE: - flow_style = True - elif self.parsed_event.data.sequence_start.style == YAML_BLOCK_SEQUENCE_STYLE: - flow_style = False - value = [] - node = SequenceNode(tag, value, start_mark, None, flow_style) - if anchor is not None: - self.anchors[anchor] = node - yaml_event_delete(&self.parsed_event) - index = 0 - self._parse_next_event() - while self.parsed_event.type != YAML_SEQUENCE_END_EVENT: - value.append(self._compose_node(node, index)) - index = index+1 - self._parse_next_event() - node.end_mark = Mark(self.stream_name, - self.parsed_event.end_mark.index, - self.parsed_event.end_mark.line, - self.parsed_event.end_mark.column, - None, None) - yaml_event_delete(&self.parsed_event) - return node - - cdef _compose_mapping_node(self, object anchor): - start_mark = Mark(self.stream_name, - self.parsed_event.start_mark.index, - self.parsed_event.start_mark.line, - self.parsed_event.start_mark.column, - None, None) - implicit = False - if self.parsed_event.data.mapping_start.implicit == 1: - implicit = True - if self.parsed_event.data.mapping_start.tag == NULL \ - or (self.parsed_event.data.mapping_start.tag[0] == c'!' 
- and self.parsed_event.data.mapping_start.tag[1] == c'\0'): - tag = self.resolve(MappingNode, None, implicit) - else: - tag = PyUnicode_FromString(self.parsed_event.data.mapping_start.tag) - flow_style = None - if self.parsed_event.data.mapping_start.style == YAML_FLOW_MAPPING_STYLE: - flow_style = True - elif self.parsed_event.data.mapping_start.style == YAML_BLOCK_MAPPING_STYLE: - flow_style = False - value = [] - node = MappingNode(tag, value, start_mark, None, flow_style) - if anchor is not None: - self.anchors[anchor] = node - yaml_event_delete(&self.parsed_event) - self._parse_next_event() - while self.parsed_event.type != YAML_MAPPING_END_EVENT: - item_key = self._compose_node(node, None) - item_value = self._compose_node(node, item_key) - value.append((item_key, item_value)) - self._parse_next_event() - node.end_mark = Mark(self.stream_name, - self.parsed_event.end_mark.index, - self.parsed_event.end_mark.line, - self.parsed_event.end_mark.column, - None, None) - yaml_event_delete(&self.parsed_event) - return node - - cdef int _parse_next_event(self) except 0: - if self.parsed_event.type == YAML_NO_EVENT: - if yaml_parser_parse(&self.parser, &self.parsed_event) == 0: - error = self._parser_error() - raise error - return 1 - -cdef int input_handler(void *data, char *buffer, int size, int *read) except 0: - cdef CParser parser - parser = data - if parser.stream_cache is None: - value = parser.stream.read(size) - if PyUnicode_CheckExact(value) != 0: - value = PyUnicode_AsUTF8String(value) - parser.unicode_source = 1 - if PyString_CheckExact(value) == 0: - if PY_MAJOR_VERSION < 3: - raise TypeError("a string value is expected") - else: - raise TypeError(u"a string value is expected") - parser.stream_cache = value - parser.stream_cache_pos = 0 - parser.stream_cache_len = PyString_GET_SIZE(value) - if (parser.stream_cache_len - parser.stream_cache_pos) < size: - size = parser.stream_cache_len - parser.stream_cache_pos - if size > 0: - memcpy(buffer, 
PyString_AS_STRING(parser.stream_cache) - + parser.stream_cache_pos, size) - read[0] = size - parser.stream_cache_pos += size - if parser.stream_cache_pos == parser.stream_cache_len: - parser.stream_cache = None - return 1 - -cdef class CEmitter: - - cdef yaml_emitter_t emitter - - cdef object stream - - cdef int document_start_implicit - cdef int document_end_implicit - cdef object use_version - cdef object use_tags - - cdef object serialized_nodes - cdef object anchors - cdef int last_alias_id - cdef int closed - cdef int dump_unicode - cdef object use_encoding - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - if yaml_emitter_initialize(&self.emitter) == 0: - raise MemoryError - self.stream = stream - self.dump_unicode = 0 - if PY_MAJOR_VERSION < 3: - if getattr3(stream, 'encoding', None): - self.dump_unicode = 1 - else: - if hasattr(stream, u'encoding'): - self.dump_unicode = 1 - self.use_encoding = encoding - yaml_emitter_set_output(&self.emitter, output_handler, self) - if canonical: - yaml_emitter_set_canonical(&self.emitter, 1) - if indent is not None: - yaml_emitter_set_indent(&self.emitter, indent) - if width is not None: - yaml_emitter_set_width(&self.emitter, width) - if allow_unicode: - yaml_emitter_set_unicode(&self.emitter, 1) - if line_break is not None: - if line_break == '\r': - yaml_emitter_set_break(&self.emitter, YAML_CR_BREAK) - elif line_break == '\n': - yaml_emitter_set_break(&self.emitter, YAML_LN_BREAK) - elif line_break == '\r\n': - yaml_emitter_set_break(&self.emitter, YAML_CRLN_BREAK) - self.document_start_implicit = 1 - if explicit_start: - self.document_start_implicit = 0 - self.document_end_implicit = 1 - if explicit_end: - self.document_end_implicit = 0 - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_alias_id = 0 - self.closed 
= -1 - - def __dealloc__(self): - yaml_emitter_delete(&self.emitter) - - def dispose(self): - pass - - cdef object _emitter_error(self): - if self.emitter.error == YAML_MEMORY_ERROR: - return MemoryError - elif self.emitter.error == YAML_EMITTER_ERROR: - if PY_MAJOR_VERSION < 3: - problem = self.emitter.problem - else: - problem = PyUnicode_FromString(self.emitter.problem) - return EmitterError(problem) - if PY_MAJOR_VERSION < 3: - raise ValueError("no emitter error") - else: - raise ValueError(u"no emitter error") - - cdef int _object_to_event(self, object event_object, yaml_event_t *event) except 0: - cdef yaml_encoding_t encoding - cdef yaml_version_directive_t version_directive_value - cdef yaml_version_directive_t *version_directive - cdef yaml_tag_directive_t tag_directives_value[128] - cdef yaml_tag_directive_t *tag_directives_start - cdef yaml_tag_directive_t *tag_directives_end - cdef int implicit - cdef int plain_implicit - cdef int quoted_implicit - cdef char *anchor - cdef char *tag - cdef char *value - cdef int length - cdef yaml_scalar_style_t scalar_style - cdef yaml_sequence_style_t sequence_style - cdef yaml_mapping_style_t mapping_style - event_class = event_object.__class__ - if event_class is StreamStartEvent: - encoding = YAML_UTF8_ENCODING - if event_object.encoding == u'utf-16-le' or event_object.encoding == 'utf-16-le': - encoding = YAML_UTF16LE_ENCODING - elif event_object.encoding == u'utf-16-be' or event_object.encoding == 'utf-16-be': - encoding = YAML_UTF16BE_ENCODING - if event_object.encoding is None: - self.dump_unicode = 1 - if self.dump_unicode == 1: - encoding = YAML_UTF8_ENCODING - yaml_stream_start_event_initialize(event, encoding) - elif event_class is StreamEndEvent: - yaml_stream_end_event_initialize(event) - elif event_class is DocumentStartEvent: - version_directive = NULL - if event_object.version: - version_directive_value.major = event_object.version[0] - version_directive_value.minor = event_object.version[1] - 
version_directive = &version_directive_value - tag_directives_start = NULL - tag_directives_end = NULL - if event_object.tags: - if len(event_object.tags) > 128: - if PY_MAJOR_VERSION < 3: - raise ValueError("too many tags") - else: - raise ValueError(u"too many tags") - tag_directives_start = tag_directives_value - tag_directives_end = tag_directives_value - cache = [] - for handle in event_object.tags: - prefix = event_object.tags[handle] - if PyUnicode_CheckExact(handle): - handle = PyUnicode_AsUTF8String(handle) - cache.append(handle) - if not PyString_CheckExact(handle): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag handle must be a string") - else: - raise TypeError(u"tag handle must be a string") - tag_directives_end.handle = PyString_AS_STRING(handle) - if PyUnicode_CheckExact(prefix): - prefix = PyUnicode_AsUTF8String(prefix) - cache.append(prefix) - if not PyString_CheckExact(prefix): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag prefix must be a string") - else: - raise TypeError(u"tag prefix must be a string") - tag_directives_end.prefix = PyString_AS_STRING(prefix) - tag_directives_end = tag_directives_end+1 - implicit = 1 - if event_object.explicit: - implicit = 0 - if yaml_document_start_event_initialize(event, version_directive, - tag_directives_start, tag_directives_end, implicit) == 0: - raise MemoryError - elif event_class is DocumentEndEvent: - implicit = 1 - if event_object.explicit: - implicit = 0 - yaml_document_end_event_initialize(event, implicit) - elif event_class is AliasEvent: - anchor = NULL - anchor_object = event_object.anchor - if PyUnicode_CheckExact(anchor_object): - anchor_object = PyUnicode_AsUTF8String(anchor_object) - if not PyString_CheckExact(anchor_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("anchor must be a string") - else: - raise TypeError(u"anchor must be a string") - anchor = PyString_AS_STRING(anchor_object) - if yaml_alias_event_initialize(event, anchor) == 0: - raise MemoryError - elif event_class 
is ScalarEvent: - anchor = NULL - anchor_object = event_object.anchor - if anchor_object is not None: - if PyUnicode_CheckExact(anchor_object): - anchor_object = PyUnicode_AsUTF8String(anchor_object) - if not PyString_CheckExact(anchor_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("anchor must be a string") - else: - raise TypeError(u"anchor must be a string") - anchor = PyString_AS_STRING(anchor_object) - tag = NULL - tag_object = event_object.tag - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - value_object = event_object.value - if PyUnicode_CheckExact(value_object): - value_object = PyUnicode_AsUTF8String(value_object) - if not PyString_CheckExact(value_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("value must be a string") - else: - raise TypeError(u"value must be a string") - value = PyString_AS_STRING(value_object) - length = PyString_GET_SIZE(value_object) - plain_implicit = 0 - quoted_implicit = 0 - if event_object.implicit is not None: - plain_implicit = event_object.implicit[0] - quoted_implicit = event_object.implicit[1] - style_object = event_object.style - scalar_style = YAML_PLAIN_SCALAR_STYLE - if style_object == "'" or style_object == u"'": - scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - elif style_object == "\"" or style_object == u"\"": - scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - elif style_object == "|" or style_object == u"|": - scalar_style = YAML_LITERAL_SCALAR_STYLE - elif style_object == ">" or style_object == u">": - scalar_style = YAML_FOLDED_SCALAR_STYLE - if yaml_scalar_event_initialize(event, anchor, tag, value, length, - plain_implicit, quoted_implicit, scalar_style) == 0: - raise MemoryError - elif event_class is SequenceStartEvent: - anchor 
= NULL - anchor_object = event_object.anchor - if anchor_object is not None: - if PyUnicode_CheckExact(anchor_object): - anchor_object = PyUnicode_AsUTF8String(anchor_object) - if not PyString_CheckExact(anchor_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("anchor must be a string") - else: - raise TypeError(u"anchor must be a string") - anchor = PyString_AS_STRING(anchor_object) - tag = NULL - tag_object = event_object.tag - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - implicit = 0 - if event_object.implicit: - implicit = 1 - sequence_style = YAML_BLOCK_SEQUENCE_STYLE - if event_object.flow_style: - sequence_style = YAML_FLOW_SEQUENCE_STYLE - if yaml_sequence_start_event_initialize(event, anchor, tag, - implicit, sequence_style) == 0: - raise MemoryError - elif event_class is MappingStartEvent: - anchor = NULL - anchor_object = event_object.anchor - if anchor_object is not None: - if PyUnicode_CheckExact(anchor_object): - anchor_object = PyUnicode_AsUTF8String(anchor_object) - if not PyString_CheckExact(anchor_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("anchor must be a string") - else: - raise TypeError(u"anchor must be a string") - anchor = PyString_AS_STRING(anchor_object) - tag = NULL - tag_object = event_object.tag - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - implicit = 0 - if event_object.implicit: - implicit = 1 - mapping_style = YAML_BLOCK_MAPPING_STYLE - if event_object.flow_style: - mapping_style 
= YAML_FLOW_MAPPING_STYLE - if yaml_mapping_start_event_initialize(event, anchor, tag, - implicit, mapping_style) == 0: - raise MemoryError - elif event_class is SequenceEndEvent: - yaml_sequence_end_event_initialize(event) - elif event_class is MappingEndEvent: - yaml_mapping_end_event_initialize(event) - else: - if PY_MAJOR_VERSION < 3: - raise TypeError("invalid event %s" % event_object) - else: - raise TypeError(u"invalid event %s" % event_object) - return 1 - - def emit(self, event_object): - cdef yaml_event_t event - self._object_to_event(event_object, &event) - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - - def open(self): - cdef yaml_event_t event - cdef yaml_encoding_t encoding - if self.closed == -1: - if self.use_encoding == u'utf-16-le' or self.use_encoding == 'utf-16-le': - encoding = YAML_UTF16LE_ENCODING - elif self.use_encoding == u'utf-16-be' or self.use_encoding == 'utf-16-be': - encoding = YAML_UTF16BE_ENCODING - else: - encoding = YAML_UTF8_ENCODING - if self.use_encoding is None: - self.dump_unicode = 1 - if self.dump_unicode == 1: - encoding = YAML_UTF8_ENCODING - yaml_stream_start_event_initialize(&event, encoding) - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - self.closed = 0 - elif self.closed == 1: - if PY_MAJOR_VERSION < 3: - raise SerializerError("serializer is closed") - else: - raise SerializerError(u"serializer is closed") - else: - if PY_MAJOR_VERSION < 3: - raise SerializerError("serializer is already opened") - else: - raise SerializerError(u"serializer is already opened") - - def close(self): - cdef yaml_event_t event - if self.closed == -1: - if PY_MAJOR_VERSION < 3: - raise SerializerError("serializer is not opened") - else: - raise SerializerError(u"serializer is not opened") - elif self.closed == 0: - yaml_stream_end_event_initialize(&event) - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = 
self._emitter_error() - raise error - self.closed = 1 - - def serialize(self, node): - cdef yaml_event_t event - cdef yaml_version_directive_t version_directive_value - cdef yaml_version_directive_t *version_directive - cdef yaml_tag_directive_t tag_directives_value[128] - cdef yaml_tag_directive_t *tag_directives_start - cdef yaml_tag_directive_t *tag_directives_end - if self.closed == -1: - if PY_MAJOR_VERSION < 3: - raise SerializerError("serializer is not opened") - else: - raise SerializerError(u"serializer is not opened") - elif self.closed == 1: - if PY_MAJOR_VERSION < 3: - raise SerializerError("serializer is closed") - else: - raise SerializerError(u"serializer is closed") - cache = [] - version_directive = NULL - if self.use_version: - version_directive_value.major = self.use_version[0] - version_directive_value.minor = self.use_version[1] - version_directive = &version_directive_value - tag_directives_start = NULL - tag_directives_end = NULL - if self.use_tags: - if len(self.use_tags) > 128: - if PY_MAJOR_VERSION < 3: - raise ValueError("too many tags") - else: - raise ValueError(u"too many tags") - tag_directives_start = tag_directives_value - tag_directives_end = tag_directives_value - for handle in self.use_tags: - prefix = self.use_tags[handle] - if PyUnicode_CheckExact(handle): - handle = PyUnicode_AsUTF8String(handle) - cache.append(handle) - if not PyString_CheckExact(handle): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag handle must be a string") - else: - raise TypeError(u"tag handle must be a string") - tag_directives_end.handle = PyString_AS_STRING(handle) - if PyUnicode_CheckExact(prefix): - prefix = PyUnicode_AsUTF8String(prefix) - cache.append(prefix) - if not PyString_CheckExact(prefix): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag prefix must be a string") - else: - raise TypeError(u"tag prefix must be a string") - tag_directives_end.prefix = PyString_AS_STRING(prefix) - tag_directives_end = tag_directives_end+1 - if 
yaml_document_start_event_initialize(&event, version_directive, - tag_directives_start, tag_directives_end, - self.document_start_implicit) == 0: - raise MemoryError - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - self._anchor_node(node) - self._serialize_node(node, None, None) - yaml_document_end_event_initialize(&event, self.document_end_implicit) - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - self.serialized_nodes = {} - self.anchors = {} - self.last_alias_id = 0 - - cdef int _anchor_node(self, object node) except 0: - if node in self.anchors: - if self.anchors[node] is None: - self.last_alias_id = self.last_alias_id+1 - self.anchors[node] = u"id%03d" % self.last_alias_id - else: - self.anchors[node] = None - node_class = node.__class__ - if node_class is SequenceNode: - for item in node.value: - self._anchor_node(item) - elif node_class is MappingNode: - for key, value in node.value: - self._anchor_node(key) - self._anchor_node(value) - return 1 - - cdef int _serialize_node(self, object node, object parent, object index) except 0: - cdef yaml_event_t event - cdef int implicit - cdef int plain_implicit - cdef int quoted_implicit - cdef char *anchor - cdef char *tag - cdef char *value - cdef int length - cdef int item_index - cdef yaml_scalar_style_t scalar_style - cdef yaml_sequence_style_t sequence_style - cdef yaml_mapping_style_t mapping_style - anchor_object = self.anchors[node] - anchor = NULL - if anchor_object is not None: - if PyUnicode_CheckExact(anchor_object): - anchor_object = PyUnicode_AsUTF8String(anchor_object) - if not PyString_CheckExact(anchor_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("anchor must be a string") - else: - raise TypeError(u"anchor must be a string") - anchor = PyString_AS_STRING(anchor_object) - if node in self.serialized_nodes: - if yaml_alias_event_initialize(&event, anchor) == 0: - raise MemoryError - if 
yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - else: - node_class = node.__class__ - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if node_class is ScalarNode: - plain_implicit = 0 - quoted_implicit = 0 - tag_object = node.tag - if self.resolve(ScalarNode, node.value, (True, False)) == tag_object: - plain_implicit = 1 - if self.resolve(ScalarNode, node.value, (False, True)) == tag_object: - quoted_implicit = 1 - tag = NULL - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - value_object = node.value - if PyUnicode_CheckExact(value_object): - value_object = PyUnicode_AsUTF8String(value_object) - if not PyString_CheckExact(value_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("value must be a string") - else: - raise TypeError(u"value must be a string") - value = PyString_AS_STRING(value_object) - length = PyString_GET_SIZE(value_object) - style_object = node.style - scalar_style = YAML_PLAIN_SCALAR_STYLE - if style_object == "'" or style_object == u"'": - scalar_style = YAML_SINGLE_QUOTED_SCALAR_STYLE - elif style_object == "\"" or style_object == u"\"": - scalar_style = YAML_DOUBLE_QUOTED_SCALAR_STYLE - elif style_object == "|" or style_object == u"|": - scalar_style = YAML_LITERAL_SCALAR_STYLE - elif style_object == ">" or style_object == u">": - scalar_style = YAML_FOLDED_SCALAR_STYLE - if yaml_scalar_event_initialize(&event, anchor, tag, value, length, - plain_implicit, quoted_implicit, scalar_style) == 0: - raise MemoryError - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - elif node_class is SequenceNode: - implicit = 0 - tag_object = node.tag - if 
self.resolve(SequenceNode, node.value, True) == tag_object: - implicit = 1 - tag = NULL - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - sequence_style = YAML_BLOCK_SEQUENCE_STYLE - if node.flow_style: - sequence_style = YAML_FLOW_SEQUENCE_STYLE - if yaml_sequence_start_event_initialize(&event, anchor, tag, - implicit, sequence_style) == 0: - raise MemoryError - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - item_index = 0 - for item in node.value: - self._serialize_node(item, node, item_index) - item_index = item_index+1 - yaml_sequence_end_event_initialize(&event) - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - elif node_class is MappingNode: - implicit = 0 - tag_object = node.tag - if self.resolve(MappingNode, node.value, True) == tag_object: - implicit = 1 - tag = NULL - if tag_object is not None: - if PyUnicode_CheckExact(tag_object): - tag_object = PyUnicode_AsUTF8String(tag_object) - if not PyString_CheckExact(tag_object): - if PY_MAJOR_VERSION < 3: - raise TypeError("tag must be a string") - else: - raise TypeError(u"tag must be a string") - tag = PyString_AS_STRING(tag_object) - mapping_style = YAML_BLOCK_MAPPING_STYLE - if node.flow_style: - mapping_style = YAML_FLOW_MAPPING_STYLE - if yaml_mapping_start_event_initialize(&event, anchor, tag, - implicit, mapping_style) == 0: - raise MemoryError - if yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - for item_key, item_value in node.value: - self._serialize_node(item_key, node, None) - self._serialize_node(item_value, node, item_key) - yaml_mapping_end_event_initialize(&event) - if 
yaml_emitter_emit(&self.emitter, &event) == 0: - error = self._emitter_error() - raise error - self.ascend_resolver() - return 1 - -cdef int output_handler(void *data, char *buffer, int size) except 0: - cdef CEmitter emitter - emitter = data - if emitter.dump_unicode == 0: - value = PyString_FromStringAndSize(buffer, size) - else: - value = PyUnicode_DecodeUTF8(buffer, size, 'strict') - emitter.stream.write(value) - return 1 - diff --git a/libs/PyYAML-3.10/lib/yaml/__init__.py b/libs/PyYAML-3.10/lib/yaml/__init__.py deleted file mode 100644 index f977f46..0000000 --- a/libs/PyYAML-3.10/lib/yaml/__init__.py +++ /dev/null @@ -1,315 +0,0 @@ - -from error import * - -from tokens import * -from events import * -from nodes import * - -from loader import * -from dumper import * - -__version__ = '3.10' - -try: - from cyaml import * - __with_libyaml__ = True -except ImportError: - __with_libyaml__ = False - -def scan(stream, Loader=Loader): - """ - Scan a YAML stream and produce scanning tokens. - """ - loader = Loader(stream) - try: - while loader.check_token(): - yield loader.get_token() - finally: - loader.dispose() - -def parse(stream, Loader=Loader): - """ - Parse a YAML stream and produce parsing events. - """ - loader = Loader(stream) - try: - while loader.check_event(): - yield loader.get_event() - finally: - loader.dispose() - -def compose(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding representation tree. - """ - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() - -def compose_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding representation trees. - """ - loader = Loader(stream) - try: - while loader.check_node(): - yield loader.get_node() - finally: - loader.dispose() - -def load(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. 
- """ - loader = Loader(stream) - try: - return loader.get_single_data() - finally: - loader.dispose() - -def load_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - """ - loader = Loader(stream) - try: - while loader.check_data(): - yield loader.get_data() - finally: - loader.dispose() - -def safe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - Resolve only basic YAML tags. - """ - return load(stream, SafeLoader) - -def safe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - Resolve only basic YAML tags. - """ - return load_all(stream, SafeLoader) - -def emit(events, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - """ - Emit YAML parsing events into a stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - from StringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - try: - for event in events: - dumper.emit(event) - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize_all(nodes, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding='utf-8', explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of representation trees into a YAML stream. - If stream is None, return the produced string instead. 
- """ - getvalue = None - if stream is None: - if encoding is None: - from StringIO import StringIO - else: - from cStringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for node in nodes: - dumper.serialize(node) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize(node, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a representation tree into a YAML stream. - If stream is None, return the produced string instead. - """ - return serialize_all([node], stream, Dumper=Dumper, **kwds) - -def dump_all(documents, stream=None, Dumper=Dumper, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding='utf-8', explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of Python objects into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - from StringIO import StringIO - else: - from cStringIO import StringIO - stream = StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for data in documents: - dumper.represent(data) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def dump(data, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a Python object into a YAML stream. 
- If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=Dumper, **kwds) - -def safe_dump_all(documents, stream=None, **kwds): - """ - Serialize a sequence of Python objects into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all(documents, stream, Dumper=SafeDumper, **kwds) - -def safe_dump(data, stream=None, **kwds): - """ - Serialize a Python object into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=SafeDumper, **kwds) - -def add_implicit_resolver(tag, regexp, first=None, - Loader=Loader, Dumper=Dumper): - """ - Add an implicit scalar detector. - If an implicit scalar value matches the given regexp, - the corresponding tag is assigned to the scalar. - first is a sequence of possible initial characters or None. - """ - Loader.add_implicit_resolver(tag, regexp, first) - Dumper.add_implicit_resolver(tag, regexp, first) - -def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): - """ - Add a path based resolver for the given tag. - A path is a list of keys that forms a path - to a node in the representation tree. - Keys can be string values, integers, or None. - """ - Loader.add_path_resolver(tag, path, kind) - Dumper.add_path_resolver(tag, path, kind) - -def add_constructor(tag, constructor, Loader=Loader): - """ - Add a constructor for the given tag. - Constructor is a function that accepts a Loader instance - and a node object and produces the corresponding Python object. - """ - Loader.add_constructor(tag, constructor) - -def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): - """ - Add a multi-constructor for the given tag prefix. - Multi-constructor is called for a node if its tag starts with tag_prefix. 
- Multi-constructor accepts a Loader instance, a tag suffix, - and a node object and produces the corresponding Python object. - """ - Loader.add_multi_constructor(tag_prefix, multi_constructor) - -def add_representer(data_type, representer, Dumper=Dumper): - """ - Add a representer for the given type. - Representer is a function accepting a Dumper instance - and an instance of the given data type - and producing the corresponding representation node. - """ - Dumper.add_representer(data_type, representer) - -def add_multi_representer(data_type, multi_representer, Dumper=Dumper): - """ - Add a representer for the given type. - Multi-representer is a function accepting a Dumper instance - and an instance of the given data type or subtype - and producing the corresponding representation node. - """ - Dumper.add_multi_representer(data_type, multi_representer) - -class YAMLObjectMetaclass(type): - """ - The metaclass for YAMLObject. - """ - def __init__(cls, name, bases, kwds): - super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) - if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: - cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) - cls.yaml_dumper.add_representer(cls, cls.to_yaml) - -class YAMLObject(object): - """ - An object that can dump itself to a YAML stream - and load itself from a YAML stream. - """ - - __metaclass__ = YAMLObjectMetaclass - __slots__ = () # no direct instantiation, so allow immutable subclasses - - yaml_loader = Loader - yaml_dumper = Dumper - - yaml_tag = None - yaml_flow_style = None - - def from_yaml(cls, loader, node): - """ - Convert a representation node to a Python object. - """ - return loader.construct_yaml_object(node, cls) - from_yaml = classmethod(from_yaml) - - def to_yaml(cls, dumper, data): - """ - Convert a Python object to a representation node. 
- """ - return dumper.represent_yaml_object(cls.yaml_tag, data, cls, - flow_style=cls.yaml_flow_style) - to_yaml = classmethod(to_yaml) - diff --git a/libs/PyYAML-3.10/lib/yaml/composer.py b/libs/PyYAML-3.10/lib/yaml/composer.py deleted file mode 100644 index 06e5ac7..0000000 --- a/libs/PyYAML-3.10/lib/yaml/composer.py +++ /dev/null @@ -1,139 +0,0 @@ - -__all__ = ['Composer', 'ComposerError'] - -from error import MarkedYAMLError -from events import * -from nodes import * - -class ComposerError(MarkedYAMLError): - pass - -class Composer(object): - - def __init__(self): - self.anchors = {} - - def check_node(self): - # Drop the STREAM-START event. - if self.check_event(StreamStartEvent): - self.get_event() - - # If there are more documents available? - return not self.check_event(StreamEndEvent) - - def get_node(self): - # Get the root node of the next document. - if not self.check_event(StreamEndEvent): - return self.compose_document() - - def get_single_node(self): - # Drop the STREAM-START event. - self.get_event() - - # Compose a document if the stream is not empty. - document = None - if not self.check_event(StreamEndEvent): - document = self.compose_document() - - # Ensure that the stream contains no more documents. - if not self.check_event(StreamEndEvent): - event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) - - # Drop the STREAM-END event. - self.get_event() - - return document - - def compose_document(self): - # Drop the DOCUMENT-START event. - self.get_event() - - # Compose the root node. - node = self.compose_node(None, None) - - # Drop the DOCUMENT-END event. 
- self.get_event() - - self.anchors = {} - return node - - def compose_node(self, parent, index): - if self.check_event(AliasEvent): - event = self.get_event() - anchor = event.anchor - if anchor not in self.anchors: - raise ComposerError(None, None, "found undefined alias %r" - % anchor.encode('utf-8'), event.start_mark) - return self.anchors[anchor] - event = self.peek_event() - anchor = event.anchor - if anchor is not None: - if anchor in self.anchors: - raise ComposerError("found duplicate anchor %r; first occurence" - % anchor.encode('utf-8'), self.anchors[anchor].start_mark, - "second occurence", event.start_mark) - self.descend_resolver(parent, index) - if self.check_event(ScalarEvent): - node = self.compose_scalar_node(anchor) - elif self.check_event(SequenceStartEvent): - node = self.compose_sequence_node(anchor) - elif self.check_event(MappingStartEvent): - node = self.compose_mapping_node(anchor) - self.ascend_resolver() - return node - - def compose_scalar_node(self, anchor): - event = self.get_event() - tag = event.tag - if tag is None or tag == u'!': - tag = self.resolve(ScalarNode, event.value, event.implicit) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) - if anchor is not None: - self.anchors[anchor] = node - return node - - def compose_sequence_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == u'!': - tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - index = 0 - while not self.check_event(SequenceEndEvent): - node.value.append(self.compose_node(node, index)) - index += 1 - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - - def compose_mapping_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == u'!': 
- tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() - item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) - item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value - node.value.append((item_key, item_value)) - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - diff --git a/libs/PyYAML-3.10/lib/yaml/constructor.py b/libs/PyYAML-3.10/lib/yaml/constructor.py deleted file mode 100644 index 635faac..0000000 --- a/libs/PyYAML-3.10/lib/yaml/constructor.py +++ /dev/null @@ -1,675 +0,0 @@ - -__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', - 'ConstructorError'] - -from error import * -from nodes import * - -import datetime - -import binascii, re, sys, types - -class ConstructorError(MarkedYAMLError): - pass - -class BaseConstructor(object): - - yaml_constructors = {} - yaml_multi_constructors = {} - - def __init__(self): - self.constructed_objects = {} - self.recursive_objects = {} - self.state_generators = [] - self.deep_construct = False - - def check_data(self): - # If there are more documents available? - return self.check_node() - - def get_data(self): - # Construct and return the next document. - if self.check_node(): - return self.construct_document(self.get_node()) - - def get_single_data(self): - # Ensure that the stream contains a single document and construct it. 
- node = self.get_single_node() - if node is not None: - return self.construct_document(node) - return None - - def construct_document(self, node): - data = self.construct_object(node) - while self.state_generators: - state_generators = self.state_generators - self.state_generators = [] - for generator in state_generators: - for dummy in generator: - pass - self.constructed_objects = {} - self.recursive_objects = {} - self.deep_construct = False - return data - - def construct_object(self, node, deep=False): - if node in self.constructed_objects: - return self.constructed_objects[node] - if deep: - old_deep = self.deep_construct - self.deep_construct = True - if node in self.recursive_objects: - raise ConstructorError(None, None, - "found unconstructable recursive node", node.start_mark) - self.recursive_objects[node] = None - constructor = None - tag_suffix = None - if node.tag in self.yaml_constructors: - constructor = self.yaml_constructors[node.tag] - else: - for tag_prefix in self.yaml_multi_constructors: - if node.tag.startswith(tag_prefix): - tag_suffix = node.tag[len(tag_prefix):] - constructor = self.yaml_multi_constructors[tag_prefix] - break - else: - if None in self.yaml_multi_constructors: - tag_suffix = node.tag - constructor = self.yaml_multi_constructors[None] - elif None in self.yaml_constructors: - constructor = self.yaml_constructors[None] - elif isinstance(node, ScalarNode): - constructor = self.__class__.construct_scalar - elif isinstance(node, SequenceNode): - constructor = self.__class__.construct_sequence - elif isinstance(node, MappingNode): - constructor = self.__class__.construct_mapping - if tag_suffix is None: - data = constructor(self, node) - else: - data = constructor(self, tag_suffix, node) - if isinstance(data, types.GeneratorType): - generator = data - data = generator.next() - if self.deep_construct: - for dummy in generator: - pass - else: - self.state_generators.append(generator) - self.constructed_objects[node] = data - del 
self.recursive_objects[node] - if deep: - self.deep_construct = old_deep - return data - - def construct_scalar(self, node): - if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) - return node.value - - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] - - def construct_mapping(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - mapping = {} - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - try: - hash(key) - except TypeError, exc: - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unacceptable key (%s)" % exc, key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - mapping[key] = value - return mapping - - def construct_pairs(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - pairs = [] - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - value = self.construct_object(value_node, deep=deep) - pairs.append((key, value)) - return pairs - - def add_constructor(cls, tag, constructor): - if not 'yaml_constructors' in cls.__dict__: - cls.yaml_constructors = cls.yaml_constructors.copy() - cls.yaml_constructors[tag] = constructor - add_constructor = classmethod(add_constructor) - - def add_multi_constructor(cls, tag_prefix, multi_constructor): - if not 'yaml_multi_constructors' in cls.__dict__: - cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() - 
cls.yaml_multi_constructors[tag_prefix] = multi_constructor - add_multi_constructor = classmethod(add_multi_constructor) - -class SafeConstructor(BaseConstructor): - - def construct_scalar(self, node): - if isinstance(node, MappingNode): - for key_node, value_node in node.value: - if key_node.tag == u'tag:yaml.org,2002:value': - return self.construct_scalar(value_node) - return BaseConstructor.construct_scalar(self, node) - - def flatten_mapping(self, node): - merge = [] - index = 0 - while index < len(node.value): - key_node, value_node = node.value[index] - if key_node.tag == u'tag:yaml.org,2002:merge': - del node.value[index] - if isinstance(value_node, MappingNode): - self.flatten_mapping(value_node) - merge.extend(value_node.value) - elif isinstance(value_node, SequenceNode): - submerge = [] - for subnode in value_node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) - self.flatten_mapping(subnode) - submerge.append(subnode.value) - submerge.reverse() - for value in submerge: - merge.extend(value) - else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) - elif key_node.tag == u'tag:yaml.org,2002:value': - key_node.tag = u'tag:yaml.org,2002:str' - index += 1 - else: - index += 1 - if merge: - node.value = merge + node.value - - def construct_mapping(self, node, deep=False): - if isinstance(node, MappingNode): - self.flatten_mapping(node) - return BaseConstructor.construct_mapping(self, node, deep=deep) - - def construct_yaml_null(self, node): - self.construct_scalar(node) - return None - - bool_values = { - u'yes': True, - u'no': False, - u'true': True, - u'false': False, - u'on': True, - u'off': False, - } - - def construct_yaml_bool(self, node): - value = 
self.construct_scalar(node) - return self.bool_values[value.lower()] - - def construct_yaml_int(self, node): - value = str(self.construct_scalar(node)) - value = value.replace('_', '') - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '0': - return 0 - elif value.startswith('0b'): - return sign*int(value[2:], 2) - elif value.startswith('0x'): - return sign*int(value[2:], 16) - elif value[0] == '0': - return sign*int(value, 8) - elif ':' in value: - digits = [int(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*int(value) - - inf_value = 1e300 - while inf_value != inf_value*inf_value: - inf_value *= inf_value - nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). - - def construct_yaml_float(self, node): - value = str(self.construct_scalar(node)) - value = value.replace('_', '').lower() - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '.inf': - return sign*self.inf_value - elif value == '.nan': - return self.nan_value - elif ':' in value: - digits = [float(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0.0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*float(value) - - def construct_yaml_binary(self, node): - value = self.construct_scalar(node) - try: - return str(value).decode('base64') - except (binascii.Error, UnicodeEncodeError), exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - timestamp_regexp = re.compile( - ur'''^(?P[0-9][0-9][0-9][0-9]) - -(?P[0-9][0-9]?) - -(?P[0-9][0-9]?) - (?:(?:[Tt]|[ \t]+) - (?P[0-9][0-9]?) - :(?P[0-9][0-9]) - :(?P[0-9][0-9]) - (?:\.(?P[0-9]*))? - (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) 
- (?::(?P[0-9][0-9]))?))?)?$''', re.X) - - def construct_yaml_timestamp(self, node): - value = self.construct_scalar(node) - match = self.timestamp_regexp.match(node.value) - values = match.groupdict() - year = int(values['year']) - month = int(values['month']) - day = int(values['day']) - if not values['hour']: - return datetime.date(year, month, day) - hour = int(values['hour']) - minute = int(values['minute']) - second = int(values['second']) - fraction = 0 - if values['fraction']: - fraction = values['fraction'][:6] - while len(fraction) < 6: - fraction += '0' - fraction = int(fraction) - delta = None - if values['tz_sign']: - tz_hour = int(values['tz_hour']) - tz_minute = int(values['tz_minute'] or 0) - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) - if values['tz_sign'] == '-': - delta = -delta - data = datetime.datetime(year, month, day, hour, minute, second, fraction) - if delta: - data -= delta - return data - - def construct_yaml_omap(self, node): - # Note: we do not check for duplicate keys, because it's too - # CPU-expensive. - omap = [] - yield omap - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - omap.append((key, value)) - - def construct_yaml_pairs(self, node): - # Note: the same code as `construct_yaml_omap`. 
- pairs = [] - yield pairs - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - pairs.append((key, value)) - - def construct_yaml_set(self, node): - data = set() - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_str(self, node): - value = self.construct_scalar(node) - try: - return value.encode('ascii') - except UnicodeEncodeError: - return value - - def construct_yaml_seq(self, node): - data = [] - yield data - data.extend(self.construct_sequence(node)) - - def construct_yaml_map(self, node): - data = {} - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_object(self, node, cls): - data = cls.__new__(cls) - yield data - if hasattr(data, '__setstate__'): - state = self.construct_mapping(node, deep=True) - data.__setstate__(state) - else: - state = self.construct_mapping(node) - data.__dict__.update(state) - - def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag.encode('utf-8'), - node.start_mark) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:null', - SafeConstructor.construct_yaml_null) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:bool', - SafeConstructor.construct_yaml_bool) - 
-SafeConstructor.add_constructor( - u'tag:yaml.org,2002:int', - SafeConstructor.construct_yaml_int) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:float', - SafeConstructor.construct_yaml_float) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:binary', - SafeConstructor.construct_yaml_binary) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:timestamp', - SafeConstructor.construct_yaml_timestamp) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:omap', - SafeConstructor.construct_yaml_omap) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:pairs', - SafeConstructor.construct_yaml_pairs) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:set', - SafeConstructor.construct_yaml_set) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:str', - SafeConstructor.construct_yaml_str) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:seq', - SafeConstructor.construct_yaml_seq) - -SafeConstructor.add_constructor( - u'tag:yaml.org,2002:map', - SafeConstructor.construct_yaml_map) - -SafeConstructor.add_constructor(None, - SafeConstructor.construct_undefined) - -class Constructor(SafeConstructor): - - def construct_python_str(self, node): - return self.construct_scalar(node).encode('utf-8') - - def construct_python_unicode(self, node): - return self.construct_scalar(node) - - def construct_python_long(self, node): - return long(self.construct_yaml_int(node)) - - def construct_python_complex(self, node): - return complex(self.construct_scalar(node)) - - def construct_python_tuple(self, node): - return tuple(self.construct_sequence(node)) - - def find_python_module(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python module", mark, - "expected non-empty name appended to the tag", mark) - try: - __import__(name) - except ImportError, exc: - raise ConstructorError("while constructing a Python module", mark, - "cannot find module %r (%s)" % (name.encode('utf-8'), exc), mark) - return 
sys.modules[name] - - def find_python_name(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python object", mark, - "expected non-empty name appended to the tag", mark) - if u'.' in name: - module_name, object_name = name.rsplit('.', 1) - else: - module_name = '__builtin__' - object_name = name - try: - __import__(module_name) - except ImportError, exc: - raise ConstructorError("while constructing a Python object", mark, - "cannot find module %r (%s)" % (module_name.encode('utf-8'), exc), mark) - module = sys.modules[module_name] - if not hasattr(module, object_name): - raise ConstructorError("while constructing a Python object", mark, - "cannot find %r in the module %r" % (object_name.encode('utf-8'), - module.__name__), mark) - return getattr(module, object_name) - - def construct_python_name(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python name", node.start_mark, - "expected the empty value, but found %r" % value.encode('utf-8'), - node.start_mark) - return self.find_python_name(suffix, node.start_mark) - - def construct_python_module(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python module", node.start_mark, - "expected the empty value, but found %r" % value.encode('utf-8'), - node.start_mark) - return self.find_python_module(suffix, node.start_mark) - - class classobj: pass - - def make_python_instance(self, suffix, node, - args=None, kwds=None, newobj=False): - if not args: - args = [] - if not kwds: - kwds = {} - cls = self.find_python_name(suffix, node.start_mark) - if newobj and isinstance(cls, type(self.classobj)) \ - and not args and not kwds: - instance = self.classobj() - instance.__class__ = cls - return instance - elif newobj and isinstance(cls, type): - return cls.__new__(cls, *args, **kwds) - else: - return cls(*args, **kwds) - - def set_python_instance_state(self, 
instance, state): - if hasattr(instance, '__setstate__'): - instance.__setstate__(state) - else: - slotstate = {} - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if hasattr(instance, '__dict__'): - instance.__dict__.update(state) - elif state: - slotstate.update(state) - for key, value in slotstate.items(): - setattr(object, key, value) - - def construct_python_object(self, suffix, node): - # Format: - # !!python/object:module.name { ... state ... } - instance = self.make_python_instance(suffix, node, newobj=True) - yield instance - deep = hasattr(instance, '__setstate__') - state = self.construct_mapping(node, deep=deep) - self.set_python_instance_state(instance, state) - - def construct_python_object_apply(self, suffix, node, newobj=False): - # Format: - # !!python/object/apply # (or !!python/object/new) - # args: [ ... arguments ... ] - # kwds: { ... keywords ... } - # state: ... state ... - # listitems: [ ... listitems ... ] - # dictitems: { ... dictitems ... } - # or short format: - # !!python/object/apply [ ... arguments ... ] - # The difference between !!python/object/apply and !!python/object/new - # is how an object is created, check make_python_instance for details. 
- if isinstance(node, SequenceNode): - args = self.construct_sequence(node, deep=True) - kwds = {} - state = {} - listitems = [] - dictitems = {} - else: - value = self.construct_mapping(node, deep=True) - args = value.get('args', []) - kwds = value.get('kwds', {}) - state = value.get('state', {}) - listitems = value.get('listitems', []) - dictitems = value.get('dictitems', {}) - instance = self.make_python_instance(suffix, node, args, kwds, newobj) - if state: - self.set_python_instance_state(instance, state) - if listitems: - instance.extend(listitems) - if dictitems: - for key in dictitems: - instance[key] = dictitems[key] - return instance - - def construct_python_object_new(self, suffix, node): - return self.construct_python_object_apply(suffix, node, newobj=True) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/none', - Constructor.construct_yaml_null) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/bool', - Constructor.construct_yaml_bool) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/str', - Constructor.construct_python_str) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/unicode', - Constructor.construct_python_unicode) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/int', - Constructor.construct_yaml_int) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/long', - Constructor.construct_python_long) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/float', - Constructor.construct_yaml_float) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/complex', - Constructor.construct_python_complex) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/list', - Constructor.construct_yaml_seq) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/tuple', - Constructor.construct_python_tuple) - -Constructor.add_constructor( - u'tag:yaml.org,2002:python/dict', - Constructor.construct_yaml_map) - -Constructor.add_multi_constructor( - 
u'tag:yaml.org,2002:python/name:', - Constructor.construct_python_name) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/module:', - Constructor.construct_python_module) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object:', - Constructor.construct_python_object) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object/apply:', - Constructor.construct_python_object_apply) - -Constructor.add_multi_constructor( - u'tag:yaml.org,2002:python/object/new:', - Constructor.construct_python_object_new) - diff --git a/libs/PyYAML-3.10/lib/yaml/cyaml.py b/libs/PyYAML-3.10/lib/yaml/cyaml.py deleted file mode 100644 index 68dcd75..0000000 --- a/libs/PyYAML-3.10/lib/yaml/cyaml.py +++ /dev/null @@ -1,85 +0,0 @@ - -__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', - 'CBaseDumper', 'CSafeDumper', 'CDumper'] - -from _yaml import CParser, CEmitter - -from constructor import * - -from serializer import * -from representer import * - -from resolver import * - -class CBaseLoader(CParser, BaseConstructor, BaseResolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class CSafeLoader(CParser, SafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class CLoader(CParser, Constructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - Constructor.__init__(self) - Resolver.__init__(self) - -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, 
line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CSafeDumper(CEmitter, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CDumper(CEmitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib/yaml/dumper.py b/libs/PyYAML-3.10/lib/yaml/dumper.py deleted file mode 100644 index f811d2c..0000000 --- a/libs/PyYAML-3.10/lib/yaml/dumper.py +++ /dev/null @@ -1,62 +0,0 @@ - -__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] - -from emitter import * -from serializer import * -from representer import * -from resolver import * - -class BaseDumper(Emitter, 
Serializer, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class Dumper(Emitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - 
Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib/yaml/emitter.py b/libs/PyYAML-3.10/lib/yaml/emitter.py deleted file mode 100644 index e5bcdcc..0000000 --- a/libs/PyYAML-3.10/lib/yaml/emitter.py +++ /dev/null @@ -1,1140 +0,0 @@ - -# Emitter expects events obeying the following grammar: -# stream ::= STREAM-START document* STREAM-END -# document ::= DOCUMENT-START node DOCUMENT-END -# node ::= SCALAR | sequence | mapping -# sequence ::= SEQUENCE-START node* SEQUENCE-END -# mapping ::= MAPPING-START (node node)* MAPPING-END - -__all__ = ['Emitter', 'EmitterError'] - -from error import YAMLError -from events import * - -class EmitterError(YAMLError): - pass - -class ScalarAnalysis(object): - def __init__(self, scalar, empty, multiline, - allow_flow_plain, allow_block_plain, - allow_single_quoted, allow_double_quoted, - allow_block): - self.scalar = scalar - self.empty = empty - self.multiline = multiline - self.allow_flow_plain = allow_flow_plain - self.allow_block_plain = allow_block_plain - self.allow_single_quoted = allow_single_quoted - self.allow_double_quoted = allow_double_quoted - self.allow_block = allow_block - -class Emitter(object): - - DEFAULT_TAG_PREFIXES = { - u'!' : u'!', - u'tag:yaml.org,2002:' : u'!!', - } - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - - # The stream should have the methods `write` and possibly `flush`. - self.stream = stream - - # Encoding can be overriden by STREAM-START. - self.encoding = None - - # Emitter is a state machine with a stack of states to handle nested - # structures. - self.states = [] - self.state = self.expect_stream_start - - # Current event and the event queue. - self.events = [] - self.event = None - - # The current indentation level and the stack of previous indents. - self.indents = [] - self.indent = None - - # Flow level. 
- self.flow_level = 0 - - # Contexts. - self.root_context = False - self.sequence_context = False - self.mapping_context = False - self.simple_key_context = False - - # Characteristics of the last emitted character: - # - current position. - # - is it a whitespace? - # - is it an indention character - # (indentation space, '-', '?', or ':')? - self.line = 0 - self.column = 0 - self.whitespace = True - self.indention = True - - # Whether the document requires an explicit document indicator - self.open_ended = False - - # Formatting details. - self.canonical = canonical - self.allow_unicode = allow_unicode - self.best_indent = 2 - if indent and 1 < indent < 10: - self.best_indent = indent - self.best_width = 80 - if width and width > self.best_indent*2: - self.best_width = width - self.best_line_break = u'\n' - if line_break in [u'\r', u'\n', u'\r\n']: - self.best_line_break = line_break - - # Tag prefixes. - self.tag_prefixes = None - - # Prepared anchor and tag. - self.prepared_anchor = None - self.prepared_tag = None - - # Scalar analysis and style. - self.analysis = None - self.style = None - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def emit(self, event): - self.events.append(event) - while not self.need_more_events(): - self.event = self.events.pop(0) - self.state() - self.event = None - - # In some cases, we wait for a few next events before emitting. 
- - def need_more_events(self): - if not self.events: - return True - event = self.events[0] - if isinstance(event, DocumentStartEvent): - return self.need_events(1) - elif isinstance(event, SequenceStartEvent): - return self.need_events(2) - elif isinstance(event, MappingStartEvent): - return self.need_events(3) - else: - return False - - def need_events(self, count): - level = 0 - for event in self.events[1:]: - if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): - level += 1 - elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): - level -= 1 - elif isinstance(event, StreamEndEvent): - level = -1 - if level < 0: - return False - return (len(self.events) < count+1) - - def increase_indent(self, flow=False, indentless=False): - self.indents.append(self.indent) - if self.indent is None: - if flow: - self.indent = self.best_indent - else: - self.indent = 0 - elif not indentless: - self.indent += self.best_indent - - # States. - - # Stream handlers. - - def expect_stream_start(self): - if isinstance(self.event, StreamStartEvent): - if self.event.encoding and not getattr(self.stream, 'encoding', None): - self.encoding = self.event.encoding - self.write_stream_start() - self.state = self.expect_first_document_start - else: - raise EmitterError("expected StreamStartEvent, but got %s" - % self.event) - - def expect_nothing(self): - raise EmitterError("expected nothing, but got %s" % self.event) - - # Document handlers. 
- - def expect_first_document_start(self): - return self.expect_document_start(first=True) - - def expect_document_start(self, first=False): - if isinstance(self.event, DocumentStartEvent): - if (self.event.version or self.event.tags) and self.open_ended: - self.write_indicator(u'...', True) - self.write_indent() - if self.event.version: - version_text = self.prepare_version(self.event.version) - self.write_version_directive(version_text) - self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() - if self.event.tags: - handles = self.event.tags.keys() - handles.sort() - for handle in handles: - prefix = self.event.tags[handle] - self.tag_prefixes[prefix] = handle - handle_text = self.prepare_tag_handle(handle) - prefix_text = self.prepare_tag_prefix(prefix) - self.write_tag_directive(handle_text, prefix_text) - implicit = (first and not self.event.explicit and not self.canonical - and not self.event.version and not self.event.tags - and not self.check_empty_document()) - if not implicit: - self.write_indent() - self.write_indicator(u'---', True) - if self.canonical: - self.write_indent() - self.state = self.expect_document_root - elif isinstance(self.event, StreamEndEvent): - if self.open_ended: - self.write_indicator(u'...', True) - self.write_indent() - self.write_stream_end() - self.state = self.expect_nothing - else: - raise EmitterError("expected DocumentStartEvent, but got %s" - % self.event) - - def expect_document_end(self): - if isinstance(self.event, DocumentEndEvent): - self.write_indent() - if self.event.explicit: - self.write_indicator(u'...', True) - self.write_indent() - self.flush_stream() - self.state = self.expect_document_start - else: - raise EmitterError("expected DocumentEndEvent, but got %s" - % self.event) - - def expect_document_root(self): - self.states.append(self.expect_document_end) - self.expect_node(root=True) - - # Node handlers. 
- - def expect_node(self, root=False, sequence=False, mapping=False, - simple_key=False): - self.root_context = root - self.sequence_context = sequence - self.mapping_context = mapping - self.simple_key_context = simple_key - if isinstance(self.event, AliasEvent): - self.expect_alias() - elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): - self.process_anchor(u'&') - self.process_tag() - if isinstance(self.event, ScalarEvent): - self.expect_scalar() - elif isinstance(self.event, SequenceStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_sequence(): - self.expect_flow_sequence() - else: - self.expect_block_sequence() - elif isinstance(self.event, MappingStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_mapping(): - self.expect_flow_mapping() - else: - self.expect_block_mapping() - else: - raise EmitterError("expected NodeEvent, but got %s" % self.event) - - def expect_alias(self): - if self.event.anchor is None: - raise EmitterError("anchor is not specified for alias") - self.process_anchor(u'*') - self.state = self.states.pop() - - def expect_scalar(self): - self.increase_indent(flow=True) - self.process_scalar() - self.indent = self.indents.pop() - self.state = self.states.pop() - - # Flow sequence handlers. 
- - def expect_flow_sequence(self): - self.write_indicator(u'[', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_sequence_item - - def expect_first_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(u']', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - def expect_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(u',', False) - self.write_indent() - self.write_indicator(u']', False) - self.state = self.states.pop() - else: - self.write_indicator(u',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - # Flow mapping handlers. 
- - def expect_flow_mapping(self): - self.write_indicator(u'{', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_mapping_key - - def expect_first_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(u'}', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(u',', False) - self.write_indent() - self.write_indicator(u'}', False) - self.state = self.states.pop() - else: - self.write_indicator(u',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_simple_value(self): - self.write_indicator(u':', False) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - def expect_flow_mapping_value(self): - if self.canonical or self.column > self.best_width: - self.write_indent() - self.write_indicator(u':', True) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - # Block sequence handlers. 
- - def expect_block_sequence(self): - indentless = (self.mapping_context and not self.indention) - self.increase_indent(flow=False, indentless=indentless) - self.state = self.expect_first_block_sequence_item - - def expect_first_block_sequence_item(self): - return self.expect_block_sequence_item(first=True) - - def expect_block_sequence_item(self, first=False): - if not first and isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - self.write_indicator(u'-', True, indention=True) - self.states.append(self.expect_block_sequence_item) - self.expect_node(sequence=True) - - # Block mapping handlers. - - def expect_block_mapping(self): - self.increase_indent(flow=False) - self.state = self.expect_first_block_mapping_key - - def expect_first_block_mapping_key(self): - return self.expect_block_mapping_key(first=True) - - def expect_block_mapping_key(self, first=False): - if not first and isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - if self.check_simple_key(): - self.states.append(self.expect_block_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator(u'?', True, indention=True) - self.states.append(self.expect_block_mapping_value) - self.expect_node(mapping=True) - - def expect_block_mapping_simple_value(self): - self.write_indicator(u':', False) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - def expect_block_mapping_value(self): - self.write_indent() - self.write_indicator(u':', True, indention=True) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - # Checkers. 
- - def check_empty_sequence(self): - return (isinstance(self.event, SequenceStartEvent) and self.events - and isinstance(self.events[0], SequenceEndEvent)) - - def check_empty_mapping(self): - return (isinstance(self.event, MappingStartEvent) and self.events - and isinstance(self.events[0], MappingEndEvent)) - - def check_empty_document(self): - if not isinstance(self.event, DocumentStartEvent) or not self.events: - return False - event = self.events[0] - return (isinstance(event, ScalarEvent) and event.anchor is None - and event.tag is None and event.implicit and event.value == u'') - - def check_simple_key(self): - length = 0 - if isinstance(self.event, NodeEvent) and self.event.anchor is not None: - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - length += len(self.prepared_anchor) - if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ - and self.event.tag is not None: - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(self.event.tag) - length += len(self.prepared_tag) - if isinstance(self.event, ScalarEvent): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - length += len(self.analysis.scalar) - return (length < 128 and (isinstance(self.event, AliasEvent) - or (isinstance(self.event, ScalarEvent) - and not self.analysis.empty and not self.analysis.multiline) - or self.check_empty_sequence() or self.check_empty_mapping())) - - # Anchor, Tag, and Scalar processors. 
- - def process_anchor(self, indicator): - if self.event.anchor is None: - self.prepared_anchor = None - return - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - if self.prepared_anchor: - self.write_indicator(indicator+self.prepared_anchor, True) - self.prepared_anchor = None - - def process_tag(self): - tag = self.event.tag - if isinstance(self.event, ScalarEvent): - if self.style is None: - self.style = self.choose_scalar_style() - if ((not self.canonical or tag is None) and - ((self.style == '' and self.event.implicit[0]) - or (self.style != '' and self.event.implicit[1]))): - self.prepared_tag = None - return - if self.event.implicit[0] and tag is None: - tag = u'!' - self.prepared_tag = None - else: - if (not self.canonical or tag is None) and self.event.implicit: - self.prepared_tag = None - return - if tag is None: - raise EmitterError("tag is not specified") - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(tag) - if self.prepared_tag: - self.write_indicator(self.prepared_tag, True) - self.prepared_tag = None - - def choose_scalar_style(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.event.style == '"' or self.canonical: - return '"' - if not self.event.style and self.event.implicit[0]: - if (not (self.simple_key_context and - (self.analysis.empty or self.analysis.multiline)) - and (self.flow_level and self.analysis.allow_flow_plain - or (not self.flow_level and self.analysis.allow_block_plain))): - return '' - if self.event.style and self.event.style in '|>': - if (not self.flow_level and not self.simple_key_context - and self.analysis.allow_block): - return self.event.style - if not self.event.style or self.event.style == '\'': - if (self.analysis.allow_single_quoted and - not (self.simple_key_context and self.analysis.multiline)): - return '\'' - return '"' - - def process_scalar(self): - if self.analysis is None: - self.analysis 
= self.analyze_scalar(self.event.value) - if self.style is None: - self.style = self.choose_scalar_style() - split = (not self.simple_key_context) - #if self.analysis.multiline and split \ - # and (not self.style or self.style in '\'\"'): - # self.write_indent() - if self.style == '"': - self.write_double_quoted(self.analysis.scalar, split) - elif self.style == '\'': - self.write_single_quoted(self.analysis.scalar, split) - elif self.style == '>': - self.write_folded(self.analysis.scalar) - elif self.style == '|': - self.write_literal(self.analysis.scalar) - else: - self.write_plain(self.analysis.scalar, split) - self.analysis = None - self.style = None - - # Analyzers. - - def prepare_version(self, version): - major, minor = version - if major != 1: - raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) - return u'%d.%d' % (major, minor) - - def prepare_tag_handle(self, handle): - if not handle: - raise EmitterError("tag handle must not be empty") - if handle[0] != u'!' 
or handle[-1] != u'!': - raise EmitterError("tag handle must start and end with '!': %r" - % (handle.encode('utf-8'))) - for ch in handle[1:-1]: - if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_'): - raise EmitterError("invalid character %r in the tag handle: %r" - % (ch.encode('utf-8'), handle.encode('utf-8'))) - return handle - - def prepare_tag_prefix(self, prefix): - if not prefix: - raise EmitterError("tag prefix must not be empty") - chunks = [] - start = end = 0 - if prefix[0] == u'!': - end = 1 - while end < len(prefix): - ch = prefix[end] - if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?!:@&=+$,_.~*\'()[]': - end += 1 - else: - if start < end: - chunks.append(prefix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append(u'%%%02X' % ord(ch)) - if start < end: - chunks.append(prefix[start:end]) - return u''.join(chunks) - - def prepare_tag(self, tag): - if not tag: - raise EmitterError("tag must not be empty") - if tag == u'!': - return tag - handle = None - suffix = tag - prefixes = self.tag_prefixes.keys() - prefixes.sort() - for prefix in prefixes: - if tag.startswith(prefix) \ - and (prefix == u'!' or len(prefix) < len(tag)): - handle = self.tag_prefixes[prefix] - suffix = tag[len(prefix):] - chunks = [] - start = end = 0 - while end < len(suffix): - ch = suffix[end] - if u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?:@&=+$,_.~*\'()[]' \ - or (ch == u'!' 
and handle != u'!'): - end += 1 - else: - if start < end: - chunks.append(suffix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append(u'%%%02X' % ord(ch)) - if start < end: - chunks.append(suffix[start:end]) - suffix_text = u''.join(chunks) - if handle: - return u'%s%s' % (handle, suffix_text) - else: - return u'!<%s>' % suffix_text - - def prepare_anchor(self, anchor): - if not anchor: - raise EmitterError("anchor must not be empty") - for ch in anchor: - if not (u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_'): - raise EmitterError("invalid character %r in the anchor: %r" - % (ch.encode('utf-8'), anchor.encode('utf-8'))) - return anchor - - def analyze_scalar(self, scalar): - - # Empty scalar is a special case. - if not scalar: - return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, - allow_flow_plain=False, allow_block_plain=True, - allow_single_quoted=True, allow_double_quoted=True, - allow_block=False) - - # Indicators and special characters. - block_indicators = False - flow_indicators = False - line_breaks = False - special_characters = False - - # Important whitespace combinations. - leading_space = False - leading_break = False - trailing_space = False - trailing_break = False - break_space = False - space_break = False - - # Check document indicators. - if scalar.startswith(u'---') or scalar.startswith(u'...'): - block_indicators = True - flow_indicators = True - - # First character or preceded by a whitespace. - preceeded_by_whitespace = True - - # Last character or followed by a whitespace. - followed_by_whitespace = (len(scalar) == 1 or - scalar[1] in u'\0 \t\r\n\x85\u2028\u2029') - - # The previous character is a space. - previous_space = False - - # The previous character is a break. - previous_break = False - - index = 0 - while index < len(scalar): - ch = scalar[index] - - # Check for indicators. - if index == 0: - # Leading indicators are special characters. 
- if ch in u'#,[]{}&*!|>\'\"%@`': - flow_indicators = True - block_indicators = True - if ch in u'?:': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == u'-' and followed_by_whitespace: - flow_indicators = True - block_indicators = True - else: - # Some indicators cannot appear within a scalar as well. - if ch in u',?[]{}': - flow_indicators = True - if ch == u':': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == u'#' and preceeded_by_whitespace: - flow_indicators = True - block_indicators = True - - # Check for line breaks, special, and unicode characters. - if ch in u'\n\x85\u2028\u2029': - line_breaks = True - if not (ch == u'\n' or u'\x20' <= ch <= u'\x7E'): - if (ch == u'\x85' or u'\xA0' <= ch <= u'\uD7FF' - or u'\uE000' <= ch <= u'\uFFFD') and ch != u'\uFEFF': - unicode_characters = True - if not self.allow_unicode: - special_characters = True - else: - special_characters = True - - # Detect important whitespace combinations. - if ch == u' ': - if index == 0: - leading_space = True - if index == len(scalar)-1: - trailing_space = True - if previous_break: - break_space = True - previous_space = True - previous_break = False - elif ch in u'\n\x85\u2028\u2029': - if index == 0: - leading_break = True - if index == len(scalar)-1: - trailing_break = True - if previous_space: - space_break = True - previous_space = False - previous_break = True - else: - previous_space = False - previous_break = False - - # Prepare for the next character. - index += 1 - preceeded_by_whitespace = (ch in u'\0 \t\r\n\x85\u2028\u2029') - followed_by_whitespace = (index+1 >= len(scalar) or - scalar[index+1] in u'\0 \t\r\n\x85\u2028\u2029') - - # Let's decide what styles are allowed. - allow_flow_plain = True - allow_block_plain = True - allow_single_quoted = True - allow_double_quoted = True - allow_block = True - - # Leading and trailing whitespaces are bad for plain scalars. 
- if (leading_space or leading_break - or trailing_space or trailing_break): - allow_flow_plain = allow_block_plain = False - - # We do not permit trailing spaces for block scalars. - if trailing_space: - allow_block = False - - # Spaces at the beginning of a new line are only acceptable for block - # scalars. - if break_space: - allow_flow_plain = allow_block_plain = allow_single_quoted = False - - # Spaces followed by breaks, as well as special character are only - # allowed for double quoted scalars. - if space_break or special_characters: - allow_flow_plain = allow_block_plain = \ - allow_single_quoted = allow_block = False - - # Although the plain scalar writer supports breaks, we never emit - # multiline plain scalars. - if line_breaks: - allow_flow_plain = allow_block_plain = False - - # Flow indicators are forbidden for flow plain scalars. - if flow_indicators: - allow_flow_plain = False - - # Block indicators are forbidden for block plain scalars. - if block_indicators: - allow_block_plain = False - - return ScalarAnalysis(scalar=scalar, - empty=False, multiline=line_breaks, - allow_flow_plain=allow_flow_plain, - allow_block_plain=allow_block_plain, - allow_single_quoted=allow_single_quoted, - allow_double_quoted=allow_double_quoted, - allow_block=allow_block) - - # Writers. - - def flush_stream(self): - if hasattr(self.stream, 'flush'): - self.stream.flush() - - def write_stream_start(self): - # Write BOM if needed. 
- if self.encoding and self.encoding.startswith('utf-16'): - self.stream.write(u'\uFEFF'.encode(self.encoding)) - - def write_stream_end(self): - self.flush_stream() - - def write_indicator(self, indicator, need_whitespace, - whitespace=False, indention=False): - if self.whitespace or not need_whitespace: - data = indicator - else: - data = u' '+indicator - self.whitespace = whitespace - self.indention = self.indention and indention - self.column += len(data) - self.open_ended = False - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_indent(self): - indent = self.indent or 0 - if not self.indention or self.column > indent \ - or (self.column == indent and not self.whitespace): - self.write_line_break() - if self.column < indent: - self.whitespace = True - data = u' '*(indent-self.column) - self.column = indent - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_line_break(self, data=None): - if data is None: - data = self.best_line_break - self.whitespace = True - self.indention = True - self.line += 1 - self.column = 0 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_version_directive(self, version_text): - data = u'%%YAML %s' % version_text - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - def write_tag_directive(self, handle_text, prefix_text): - data = u'%%TAG %s %s' % (handle_text, prefix_text) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - # Scalar streams. 
- - def write_single_quoted(self, text, split=True): - self.write_indicator(u'\'', True) - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch is None or ch != u' ': - if start+1 == end and self.column > self.best_width and split \ - and start != 0 and end != len(text): - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - if text[start] == u'\n': - self.write_line_break() - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029' or ch == u'\'': - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch == u'\'': - data = u'\'\'' - self.column += 2 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end + 1 - if ch is not None: - spaces = (ch == u' ') - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - self.write_indicator(u'\'', False) - - ESCAPE_REPLACEMENTS = { - u'\0': u'0', - u'\x07': u'a', - u'\x08': u'b', - u'\x09': u't', - u'\x0A': u'n', - u'\x0B': u'v', - u'\x0C': u'f', - u'\x0D': u'r', - u'\x1B': u'e', - u'\"': u'\"', - u'\\': u'\\', - u'\x85': u'N', - u'\xA0': u'_', - u'\u2028': u'L', - u'\u2029': u'P', - } - - def write_double_quoted(self, text, split=True): - self.write_indicator(u'"', True) - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if ch is None or ch in u'"\\\x85\u2028\u2029\uFEFF' \ - or not (u'\x20' <= ch <= u'\x7E' - or (self.allow_unicode - and (u'\xA0' <= ch <= u'\uD7FF' - or u'\uE000' <= ch <= u'\uFFFD'))): - if 
start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - if ch in self.ESCAPE_REPLACEMENTS: - data = u'\\'+self.ESCAPE_REPLACEMENTS[ch] - elif ch <= u'\xFF': - data = u'\\x%02X' % ord(ch) - elif ch <= u'\uFFFF': - data = u'\\u%04X' % ord(ch) - else: - data = u'\\U%08X' % ord(ch) - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end+1 - if 0 < end < len(text)-1 and (ch == u' ' or start >= end) \ - and self.column+(end-start) > self.best_width and split: - data = text[start:end]+u'\\' - if start < end: - start = end - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_indent() - self.whitespace = False - self.indention = False - if text[start] == u' ': - data = u'\\' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - end += 1 - self.write_indicator(u'"', False) - - def determine_block_hints(self, text): - hints = u'' - if text: - if text[0] in u' \n\x85\u2028\u2029': - hints += unicode(self.best_indent) - if text[-1] not in u'\n\x85\u2028\u2029': - hints += u'-' - elif len(text) == 1 or text[-2] in u'\n\x85\u2028\u2029': - hints += u'+' - return hints - - def write_folded(self, text): - hints = self.determine_block_hints(text) - self.write_indicator(u'>'+hints, True) - if hints[-1:] == u'+': - self.open_ended = True - self.write_line_break() - leading_space = True - spaces = False - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - if not leading_space and ch is not None and ch != u' ' \ - and text[start] == u'\n': - self.write_line_break() - leading_space = (ch == u' ') - for br in text[start:end]: - if br == u'\n': - 
self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - elif spaces: - if ch != u' ': - if start+1 == end and self.column > self.best_width: - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in u'\n\x85\u2028\u2029') - spaces = (ch == u' ') - end += 1 - - def write_literal(self, text): - hints = self.determine_block_hints(text) - self.write_indicator(u'|'+hints, True) - if hints[-1:] == u'+': - self.open_ended = True - self.write_line_break() - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in u'\n\x85\u2028\u2029': - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - else: - if ch is None or ch in u'\n\x85\u2028\u2029': - data = text[start:end] - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - - def write_plain(self, text, split=True): - if self.root_context: - self.open_ended = True - if not text: - return - if not self.whitespace: - data = u' ' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.whitespace = False - self.indention = False - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = 
text[end] - if spaces: - if ch != u' ': - if start+1 == end and self.column > self.best_width and split: - self.write_indent() - self.whitespace = False - self.indention = False - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch not in u'\n\x85\u2028\u2029': - if text[start] == u'\n': - self.write_line_break() - for br in text[start:end]: - if br == u'\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - self.whitespace = False - self.indention = False - start = end - else: - if ch is None or ch in u' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - spaces = (ch == u' ') - breaks = (ch in u'\n\x85\u2028\u2029') - end += 1 - diff --git a/libs/PyYAML-3.10/lib/yaml/error.py b/libs/PyYAML-3.10/lib/yaml/error.py deleted file mode 100644 index 577686d..0000000 --- a/libs/PyYAML-3.10/lib/yaml/error.py +++ /dev/null @@ -1,75 +0,0 @@ - -__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] - -class Mark(object): - - def __init__(self, name, index, line, column, buffer, pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self, indent=4, max_length=75): - if self.buffer is None: - return None - head = '' - start = self.pointer - while start > 0 and self.buffer[start-1] not in u'\0\r\n\x85\u2028\u2029': - start -= 1 - if self.pointer-start > max_length/2-1: - head = ' ... ' - start += 5 - break - tail = '' - end = self.pointer - while end < len(self.buffer) and self.buffer[end] not in u'\0\r\n\x85\u2028\u2029': - end += 1 - if end-self.pointer > max_length/2-1: - tail = ' ... 
' - end -= 5 - break - snippet = self.buffer[start:end].encode('utf-8') - return ' '*indent + head + snippet + tail + '\n' \ - + ' '*(indent+self.pointer-start+len(head)) + '^' - - def __str__(self): - snippet = self.get_snippet() - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - if snippet is not None: - where += ":\n"+snippet - return where - -class YAMLError(Exception): - pass - -class MarkedYAMLError(YAMLError): - - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - self.context = context - self.context_mark = context_mark - self.problem = problem - self.problem_mark = problem_mark - self.note = note - - def __str__(self): - lines = [] - if self.context is not None: - lines.append(self.context) - if self.context_mark is not None \ - and (self.problem is None or self.problem_mark is None - or self.context_mark.name != self.problem_mark.name - or self.context_mark.line != self.problem_mark.line - or self.context_mark.column != self.problem_mark.column): - lines.append(str(self.context_mark)) - if self.problem is not None: - lines.append(self.problem) - if self.problem_mark is not None: - lines.append(str(self.problem_mark)) - if self.note is not None: - lines.append(self.note) - return '\n'.join(lines) - diff --git a/libs/PyYAML-3.10/lib/yaml/events.py b/libs/PyYAML-3.10/lib/yaml/events.py deleted file mode 100644 index f79ad38..0000000 --- a/libs/PyYAML-3.10/lib/yaml/events.py +++ /dev/null @@ -1,86 +0,0 @@ - -# Abstract classes. 
- -class Event(object): - def __init__(self, start_mark=None, end_mark=None): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -class NodeEvent(Event): - def __init__(self, anchor, start_mark=None, end_mark=None): - self.anchor = anchor - self.start_mark = start_mark - self.end_mark = end_mark - -class CollectionStartEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, - flow_style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class CollectionEndEvent(Event): - pass - -# Implementations. - -class StreamStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndEvent(Event): - pass - -class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - self.version = version - self.tags = tags - -class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - -class AliasEvent(NodeEvent): - pass - -class ScalarEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, value, - start_mark=None, end_mark=None, style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class SequenceStartEvent(CollectionStartEvent): - pass 
- -class SequenceEndEvent(CollectionEndEvent): - pass - -class MappingStartEvent(CollectionStartEvent): - pass - -class MappingEndEvent(CollectionEndEvent): - pass - diff --git a/libs/PyYAML-3.10/lib/yaml/loader.py b/libs/PyYAML-3.10/lib/yaml/loader.py deleted file mode 100644 index 293ff46..0000000 --- a/libs/PyYAML-3.10/lib/yaml/loader.py +++ /dev/null @@ -1,40 +0,0 @@ - -__all__ = ['BaseLoader', 'SafeLoader', 'Loader'] - -from reader import * -from scanner import * -from parser import * -from composer import * -from constructor import * -from resolver import * - -class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib/yaml/nodes.py b/libs/PyYAML-3.10/lib/yaml/nodes.py deleted file mode 100644 index c4f070c..0000000 --- a/libs/PyYAML-3.10/lib/yaml/nodes.py +++ /dev/null @@ -1,49 +0,0 @@ - -class Node(object): - def __init__(self, tag, value, start_mark, end_mark): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - value = self.value - #if isinstance(value, list): - # if len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' % 
len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) - value = repr(value) - return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) - -class ScalarNode(Node): - id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class SequenceNode(CollectionNode): - id = 'sequence' - -class MappingNode(CollectionNode): - id = 'mapping' - diff --git a/libs/PyYAML-3.10/lib/yaml/parser.py b/libs/PyYAML-3.10/lib/yaml/parser.py deleted file mode 100644 index f9e3057..0000000 --- a/libs/PyYAML-3.10/lib/yaml/parser.py +++ /dev/null @@ -1,589 +0,0 @@ - -# The following YAML grammar is LL(1) and is parsed by a recursive descent -# parser. -# -# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -# implicit_document ::= block_node DOCUMENT-END* -# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -# block_node_or_indentless_sequence ::= -# ALIAS -# | properties (block_content | indentless_block_sequence)? -# | block_content -# | indentless_block_sequence -# block_node ::= ALIAS -# | properties block_content? -# | block_content -# flow_node ::= ALIAS -# | properties flow_content? -# | flow_content -# properties ::= TAG ANCHOR? | ANCHOR TAG? 
-# block_content ::= block_collection | flow_collection | SCALAR -# flow_content ::= flow_collection | SCALAR -# block_collection ::= block_sequence | block_mapping -# flow_collection ::= flow_sequence | flow_mapping -# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -# block_mapping ::= BLOCK-MAPPING_START -# ((KEY block_node_or_indentless_sequence?)? -# (VALUE block_node_or_indentless_sequence?)?)* -# BLOCK-END -# flow_sequence ::= FLOW-SEQUENCE-START -# (flow_sequence_entry FLOW-ENTRY)* -# flow_sequence_entry? -# FLOW-SEQUENCE-END -# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# flow_mapping ::= FLOW-MAPPING-START -# (flow_mapping_entry FLOW-ENTRY)* -# flow_mapping_entry? -# FLOW-MAPPING-END -# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# -# FIRST sets: -# -# stream: { STREAM-START } -# explicit_document: { DIRECTIVE DOCUMENT-START } -# implicit_document: FIRST(block_node) -# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_sequence: { BLOCK-SEQUENCE-START } -# block_mapping: { BLOCK-MAPPING-START } -# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } -# indentless_sequence: { ENTRY } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_sequence: { FLOW-SEQUENCE-START } -# flow_mapping: { FLOW-MAPPING-START } -# flow_sequence_entry: { ALIAS ANCHOR 
TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } -# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } - -__all__ = ['Parser', 'ParserError'] - -from error import MarkedYAMLError -from tokens import * -from events import * -from scanner import * - -class ParserError(MarkedYAMLError): - pass - -class Parser(object): - # Since writing a recursive-descendant parser is a straightforward task, we - # do not give many comments here. - - DEFAULT_TAGS = { - u'!': u'!', - u'!!': u'tag:yaml.org,2002:', - } - - def __init__(self): - self.current_event = None - self.yaml_version = None - self.tag_handles = {} - self.states = [] - self.marks = [] - self.state = self.parse_stream_start - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def check_event(self, *choices): - # Check the type of the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - if self.current_event is not None: - if not choices: - return True - for choice in choices: - if isinstance(self.current_event, choice): - return True - return False - - def peek_event(self): - # Get the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - return self.current_event - - def get_event(self): - # Get the next event and proceed further. - if self.current_event is None: - if self.state: - self.current_event = self.state() - value = self.current_event - self.current_event = None - return value - - # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END - # implicit_document ::= block_node DOCUMENT-END* - # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* - - def parse_stream_start(self): - - # Parse the stream start. - token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) - - # Prepare the next state. 
- self.state = self.parse_implicit_document_start - - return event - - def parse_implicit_document_start(self): - - # Parse an implicit document. - if not self.check_token(DirectiveToken, DocumentStartToken, - StreamEndToken): - self.tag_handles = self.DEFAULT_TAGS - token = self.peek_token() - start_mark = end_mark = token.start_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=False) - - # Prepare the next state. - self.states.append(self.parse_document_end) - self.state = self.parse_block_node - - return event - - else: - return self.parse_document_start() - - def parse_document_start(self): - - # Parse any extra document end indicators. - while self.check_token(DocumentEndToken): - self.get_token() - - # Parse an explicit document. - if not self.check_token(StreamEndToken): - token = self.peek_token() - start_mark = token.start_mark - version, tags = self.process_directives() - if not self.check_token(DocumentStartToken): - raise ParserError(None, None, - "expected '', but found %r" - % self.peek_token().id, - self.peek_token().start_mark) - token = self.get_token() - end_mark = token.end_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=True, version=version, tags=tags) - self.states.append(self.parse_document_end) - self.state = self.parse_document_content - else: - # Parse the end of the stream. - token = self.get_token() - event = StreamEndEvent(token.start_mark, token.end_mark) - assert not self.states - assert not self.marks - self.state = None - return event - - def parse_document_end(self): - - # Parse the document end. - token = self.peek_token() - start_mark = end_mark = token.start_mark - explicit = False - if self.check_token(DocumentEndToken): - token = self.get_token() - end_mark = token.end_mark - explicit = True - event = DocumentEndEvent(start_mark, end_mark, - explicit=explicit) - - # Prepare the next state. 
- self.state = self.parse_document_start - - return event - - def parse_document_content(self): - if self.check_token(DirectiveToken, - DocumentStartToken, DocumentEndToken, StreamEndToken): - event = self.process_empty_scalar(self.peek_token().start_mark) - self.state = self.states.pop() - return event - else: - return self.parse_block_node() - - def process_directives(self): - self.yaml_version = None - self.tag_handles = {} - while self.check_token(DirectiveToken): - token = self.get_token() - if token.name == u'YAML': - if self.yaml_version is not None: - raise ParserError(None, None, - "found duplicate YAML directive", token.start_mark) - major, minor = token.value - if major != 1: - raise ParserError(None, None, - "found incompatible YAML document (version 1.* is required)", - token.start_mark) - self.yaml_version = token.value - elif token.name == u'TAG': - handle, prefix = token.value - if handle in self.tag_handles: - raise ParserError(None, None, - "duplicate tag handle %r" % handle.encode('utf-8'), - token.start_mark) - self.tag_handles[handle] = prefix - if self.tag_handles: - value = self.yaml_version, self.tag_handles.copy() - else: - value = self.yaml_version, None - for key in self.DEFAULT_TAGS: - if key not in self.tag_handles: - self.tag_handles[key] = self.DEFAULT_TAGS[key] - return value - - # block_node_or_indentless_sequence ::= ALIAS - # | properties (block_content | indentless_block_sequence)? - # | block_content - # | indentless_block_sequence - # block_node ::= ALIAS - # | properties block_content? - # | block_content - # flow_node ::= ALIAS - # | properties flow_content? - # | flow_content - # properties ::= TAG ANCHOR? | ANCHOR TAG? 
- # block_content ::= block_collection | flow_collection | SCALAR - # flow_content ::= flow_collection | SCALAR - # block_collection ::= block_sequence | block_mapping - # flow_collection ::= flow_sequence | flow_mapping - - def parse_block_node(self): - return self.parse_node(block=True) - - def parse_flow_node(self): - return self.parse_node() - - def parse_block_node_or_indentless_sequence(self): - return self.parse_node(block=True, indentless_sequence=True) - - def parse_node(self, block=False, indentless_sequence=False): - if self.check_token(AliasToken): - token = self.get_token() - event = AliasEvent(token.value, token.start_mark, token.end_mark) - self.state = self.states.pop() - else: - anchor = None - tag = None - start_mark = end_mark = tag_mark = None - if self.check_token(AnchorToken): - token = self.get_token() - start_mark = token.start_mark - end_mark = token.end_mark - anchor = token.value - if self.check_token(TagToken): - token = self.get_token() - tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - elif self.check_token(TagToken): - token = self.get_token() - start_mark = tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - if self.check_token(AnchorToken): - token = self.get_token() - end_mark = token.end_mark - anchor = token.value - if tag is not None: - handle, suffix = tag - if handle is not None: - if handle not in self.tag_handles: - raise ParserError("while parsing a node", start_mark, - "found undefined tag handle %r" % handle.encode('utf-8'), - tag_mark) - tag = self.tag_handles[handle]+suffix - else: - tag = suffix - #if tag == u'!': - # raise ParserError("while parsing a node", start_mark, - # "found non-specific tag '!'", tag_mark, - # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") - if start_mark is None: - start_mark = end_mark = self.peek_token().start_mark - event = None - implicit = (tag is None or tag == u'!') - if indentless_sequence and 
self.check_token(BlockEntryToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark) - self.state = self.parse_indentless_sequence_entry - else: - if self.check_token(ScalarToken): - token = self.get_token() - end_mark = token.end_mark - if (token.plain and tag is None) or tag == u'!': - implicit = (True, False) - elif tag is None: - implicit = (False, True) - else: - implicit = (False, False) - event = ScalarEvent(anchor, tag, implicit, token.value, - start_mark, end_mark, style=token.style) - self.state = self.states.pop() - elif self.check_token(FlowSequenceStartToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_sequence_first_entry - elif self.check_token(FlowMappingStartToken): - end_mark = self.peek_token().end_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_mapping_first_key - elif block and self.check_token(BlockSequenceStartToken): - end_mark = self.peek_token().start_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_sequence_first_entry - elif block and self.check_token(BlockMappingStartToken): - end_mark = self.peek_token().start_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_mapping_first_key - elif anchor is not None or tag is not None: - # Empty scalars are allowed even if a tag or an anchor is - # specified. 
- event = ScalarEvent(anchor, tag, (implicit, False), u'', - start_mark, end_mark) - self.state = self.states.pop() - else: - if block: - node = 'block' - else: - node = 'flow' - token = self.peek_token() - raise ParserError("while parsing a %s node" % node, start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) - return event - - # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END - - def parse_block_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_sequence_entry() - - def parse_block_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, BlockEndToken): - self.states.append(self.parse_block_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_block_sequence_entry - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block collection", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - # indentless_sequence ::= (BLOCK-ENTRY block_node?)+ - - def parse_indentless_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, - KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_indentless_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_indentless_sequence_entry - return self.process_empty_scalar(token.end_mark) - token = self.peek_token() - event = SequenceEndEvent(token.start_mark, token.start_mark) - self.state = self.states.pop() - return event - - # block_mapping ::= BLOCK-MAPPING_START - # ((KEY 
block_node_or_indentless_sequence?)? - # (VALUE block_node_or_indentless_sequence?)?)* - # BLOCK-END - - def parse_block_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_mapping_key() - - def parse_block_mapping_key(self): - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_value) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_value - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block mapping", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_block_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_key) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_block_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - # flow_sequence ::= FLOW-SEQUENCE-START - # (flow_sequence_entry FLOW-ENTRY)* - # flow_sequence_entry? - # FLOW-SEQUENCE-END - # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - # - # Note that while production rules for both flow_sequence_entry and - # flow_mapping_entry are equal, their interpretations are different. - # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` - # generate an inline mapping (set syntax). 
- - def parse_flow_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_sequence_entry(first=True) - - def parse_flow_sequence_entry(self, first=False): - if not self.check_token(FlowSequenceEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) - - if self.check_token(KeyToken): - token = self.peek_token() - event = MappingStartEvent(None, None, True, - token.start_mark, token.end_mark, - flow_style=True) - self.state = self.parse_flow_sequence_entry_mapping_key - return event - elif not self.check_token(FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry) - return self.parse_flow_node() - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_sequence_entry_mapping_key(self): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_value - return self.process_empty_scalar(token.end_mark) - - def parse_flow_sequence_entry_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_end) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_end - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_sequence_entry_mapping_end - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_sequence_entry_mapping_end(self): - self.state = self.parse_flow_sequence_entry - token = self.peek_token() - return MappingEndEvent(token.start_mark, token.start_mark) - - # flow_mapping ::= FLOW-MAPPING-START - # (flow_mapping_entry FLOW-ENTRY)* - # flow_mapping_entry? - # FLOW-MAPPING-END - # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - - def parse_flow_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_mapping_key(first=True) - - def parse_flow_mapping_key(self, first=False): - if not self.check_token(FlowMappingEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_value - return self.process_empty_scalar(token.end_mark) - elif not self.check_token(FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_empty_value) - return self.parse_flow_node() - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_key) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_mapping_empty_value(self): - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(self.peek_token().start_mark) - - def process_empty_scalar(self, mark): - return ScalarEvent(None, None, (True, False), u'', mark, mark) - diff --git a/libs/PyYAML-3.10/lib/yaml/reader.py b/libs/PyYAML-3.10/lib/yaml/reader.py deleted file mode 100644 index 3249e6b..0000000 --- a/libs/PyYAML-3.10/lib/yaml/reader.py +++ /dev/null @@ -1,190 +0,0 @@ -# This module contains abstractions for the input stream. You don't have to -# looks further, there are no pretty code. -# -# We define two classes here. -# -# Mark(source, line, column) -# It's just a record and its only use is producing nice error messages. -# Parser does not use it for any other purposes. -# -# Reader(source, data) -# Reader determines the encoding of `data` and converts it to unicode. -# Reader provides the following methods and attributes: -# reader.peek(length=1) - return the next `length` characters -# reader.forward(length=1) - move the current position to `length` characters. -# reader.index - the number of the current character. -# reader.line, stream.column - the line and the column of the current character. 
- -__all__ = ['Reader', 'ReaderError'] - -from error import YAMLError, Mark - -import codecs, re - -class ReaderError(YAMLError): - - def __init__(self, name, position, character, encoding, reason): - self.name = name - self.character = character - self.position = position - self.encoding = encoding - self.reason = reason - - def __str__(self): - if isinstance(self.character, str): - return "'%s' codec can't decode byte #x%02x: %s\n" \ - " in \"%s\", position %d" \ - % (self.encoding, ord(self.character), self.reason, - self.name, self.position) - else: - return "unacceptable character #x%04x: %s\n" \ - " in \"%s\", position %d" \ - % (self.character, self.reason, - self.name, self.position) - -class Reader(object): - # Reader: - # - determines the data encoding and converts it to unicode, - # - checks if characters are in allowed range, - # - adds '\0' to the end. - - # Reader accepts - # - a `str` object, - # - a `unicode` object, - # - a file-like object with its `read` method returning `str`, - # - a file-like object with its `read` method returning `unicode`. - - # Yeah, it's ugly and slow. 
- - def __init__(self, stream): - self.name = None - self.stream = None - self.stream_pointer = 0 - self.eof = True - self.buffer = u'' - self.pointer = 0 - self.raw_buffer = None - self.raw_decode = None - self.encoding = None - self.index = 0 - self.line = 0 - self.column = 0 - if isinstance(stream, unicode): - self.name = "" - self.check_printable(stream) - self.buffer = stream+u'\0' - elif isinstance(stream, str): - self.name = "" - self.raw_buffer = stream - self.determine_encoding() - else: - self.stream = stream - self.name = getattr(stream, 'name', "") - self.eof = False - self.raw_buffer = '' - self.determine_encoding() - - def peek(self, index=0): - try: - return self.buffer[self.pointer+index] - except IndexError: - self.update(index+1) - return self.buffer[self.pointer+index] - - def prefix(self, length=1): - if self.pointer+length >= len(self.buffer): - self.update(length) - return self.buffer[self.pointer:self.pointer+length] - - def forward(self, length=1): - if self.pointer+length+1 >= len(self.buffer): - self.update(length+1) - while length: - ch = self.buffer[self.pointer] - self.pointer += 1 - self.index += 1 - if ch in u'\n\x85\u2028\u2029' \ - or (ch == u'\r' and self.buffer[self.pointer] != u'\n'): - self.line += 1 - self.column = 0 - elif ch != u'\uFEFF': - self.column += 1 - length -= 1 - - def get_mark(self): - if self.stream is None: - return Mark(self.name, self.index, self.line, self.column, - self.buffer, self.pointer) - else: - return Mark(self.name, self.index, self.line, self.column, - None, None) - - def determine_encoding(self): - while not self.eof and len(self.raw_buffer) < 2: - self.update_raw() - if not isinstance(self.raw_buffer, unicode): - if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): - self.raw_decode = codecs.utf_16_le_decode - self.encoding = 'utf-16-le' - elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): - self.raw_decode = codecs.utf_16_be_decode - self.encoding = 'utf-16-be' - else: - self.raw_decode = 
codecs.utf_8_decode - self.encoding = 'utf-8' - self.update(1) - - NON_PRINTABLE = re.compile(u'[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') - def check_printable(self, data): - match = self.NON_PRINTABLE.search(data) - if match: - character = match.group() - position = self.index+(len(self.buffer)-self.pointer)+match.start() - raise ReaderError(self.name, position, ord(character), - 'unicode', "special characters are not allowed") - - def update(self, length): - if self.raw_buffer is None: - return - self.buffer = self.buffer[self.pointer:] - self.pointer = 0 - while len(self.buffer) < length: - if not self.eof: - self.update_raw() - if self.raw_decode is not None: - try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) - except UnicodeDecodeError, exc: - character = exc.object[exc.start] - if self.stream is not None: - position = self.stream_pointer-len(self.raw_buffer)+exc.start - else: - position = exc.start - raise ReaderError(self.name, position, character, - exc.encoding, exc.reason) - else: - data = self.raw_buffer - converted = len(data) - self.check_printable(data) - self.buffer += data - self.raw_buffer = self.raw_buffer[converted:] - if self.eof: - self.buffer += u'\0' - self.raw_buffer = None - break - - def update_raw(self, size=1024): - data = self.stream.read(size) - if data: - self.raw_buffer += data - self.stream_pointer += len(data) - else: - self.eof = True - -#try: -# import psyco -# psyco.bind(Reader) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/lib/yaml/representer.py b/libs/PyYAML-3.10/lib/yaml/representer.py deleted file mode 100644 index 5f4fc70..0000000 --- a/libs/PyYAML-3.10/lib/yaml/representer.py +++ /dev/null @@ -1,484 +0,0 @@ - -__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', - 'RepresenterError'] - -from error import * -from nodes import * - -import datetime - -import sys, copy_reg, types - -class RepresenterError(YAMLError): - pass - -class 
BaseRepresenter(object): - - yaml_representers = {} - yaml_multi_representers = {} - - def __init__(self, default_style=None, default_flow_style=None): - self.default_style = default_style - self.default_flow_style = default_flow_style - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent(self, data): - node = self.represent_data(data) - self.serialize(node) - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def get_classobj_bases(self, cls): - bases = [cls] - for base in cls.__bases__: - bases.extend(self.get_classobj_bases(base)) - return bases - - def represent_data(self, data): - if self.ignore_aliases(data): - self.alias_key = None - else: - self.alias_key = id(data) - if self.alias_key is not None: - if self.alias_key in self.represented_objects: - node = self.represented_objects[self.alias_key] - #if node is None: - # raise RepresenterError("recursive objects are not allowed: %r" % data) - return node - #self.represented_objects[alias_key] = None - self.object_keeper.append(data) - data_types = type(data).__mro__ - if type(data) is types.InstanceType: - data_types = self.get_classobj_bases(data.__class__)+list(data_types) - if data_types[0] in self.yaml_representers: - node = self.yaml_representers[data_types[0]](self, data) - else: - for data_type in data_types: - if data_type in self.yaml_multi_representers: - node = self.yaml_multi_representers[data_type](self, data) - break - else: - if None in self.yaml_multi_representers: - node = self.yaml_multi_representers[None](self, data) - elif None in self.yaml_representers: - node = self.yaml_representers[None](self, data) - else: - node = ScalarNode(None, unicode(data)) - #if alias_key is not None: - # self.represented_objects[alias_key] = node - return node - - def add_representer(cls, data_type, representer): - if not 'yaml_representers' in cls.__dict__: - cls.yaml_representers = cls.yaml_representers.copy() - 
cls.yaml_representers[data_type] = representer - add_representer = classmethod(add_representer) - - def add_multi_representer(cls, data_type, representer): - if not 'yaml_multi_representers' in cls.__dict__: - cls.yaml_multi_representers = cls.yaml_multi_representers.copy() - cls.yaml_multi_representers[data_type] = representer - add_multi_representer = classmethod(add_multi_representer) - - def represent_scalar(self, tag, value, style=None): - if style is None: - style = self.default_style - node = ScalarNode(tag, value, style=style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - return node - - def represent_sequence(self, tag, sequence, flow_style=None): - value = [] - node = SequenceNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - for item in sequence: - node_item = self.represent_data(item) - if not (isinstance(node_item, ScalarNode) and not node_item.style): - best_style = False - value.append(node_item) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - mapping = mapping.items() - mapping.sort() - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - 
else: - node.flow_style = best_style - return node - - def ignore_aliases(self, data): - return False - -class SafeRepresenter(BaseRepresenter): - - def ignore_aliases(self, data): - if data in [None, ()]: - return True - if isinstance(data, (str, unicode, bool, int, float)): - return True - - def represent_none(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:null', - u'null') - - def represent_str(self, data): - tag = None - style = None - try: - data = unicode(data, 'ascii') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - try: - data = unicode(data, 'utf-8') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - data = data.encode('base64') - tag = u'tag:yaml.org,2002:binary' - style = '|' - return self.represent_scalar(tag, data, style=style) - - def represent_unicode(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:str', data) - - def represent_bool(self, data): - if data: - value = u'true' - else: - value = u'false' - return self.represent_scalar(u'tag:yaml.org,2002:bool', value) - - def represent_int(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) - - def represent_long(self, data): - return self.represent_scalar(u'tag:yaml.org,2002:int', unicode(data)) - - inf_value = 1e300 - while repr(inf_value) != repr(inf_value*inf_value): - inf_value *= inf_value - - def represent_float(self, data): - if data != data or (data == 0.0 and data == 1.0): - value = u'.nan' - elif data == self.inf_value: - value = u'.inf' - elif data == -self.inf_value: - value = u'-.inf' - else: - value = unicode(repr(data)).lower() - # Note that in some cases `repr(data)` represents a float number - # without the decimal parts. For instance: - # >>> repr(1e17) - # '1e17' - # Unfortunately, this is not a valid float representation according - # to the definition of the `!!float` tag. We fix this by adding - # '.0' before the 'e' symbol. - if u'.' 
not in value and u'e' in value: - value = value.replace(u'e', u'.0e', 1) - return self.represent_scalar(u'tag:yaml.org,2002:float', value) - - def represent_list(self, data): - #pairs = (len(data) > 0 and isinstance(data, list)) - #if pairs: - # for item in data: - # if not isinstance(item, tuple) or len(item) != 2: - # pairs = False - # break - #if not pairs: - return self.represent_sequence(u'tag:yaml.org,2002:seq', data) - #value = [] - #for item_key, item_value in data: - # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', - # [(item_key, item_value)])) - #return SequenceNode(u'tag:yaml.org,2002:pairs', value) - - def represent_dict(self, data): - return self.represent_mapping(u'tag:yaml.org,2002:map', data) - - def represent_set(self, data): - value = {} - for key in data: - value[key] = None - return self.represent_mapping(u'tag:yaml.org,2002:set', value) - - def represent_date(self, data): - value = unicode(data.isoformat()) - return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) - - def represent_datetime(self, data): - value = unicode(data.isoformat(' ')) - return self.represent_scalar(u'tag:yaml.org,2002:timestamp', value) - - def represent_yaml_object(self, tag, data, cls, flow_style=None): - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__.copy() - return self.represent_mapping(tag, state, flow_style=flow_style) - - def represent_undefined(self, data): - raise RepresenterError("cannot represent an object: %s" % data) - -SafeRepresenter.add_representer(type(None), - SafeRepresenter.represent_none) - -SafeRepresenter.add_representer(str, - SafeRepresenter.represent_str) - -SafeRepresenter.add_representer(unicode, - SafeRepresenter.represent_unicode) - -SafeRepresenter.add_representer(bool, - SafeRepresenter.represent_bool) - -SafeRepresenter.add_representer(int, - SafeRepresenter.represent_int) - -SafeRepresenter.add_representer(long, - SafeRepresenter.represent_long) - 
-SafeRepresenter.add_representer(float, - SafeRepresenter.represent_float) - -SafeRepresenter.add_representer(list, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(tuple, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(dict, - SafeRepresenter.represent_dict) - -SafeRepresenter.add_representer(set, - SafeRepresenter.represent_set) - -SafeRepresenter.add_representer(datetime.date, - SafeRepresenter.represent_date) - -SafeRepresenter.add_representer(datetime.datetime, - SafeRepresenter.represent_datetime) - -SafeRepresenter.add_representer(None, - SafeRepresenter.represent_undefined) - -class Representer(SafeRepresenter): - - def represent_str(self, data): - tag = None - style = None - try: - data = unicode(data, 'ascii') - tag = u'tag:yaml.org,2002:str' - except UnicodeDecodeError: - try: - data = unicode(data, 'utf-8') - tag = u'tag:yaml.org,2002:python/str' - except UnicodeDecodeError: - data = data.encode('base64') - tag = u'tag:yaml.org,2002:binary' - style = '|' - return self.represent_scalar(tag, data, style=style) - - def represent_unicode(self, data): - tag = None - try: - data.encode('ascii') - tag = u'tag:yaml.org,2002:python/unicode' - except UnicodeEncodeError: - tag = u'tag:yaml.org,2002:str' - return self.represent_scalar(tag, data) - - def represent_long(self, data): - tag = u'tag:yaml.org,2002:int' - if int(data) is not data: - tag = u'tag:yaml.org,2002:python/long' - return self.represent_scalar(tag, unicode(data)) - - def represent_complex(self, data): - if data.imag == 0.0: - data = u'%r' % data.real - elif data.real == 0.0: - data = u'%rj' % data.imag - elif data.imag > 0: - data = u'%r+%rj' % (data.real, data.imag) - else: - data = u'%r%rj' % (data.real, data.imag) - return self.represent_scalar(u'tag:yaml.org,2002:python/complex', data) - - def represent_tuple(self, data): - return self.represent_sequence(u'tag:yaml.org,2002:python/tuple', data) - - def represent_name(self, data): - name = u'%s.%s' % 
(data.__module__, data.__name__) - return self.represent_scalar(u'tag:yaml.org,2002:python/name:'+name, u'') - - def represent_module(self, data): - return self.represent_scalar( - u'tag:yaml.org,2002:python/module:'+data.__name__, u'') - - def represent_instance(self, data): - # For instances of classic classes, we use __getinitargs__ and - # __getstate__ to serialize the data. - - # If data.__getinitargs__ exists, the object must be reconstructed by - # calling cls(**args), where args is a tuple returned by - # __getinitargs__. Otherwise, the cls.__init__ method should never be - # called and the class instance is created by instantiating a trivial - # class and assigning to the instance's __class__ variable. - - # If data.__getstate__ exists, it returns the state of the object. - # Otherwise, the state of the object is data.__dict__. - - # We produce either a !!python/object or !!python/object/new node. - # If data.__getinitargs__ does not exist and state is a dictionary, we - # produce a !!python/object node . Otherwise we produce a - # !!python/object/new node. - - cls = data.__class__ - class_name = u'%s.%s' % (cls.__module__, cls.__name__) - args = None - state = None - if hasattr(data, '__getinitargs__'): - args = list(data.__getinitargs__()) - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__ - if args is None and isinstance(state, dict): - return self.represent_mapping( - u'tag:yaml.org,2002:python/object:'+class_name, state) - if isinstance(state, dict) and not state: - return self.represent_sequence( - u'tag:yaml.org,2002:python/object/new:'+class_name, args) - value = {} - if args: - value['args'] = args - value['state'] = state - return self.represent_mapping( - u'tag:yaml.org,2002:python/object/new:'+class_name, value) - - def represent_object(self, data): - # We use __reduce__ API to save the data. 
data.__reduce__ returns - # a tuple of length 2-5: - # (function, args, state, listitems, dictitems) - - # For reconstructing, we calls function(*args), then set its state, - # listitems, and dictitems if they are not None. - - # A special case is when function.__name__ == '__newobj__'. In this - # case we create the object with args[0].__new__(*args). - - # Another special case is when __reduce__ returns a string - we don't - # support it. - - # We produce a !!python/object, !!python/object/new or - # !!python/object/apply node. - - cls = type(data) - if cls in copy_reg.dispatch_table: - reduce = copy_reg.dispatch_table[cls](data) - elif hasattr(data, '__reduce_ex__'): - reduce = data.__reduce_ex__(2) - elif hasattr(data, '__reduce__'): - reduce = data.__reduce__() - else: - raise RepresenterError("cannot represent object: %r" % data) - reduce = (list(reduce)+[None]*5)[:5] - function, args, state, listitems, dictitems = reduce - args = list(args) - if state is None: - state = {} - if listitems is not None: - listitems = list(listitems) - if dictitems is not None: - dictitems = dict(dictitems) - if function.__name__ == '__newobj__': - function = args[0] - args = args[1:] - tag = u'tag:yaml.org,2002:python/object/new:' - newobj = True - else: - tag = u'tag:yaml.org,2002:python/object/apply:' - newobj = False - function_name = u'%s.%s' % (function.__module__, function.__name__) - if not args and not listitems and not dictitems \ - and isinstance(state, dict) and newobj: - return self.represent_mapping( - u'tag:yaml.org,2002:python/object:'+function_name, state) - if not listitems and not dictitems \ - and isinstance(state, dict) and not state: - return self.represent_sequence(tag+function_name, args) - value = {} - if args: - value['args'] = args - if state or not isinstance(state, dict): - value['state'] = state - if listitems: - value['listitems'] = listitems - if dictitems: - value['dictitems'] = dictitems - return self.represent_mapping(tag+function_name, value) 
- -Representer.add_representer(str, - Representer.represent_str) - -Representer.add_representer(unicode, - Representer.represent_unicode) - -Representer.add_representer(long, - Representer.represent_long) - -Representer.add_representer(complex, - Representer.represent_complex) - -Representer.add_representer(tuple, - Representer.represent_tuple) - -Representer.add_representer(type, - Representer.represent_name) - -Representer.add_representer(types.ClassType, - Representer.represent_name) - -Representer.add_representer(types.FunctionType, - Representer.represent_name) - -Representer.add_representer(types.BuiltinFunctionType, - Representer.represent_name) - -Representer.add_representer(types.ModuleType, - Representer.represent_module) - -Representer.add_multi_representer(types.InstanceType, - Representer.represent_instance) - -Representer.add_multi_representer(object, - Representer.represent_object) - diff --git a/libs/PyYAML-3.10/lib/yaml/resolver.py b/libs/PyYAML-3.10/lib/yaml/resolver.py deleted file mode 100644 index 6b5ab87..0000000 --- a/libs/PyYAML-3.10/lib/yaml/resolver.py +++ /dev/null @@ -1,224 +0,0 @@ - -__all__ = ['BaseResolver', 'Resolver'] - -from error import * -from nodes import * - -import re - -class ResolverError(YAMLError): - pass - -class BaseResolver(object): - - DEFAULT_SCALAR_TAG = u'tag:yaml.org,2002:str' - DEFAULT_SEQUENCE_TAG = u'tag:yaml.org,2002:seq' - DEFAULT_MAPPING_TAG = u'tag:yaml.org,2002:map' - - yaml_implicit_resolvers = {} - yaml_path_resolvers = {} - - def __init__(self): - self.resolver_exact_paths = [] - self.resolver_prefix_paths = [] - - def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: - cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() - if first is None: - first = [None] - for ch in first: - cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) - add_implicit_resolver = classmethod(add_implicit_resolver) - - def add_path_resolver(cls, tag, 
path, kind=None): - # Note: `add_path_resolver` is experimental. The API could be changed. - # `new_path` is a pattern that is matched against the path from the - # root to the node that is being considered. `node_path` elements are - # tuples `(node_check, index_check)`. `node_check` is a node class: - # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` - # matches any kind of a node. `index_check` could be `None`, a boolean - # value, a string value, or a number. `None` and `False` match against - # any _value_ of sequence and mapping nodes. `True` matches against - # any _key_ of a mapping node. A string `index_check` matches against - # a mapping value that corresponds to a scalar key which content is - # equal to the `index_check` value. An integer `index_check` matches - # against a sequence value with the index equal to `index_check`. - if not 'yaml_path_resolvers' in cls.__dict__: - cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() - new_path = [] - for element in path: - if isinstance(element, (list, tuple)): - if len(element) == 2: - node_check, index_check = element - elif len(element) == 1: - node_check = element[0] - index_check = True - else: - raise ResolverError("Invalid path element: %s" % element) - else: - node_check = None - index_check = element - if node_check is str: - node_check = ScalarNode - elif node_check is list: - node_check = SequenceNode - elif node_check is dict: - node_check = MappingNode - elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ - and not isinstance(node_check, basestring) \ - and node_check is not None: - raise ResolverError("Invalid node checker: %s" % node_check) - if not isinstance(index_check, (basestring, int)) \ - and index_check is not None: - raise ResolverError("Invalid index checker: %s" % index_check) - new_path.append((node_check, index_check)) - if kind is str: - kind = ScalarNode - elif kind is list: - kind = SequenceNode - elif kind is dict: - kind = MappingNode - elif 
kind not in [ScalarNode, SequenceNode, MappingNode] \ - and kind is not None: - raise ResolverError("Invalid node kind: %s" % kind) - cls.yaml_path_resolvers[tuple(new_path), kind] = tag - add_path_resolver = classmethod(add_path_resolver) - - def descend_resolver(self, current_node, current_index): - if not self.yaml_path_resolvers: - return - exact_paths = {} - prefix_paths = [] - if current_node: - depth = len(self.resolver_prefix_paths) - for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): - if len(path) > depth: - prefix_paths.append((path, kind)) - else: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - for path, kind in self.yaml_path_resolvers: - if not path: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - prefix_paths.append((path, kind)) - self.resolver_exact_paths.append(exact_paths) - self.resolver_prefix_paths.append(prefix_paths) - - def ascend_resolver(self): - if not self.yaml_path_resolvers: - return - self.resolver_exact_paths.pop() - self.resolver_prefix_paths.pop() - - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): - node_check, index_check = path[depth-1] - if isinstance(node_check, basestring): - if current_node.tag != node_check: - return - elif node_check is not None: - if not isinstance(current_node, node_check): - return - if index_check is True and current_index is not None: - return - if (index_check is False or index_check is None) \ - and current_index is None: - return - if isinstance(index_check, basestring): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): - return - elif isinstance(index_check, int) and not isinstance(index_check, bool): - if index_check != current_index: - return - return True - - def resolve(self, kind, value, implicit): - if kind is ScalarNode and implicit[0]: - if value == u'': - resolvers = 
self.yaml_implicit_resolvers.get(u'', []) - else: - resolvers = self.yaml_implicit_resolvers.get(value[0], []) - resolvers += self.yaml_implicit_resolvers.get(None, []) - for tag, regexp in resolvers: - if regexp.match(value): - return tag - implicit = implicit[1] - if self.yaml_path_resolvers: - exact_paths = self.resolver_exact_paths[-1] - if kind in exact_paths: - return exact_paths[kind] - if None in exact_paths: - return exact_paths[None] - if kind is ScalarNode: - return self.DEFAULT_SCALAR_TAG - elif kind is SequenceNode: - return self.DEFAULT_SEQUENCE_TAG - elif kind is MappingNode: - return self.DEFAULT_MAPPING_TAG - -class Resolver(BaseResolver): - pass - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:bool', - re.compile(ur'''^(?:yes|Yes|YES|no|No|NO - |true|True|TRUE|false|False|FALSE - |on|On|ON|off|Off|OFF)$''', re.X), - list(u'yYnNtTfFoO')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:float', - re.compile(ur'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? - |\.[0-9_]+(?:[eE][-+][0-9]+)? - |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* - |[-+]?\.(?:inf|Inf|INF) - |\.(?:nan|NaN|NAN))$''', re.X), - list(u'-+0123456789.')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:int', - re.compile(ur'''^(?:[-+]?0b[0-1_]+ - |[-+]?0[0-7_]+ - |[-+]?(?:0|[1-9][0-9_]*) - |[-+]?0x[0-9a-fA-F_]+ - |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), - list(u'-+0123456789')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:merge', - re.compile(ur'^(?:<<)$'), - [u'<']) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:null', - re.compile(ur'''^(?: ~ - |null|Null|NULL - | )$''', re.X), - [u'~', u'n', u'N', u'']) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:timestamp', - re.compile(ur'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] - |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? - (?:[Tt]|[ \t]+)[0-9][0-9]? - :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? 
- (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), - list(u'0123456789')) - -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:value', - re.compile(ur'^(?:=)$'), - [u'=']) - -# The following resolver is only for documentation purposes. It cannot work -# because plain scalars cannot start with '!', '&', or '*'. -Resolver.add_implicit_resolver( - u'tag:yaml.org,2002:yaml', - re.compile(ur'^(?:!|&|\*)$'), - list(u'!&*')) - diff --git a/libs/PyYAML-3.10/lib/yaml/scanner.py b/libs/PyYAML-3.10/lib/yaml/scanner.py deleted file mode 100644 index 5228fad..0000000 --- a/libs/PyYAML-3.10/lib/yaml/scanner.py +++ /dev/null @@ -1,1457 +0,0 @@ - -# Scanner produces tokens of the following types: -# STREAM-START -# STREAM-END -# DIRECTIVE(name, value) -# DOCUMENT-START -# DOCUMENT-END -# BLOCK-SEQUENCE-START -# BLOCK-MAPPING-START -# BLOCK-END -# FLOW-SEQUENCE-START -# FLOW-MAPPING-START -# FLOW-SEQUENCE-END -# FLOW-MAPPING-END -# BLOCK-ENTRY -# FLOW-ENTRY -# KEY -# VALUE -# ALIAS(value) -# ANCHOR(value) -# TAG(value) -# SCALAR(value, plain, style) -# -# Read comments in the Scanner code for more details. -# - -__all__ = ['Scanner', 'ScannerError'] - -from error import MarkedYAMLError -from tokens import * - -class ScannerError(MarkedYAMLError): - pass - -class SimpleKey(object): - # See below simple keys treatment. - - def __init__(self, token_number, required, index, line, column, mark): - self.token_number = token_number - self.required = required - self.index = index - self.line = line - self.column = column - self.mark = mark - -class Scanner(object): - - def __init__(self): - """Initialize the scanner.""" - # It is assumed that Scanner and Reader will have a common descendant. - # Reader do the dirty work of checking for BOM and converting the - # input data to Unicode. It also adds NUL to the end. 
- # - # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. - - # Had we reached the end of the stream? - self.done = False - - # The number of unclosed '{' and '['. `flow_level == 0` means block - # context. - self.flow_level = 0 - - # List of processed tokens that are not yet emitted. - self.tokens = [] - - # Add the STREAM-START token. - self.fetch_stream_start() - - # Number of tokens that were emitted through the `get_token` method. - self.tokens_taken = 0 - - # The current indentation level. - self.indent = -1 - - # Past indentation levels. - self.indents = [] - - # Variables related to simple keys treatment. - - # A simple key is a key that is not denoted by the '?' indicator. - # Example of simple keys: - # --- - # block simple key: value - # ? not a simple key: - # : { flow simple key: value } - # We emit the KEY token before all keys, so when we find a potential - # simple key, we try to locate the corresponding ':' indicator. - # Simple keys should be limited to a single line and 1024 characters. - - # Can a simple key start at the current position? A simple key may - # start: - # - at the beginning of the line, not counting indentation spaces - # (in block context), - # - after '{', '[', ',' (in the flow context), - # - after '?', ':', '-' (in the block context). - # In the block context, this flag also signifies if a block collection - # may start at the current position. - self.allow_simple_key = True - - # Keep track of possible simple keys. This is a dictionary. The key - # is `flow_level`; there can be no more that one possible simple key - # for each level. The value is a SimpleKey record: - # (token_number, required, index, line, column, mark) - # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), - # '[', or '{' tokens. - self.possible_simple_keys = {} - - # Public methods. 
- - def check_token(self, *choices): - # Check if the next token is one of the given types. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - # Return the next token, but do not delete if from the queue. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - return self.tokens[0] - - def get_token(self): - # Return the next token. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - self.tokens_taken += 1 - return self.tokens.pop(0) - - # Private methods. - - def need_more_tokens(self): - if self.done: - return False - if not self.tokens: - return True - # The current token may be a potential simple key, so we - # need to look further. - self.stale_possible_simple_keys() - if self.next_possible_simple_key() == self.tokens_taken: - return True - - def fetch_more_tokens(self): - - # Eat whitespaces and comments until we reach the next token. - self.scan_to_next_token() - - # Remove obsolete possible simple keys. - self.stale_possible_simple_keys() - - # Compare the current indentation and column. It may add some tokens - # and decrease the current indentation level. - self.unwind_indent(self.column) - - # Peek the next character. - ch = self.peek() - - # Is it the end of stream? - if ch == u'\0': - return self.fetch_stream_end() - - # Is it a directive? - if ch == u'%' and self.check_directive(): - return self.fetch_directive() - - # Is it the document start? - if ch == u'-' and self.check_document_start(): - return self.fetch_document_start() - - # Is it the document end? - if ch == u'.' and self.check_document_end(): - return self.fetch_document_end() - - # TODO: support for BOM within a stream. - #if ch == u'\uFEFF': - # return self.fetch_bom() <-- issue BOMToken - - # Note: the order of the following checks is NOT significant. 
- - # Is it the flow sequence start indicator? - if ch == u'[': - return self.fetch_flow_sequence_start() - - # Is it the flow mapping start indicator? - if ch == u'{': - return self.fetch_flow_mapping_start() - - # Is it the flow sequence end indicator? - if ch == u']': - return self.fetch_flow_sequence_end() - - # Is it the flow mapping end indicator? - if ch == u'}': - return self.fetch_flow_mapping_end() - - # Is it the flow entry indicator? - if ch == u',': - return self.fetch_flow_entry() - - # Is it the block entry indicator? - if ch == u'-' and self.check_block_entry(): - return self.fetch_block_entry() - - # Is it the key indicator? - if ch == u'?' and self.check_key(): - return self.fetch_key() - - # Is it the value indicator? - if ch == u':' and self.check_value(): - return self.fetch_value() - - # Is it an alias? - if ch == u'*': - return self.fetch_alias() - - # Is it an anchor? - if ch == u'&': - return self.fetch_anchor() - - # Is it a tag? - if ch == u'!': - return self.fetch_tag() - - # Is it a literal scalar? - if ch == u'|' and not self.flow_level: - return self.fetch_literal() - - # Is it a folded scalar? - if ch == u'>' and not self.flow_level: - return self.fetch_folded() - - # Is it a single quoted scalar? - if ch == u'\'': - return self.fetch_single() - - # Is it a double quoted scalar? - if ch == u'\"': - return self.fetch_double() - - # It must be a plain scalar then. - if self.check_plain(): - return self.fetch_plain() - - # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" - % ch.encode('utf-8'), self.get_mark()) - - # Simple keys treatment. - - def next_possible_simple_key(self): - # Return the number of the nearest possible simple key. Actually we - # don't need to loop through the whole dictionary. 
We may replace it - # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number - min_token_number = None - for level in self.possible_simple_keys: - key = self.possible_simple_keys[level] - if min_token_number is None or key.token_number < min_token_number: - min_token_number = key.token_number - return min_token_number - - def stale_possible_simple_keys(self): - # Remove entries that are no longer possible simple keys. According to - # the YAML specification, simple keys - # - should be limited to a single line, - # - should be no longer than 1024 characters. - # Disabling this procedure will allow simple keys of any length and - # height (may cause problems if indentation is broken though). - for level in self.possible_simple_keys.keys(): - key = self.possible_simple_keys[level] - if key.line != self.line \ - or self.index-key.index > 1024: - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - del self.possible_simple_keys[level] - - def save_possible_simple_key(self): - # The next token may start a simple key. We check if it's possible - # and save its position. This function is called for - # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. - - # Check if a simple key is required at the current position. - required = not self.flow_level and self.indent == self.column - - # A simple key is required only if it is the first token in the current - # line. Therefore it is always allowed. - assert self.allow_simple_key or not required - - # The next token might be a simple key. Let's save it's number and - # position. 
- if self.allow_simple_key: - self.remove_possible_simple_key() - token_number = self.tokens_taken+len(self.tokens) - key = SimpleKey(token_number, required, - self.index, self.line, self.column, self.get_mark()) - self.possible_simple_keys[self.flow_level] = key - - def remove_possible_simple_key(self): - # Remove the saved possible key position at the current flow level. - if self.flow_level in self.possible_simple_keys: - key = self.possible_simple_keys[self.flow_level] - - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - - del self.possible_simple_keys[self.flow_level] - - # Indentation functions. - - def unwind_indent(self, column): - - ## In flow context, tokens should respect indentation. - ## Actually the condition should be `self.indent >= column` according to - ## the spec. But this condition will prohibit intuitively correct - ## constructions such as - ## key : { - ## } - #if self.flow_level and self.indent > column: - # raise ScannerError(None, None, - # "invalid intendation or unclosed '[' or '{'", - # self.get_mark()) - - # In the flow context, indentation is ignored. We make the scanner less - # restrictive then specification requires. - if self.flow_level: - return - - # In block context, we may need to issue the BLOCK-END tokens. - while self.indent > column: - mark = self.get_mark() - self.indent = self.indents.pop() - self.tokens.append(BlockEndToken(mark, mark)) - - def add_indent(self, column): - # Check if we need to increase indentation. - if self.indent < column: - self.indents.append(self.indent) - self.indent = column - return True - return False - - # Fetchers. - - def fetch_stream_start(self): - # We always add STREAM-START as the first token and STREAM-END as the - # last token. - - # Read the token. - mark = self.get_mark() - - # Add STREAM-START. 
- self.tokens.append(StreamStartToken(mark, mark, - encoding=self.encoding)) - - - def fetch_stream_end(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - self.possible_simple_keys = {} - - # Read the token. - mark = self.get_mark() - - # Add STREAM-END. - self.tokens.append(StreamEndToken(mark, mark)) - - # The steam is finished. - self.done = True - - def fetch_directive(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Scan and add DIRECTIVE. - self.tokens.append(self.scan_directive()) - - def fetch_document_start(self): - self.fetch_document_indicator(DocumentStartToken) - - def fetch_document_end(self): - self.fetch_document_indicator(DocumentEndToken) - - def fetch_document_indicator(self, TokenClass): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. Note that there could not be a block collection - # after '---'. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Add DOCUMENT-START or DOCUMENT-END. - start_mark = self.get_mark() - self.forward(3) - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_start(self): - self.fetch_flow_collection_start(FlowSequenceStartToken) - - def fetch_flow_mapping_start(self): - self.fetch_flow_collection_start(FlowMappingStartToken) - - def fetch_flow_collection_start(self, TokenClass): - - # '[' and '{' may start a simple key. - self.save_possible_simple_key() - - # Increase the flow level. - self.flow_level += 1 - - # Simple keys are allowed after '[' and '{'. - self.allow_simple_key = True - - # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_end(self): - self.fetch_flow_collection_end(FlowSequenceEndToken) - - def fetch_flow_mapping_end(self): - self.fetch_flow_collection_end(FlowMappingEndToken) - - def fetch_flow_collection_end(self, TokenClass): - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Decrease the flow level. - self.flow_level -= 1 - - # No simple keys after ']' or '}'. - self.allow_simple_key = False - - # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_entry(self): - - # Simple keys are allowed after ','. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add FLOW-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(FlowEntryToken(start_mark, end_mark)) - - def fetch_block_entry(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a new entry? - if not self.allow_simple_key: - raise ScannerError(None, None, - "sequence entries are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-SEQUENCE-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockSequenceStartToken(mark, mark)) - - # It's an error for the block entry to occur in the flow context, - # but we let the parser detect this. - else: - pass - - # Simple keys are allowed after '-'. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add BLOCK-ENTRY. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(BlockEntryToken(start_mark, end_mark)) - - def fetch_key(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a key (not nessesary a simple)? - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping keys are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-MAPPING-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after '?' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add KEY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(KeyToken(start_mark, end_mark)) - - def fetch_value(self): - - # Do we determine a simple key? - if self.flow_level in self.possible_simple_keys: - - # Add KEY. - key = self.possible_simple_keys[self.flow_level] - del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number-self.tokens_taken, - KeyToken(key.mark, key.mark)) - - # If this key starts a new block mapping, we need to add - # BLOCK-MAPPING-START. - if not self.flow_level: - if self.add_indent(key.column): - self.tokens.insert(key.token_number-self.tokens_taken, - BlockMappingStartToken(key.mark, key.mark)) - - # There cannot be two simple keys one after another. - self.allow_simple_key = False - - # It must be a part of a complex key. - else: - - # Block context needs additional checks. - # (Do we really need them? They will be catched by the parser - # anyway.) - if not self.flow_level: - - # We are allowed to start a complex value if and only if - # we can start a simple key. 
- if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping values are not allowed here", - self.get_mark()) - - # If this value starts a new block mapping, we need to add - # BLOCK-MAPPING-START. It will be detected as an error later by - # the parser. - if not self.flow_level: - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after ':' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add VALUE. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(ValueToken(start_mark, end_mark)) - - def fetch_alias(self): - - # ALIAS could be a simple key. - self.save_possible_simple_key() - - # No simple keys after ALIAS. - self.allow_simple_key = False - - # Scan and add ALIAS. - self.tokens.append(self.scan_anchor(AliasToken)) - - def fetch_anchor(self): - - # ANCHOR could start a simple key. - self.save_possible_simple_key() - - # No simple keys after ANCHOR. - self.allow_simple_key = False - - # Scan and add ANCHOR. - self.tokens.append(self.scan_anchor(AnchorToken)) - - def fetch_tag(self): - - # TAG could start a simple key. - self.save_possible_simple_key() - - # No simple keys after TAG. - self.allow_simple_key = False - - # Scan and add TAG. - self.tokens.append(self.scan_tag()) - - def fetch_literal(self): - self.fetch_block_scalar(style='|') - - def fetch_folded(self): - self.fetch_block_scalar(style='>') - - def fetch_block_scalar(self, style): - - # A simple key may follow a block scalar. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Scan and add SCALAR. 
- self.tokens.append(self.scan_block_scalar(style)) - - def fetch_single(self): - self.fetch_flow_scalar(style='\'') - - def fetch_double(self): - self.fetch_flow_scalar(style='"') - - def fetch_flow_scalar(self, style): - - # A flow scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after flow scalars. - self.allow_simple_key = False - - # Scan and add SCALAR. - self.tokens.append(self.scan_flow_scalar(style)) - - def fetch_plain(self): - - # A plain scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after plain scalars. But note that `scan_plain` will - # change this flag if the scan is finished at the beginning of the - # line. - self.allow_simple_key = False - - # Scan and add SCALAR. May change `allow_simple_key`. - self.tokens.append(self.scan_plain()) - - # Checkers. - - def check_directive(self): - - # DIRECTIVE: ^ '%' ... - # The '%' indicator is already checked. - if self.column == 0: - return True - - def check_document_start(self): - - # DOCUMENT-START: ^ '---' (' '|'\n') - if self.column == 0: - if self.prefix(3) == u'---' \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return True - - def check_document_end(self): - - # DOCUMENT-END: ^ '...' (' '|'\n') - if self.column == 0: - if self.prefix(3) == u'...' \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return True - - def check_block_entry(self): - - # BLOCK-ENTRY: '-' (' '|'\n') - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_key(self): - - # KEY(flow context): '?' - if self.flow_level: - return True - - # KEY(block context): '?' 
(' '|'\n') - else: - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_value(self): - - # VALUE(flow context): ':' - if self.flow_level: - return True - - # VALUE(block context): ':' (' '|'\n') - else: - return self.peek(1) in u'\0 \t\r\n\x85\u2028\u2029' - - def check_plain(self): - - # A plain scalar may start with any non-space character except: - # '-', '?', ':', ',', '[', ']', '{', '}', - # '#', '&', '*', '!', '|', '>', '\'', '\"', - # '%', '@', '`'. - # - # It may also start with - # '-', '?', ':' - # if it is followed by a non-space character. - # - # Note that we limit the last rule to the block context (except the - # '-' character) because we want the flow context to be space - # independent. - ch = self.peek() - return ch not in u'\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ - or (self.peek(1) not in u'\0 \t\r\n\x85\u2028\u2029' - and (ch == u'-' or (not self.flow_level and ch in u'?:'))) - - # Scanners. - - def scan_to_next_token(self): - # We ignore spaces, line breaks and comments. - # If we find a line break in the block context, we set the flag - # `allow_simple_key` on. - # The byte order mark is stripped if it's the first character in the - # stream. We do not yet support BOM inside the stream as the - # specification requires. Any such mark will be considered as a part - # of the document. - # - # TODO: We need to make tab handling rules more sane. A good rule is - # Tabs cannot precede tokens - # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, - # KEY(block), VALUE(block), BLOCK-ENTRY - # So the checking code is - # if : - # self.allow_simple_keys = False - # We also need to add the check for `allow_simple_keys == True` to - # `unwind_indent` before issuing BLOCK-END. - # Scanners for block, flow, and plain scalars need to be modified. 
- - if self.index == 0 and self.peek() == u'\uFEFF': - self.forward() - found = False - while not found: - while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - if self.scan_line_break(): - if not self.flow_level: - self.allow_simple_key = True - else: - found = True - - def scan_directive(self): - # See the specification for details. - start_mark = self.get_mark() - self.forward() - name = self.scan_directive_name(start_mark) - value = None - if name == u'YAML': - value = self.scan_yaml_directive_value(start_mark) - end_mark = self.get_mark() - elif name == u'TAG': - value = self.scan_tag_directive_value(start_mark) - end_mark = self.get_mark() - else: - end_mark = self.get_mark() - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - self.scan_directive_ignored_line(start_mark) - return DirectiveToken(name, value, start_mark, end_mark) - - def scan_directive_name(self, start_mark): - # See the specification for details. - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - return value - - def scan_yaml_directive_value(self, start_mark): - # See the specification for details. 
- while self.peek() == u' ': - self.forward() - major = self.scan_yaml_directive_number(start_mark) - if self.peek() != '.': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or '.', but found %r" - % self.peek().encode('utf-8'), - self.get_mark()) - self.forward() - minor = self.scan_yaml_directive_number(start_mark) - if self.peek() not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or ' ', but found %r" - % self.peek().encode('utf-8'), - self.get_mark()) - return (major, minor) - - def scan_yaml_directive_number(self, start_mark): - # See the specification for details. - ch = self.peek() - if not (u'0' <= ch <= u'9'): - raise ScannerError("while scanning a directive", start_mark, - "expected a digit, but found %r" % ch.encode('utf-8'), - self.get_mark()) - length = 0 - while u'0' <= self.peek(length) <= u'9': - length += 1 - value = int(self.prefix(length)) - self.forward(length) - return value - - def scan_tag_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == u' ': - self.forward() - handle = self.scan_tag_directive_handle(start_mark) - while self.peek() == u' ': - self.forward() - prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) - - def scan_tag_directive_handle(self, start_mark): - # See the specification for details. - value = self.scan_tag_handle('directive', start_mark) - ch = self.peek() - if ch != u' ': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - return value - - def scan_tag_directive_prefix(self, start_mark): - # See the specification for details. 
- value = self.scan_tag_uri('directive', start_mark) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - return value - - def scan_directive_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in u'\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a comment or a line break, but found %r" - % ch.encode('utf-8'), self.get_mark()) - self.scan_line_break() - - def scan_anchor(self, TokenClass): - # The specification does not restrict characters for anchors and - # aliases. This may lead to problems, for instance, the document: - # [ *alias, value ] - # can be interpteted in two ways, as - # [ "value" ] - # and - # [ *alias , "value" ] - # Therefore we restrict aliases to numbers and ASCII letters. 
- start_mark = self.get_mark() - indicator = self.peek() - if indicator == u'*': - name = 'alias' - else: - name = 'anchor' - self.forward() - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in u'\0 \t\r\n\x85\u2028\u2029?:,]}%@`': - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch.encode('utf-8'), self.get_mark()) - end_mark = self.get_mark() - return TokenClass(value, start_mark, end_mark) - - def scan_tag(self): - # See the specification for details. - start_mark = self.get_mark() - ch = self.peek(1) - if ch == u'<': - handle = None - self.forward(2) - suffix = self.scan_tag_uri('tag', start_mark) - if self.peek() != u'>': - raise ScannerError("while parsing a tag", start_mark, - "expected '>', but found %r" % self.peek().encode('utf-8'), - self.get_mark()) - self.forward() - elif ch in u'\0 \t\r\n\x85\u2028\u2029': - handle = None - suffix = u'!' - self.forward() - else: - length = 1 - use_handle = False - while ch not in u'\0 \r\n\x85\u2028\u2029': - if ch == u'!': - use_handle = True - break - length += 1 - ch = self.peek(length) - handle = u'!' - if use_handle: - handle = self.scan_tag_handle('tag', start_mark) - else: - handle = u'!' 
- self.forward() - suffix = self.scan_tag_uri('tag', start_mark) - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a tag", start_mark, - "expected ' ', but found %r" % ch.encode('utf-8'), - self.get_mark()) - value = (handle, suffix) - end_mark = self.get_mark() - return TagToken(value, start_mark, end_mark) - - def scan_block_scalar(self, style): - # See the specification for details. - - if style == '>': - folded = True - else: - folded = False - - chunks = [] - start_mark = self.get_mark() - - # Scan the header. - self.forward() - chomping, increment = self.scan_block_scalar_indicators(start_mark) - self.scan_block_scalar_ignored_line(start_mark) - - # Determine the indentation level and go to the first non-empty line. - min_indent = self.indent+1 - if min_indent < 1: - min_indent = 1 - if increment is None: - breaks, max_indent, end_mark = self.scan_block_scalar_indentation() - indent = max(min_indent, max_indent) - else: - indent = min_indent+increment-1 - breaks, end_mark = self.scan_block_scalar_breaks(indent) - line_break = u'' - - # Scan the inner part of the block scalar. - while self.column == indent and self.peek() != u'\0': - chunks.extend(breaks) - leading_non_space = self.peek() not in u' \t' - length = 0 - while self.peek(length) not in u'\0\r\n\x85\u2028\u2029': - length += 1 - chunks.append(self.prefix(length)) - self.forward(length) - line_break = self.scan_line_break() - breaks, end_mark = self.scan_block_scalar_breaks(indent) - if self.column == indent and self.peek() != u'\0': - - # Unfortunately, folding rules are ambiguous. 
- # - # This is the folding according to the specification: - - if folded and line_break == u'\n' \ - and leading_non_space and self.peek() not in u' \t': - if not breaks: - chunks.append(u' ') - else: - chunks.append(line_break) - - # This is Clark Evans's interpretation (also in the spec - # examples): - # - #if folded and line_break == u'\n': - # if not breaks: - # if self.peek() not in ' \t': - # chunks.append(u' ') - # else: - # chunks.append(line_break) - #else: - # chunks.append(line_break) - else: - break - - # Chomp the tail. - if chomping is not False: - chunks.append(line_break) - if chomping is True: - chunks.extend(breaks) - - # We are done. - return ScalarToken(u''.join(chunks), False, start_mark, end_mark, - style) - - def scan_block_scalar_indicators(self, start_mark): - # See the specification for details. - chomping = None - increment = None - ch = self.peek() - if ch in u'+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch in u'0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - elif ch in u'0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - ch = self.peek() - if ch in u'+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch not in u'\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected chomping or indentation indicators, but found %r" - % ch.encode('utf-8'), self.get_mark()) - return chomping, increment - - def scan_block_scalar_ignored_line(self, start_mark): - # See the specification for details. 
- while self.peek() == u' ': - self.forward() - if self.peek() == u'#': - while self.peek() not in u'\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in u'\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected a comment or a line break, but found %r" - % ch.encode('utf-8'), self.get_mark()) - self.scan_line_break() - - def scan_block_scalar_indentation(self): - # See the specification for details. - chunks = [] - max_indent = 0 - end_mark = self.get_mark() - while self.peek() in u' \r\n\x85\u2028\u2029': - if self.peek() != u' ': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - else: - self.forward() - if self.column > max_indent: - max_indent = self.column - return chunks, max_indent, end_mark - - def scan_block_scalar_breaks(self, indent): - # See the specification for details. - chunks = [] - end_mark = self.get_mark() - while self.column < indent and self.peek() == u' ': - self.forward() - while self.peek() in u'\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - while self.column < indent and self.peek() == u' ': - self.forward() - return chunks, end_mark - - def scan_flow_scalar(self, style): - # See the specification for details. - # Note that we loose indentation rules for quoted scalars. Quoted - # scalars don't need to adhere indentation because " and ' clearly - # mark the beginning and the end of them. Therefore we are less - # restrictive then the specification requires. We only need to check - # that document separators are not included in scalars. 
- if style == '"': - double = True - else: - double = False - chunks = [] - start_mark = self.get_mark() - quote = self.peek() - self.forward() - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - while self.peek() != quote: - chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - self.forward() - end_mark = self.get_mark() - return ScalarToken(u''.join(chunks), False, start_mark, end_mark, - style) - - ESCAPE_REPLACEMENTS = { - u'0': u'\0', - u'a': u'\x07', - u'b': u'\x08', - u't': u'\x09', - u'\t': u'\x09', - u'n': u'\x0A', - u'v': u'\x0B', - u'f': u'\x0C', - u'r': u'\x0D', - u'e': u'\x1B', - u' ': u'\x20', - u'\"': u'\"', - u'\\': u'\\', - u'N': u'\x85', - u'_': u'\xA0', - u'L': u'\u2028', - u'P': u'\u2029', - } - - ESCAPE_CODES = { - u'x': 2, - u'u': 4, - u'U': 8, - } - - def scan_flow_scalar_non_spaces(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - length = 0 - while self.peek(length) not in u'\'\"\\\0 \t\r\n\x85\u2028\u2029': - length += 1 - if length: - chunks.append(self.prefix(length)) - self.forward(length) - ch = self.peek() - if not double and ch == u'\'' and self.peek(1) == u'\'': - chunks.append(u'\'') - self.forward(2) - elif (double and ch == u'\'') or (not double and ch in u'\"\\'): - chunks.append(ch) - self.forward() - elif double and ch == u'\\': - self.forward() - ch = self.peek() - if ch in self.ESCAPE_REPLACEMENTS: - chunks.append(self.ESCAPE_REPLACEMENTS[ch]) - self.forward() - elif ch in self.ESCAPE_CODES: - length = self.ESCAPE_CODES[ch] - self.forward() - for k in range(length): - if self.peek(k) not in u'0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k).encode('utf-8')), self.get_mark()) - code = int(self.prefix(length), 16) - 
chunks.append(unichr(code)) - self.forward(length) - elif ch in u'\r\n\x85\u2028\u2029': - self.scan_line_break() - chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) - else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch.encode('utf-8'), self.get_mark()) - else: - return chunks - - def scan_flow_scalar_spaces(self, double, start_mark): - # See the specification for details. - chunks = [] - length = 0 - while self.peek(length) in u' \t': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch == u'\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) - elif ch in u'\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - breaks = self.scan_flow_scalar_breaks(double, start_mark) - if line_break != u'\n': - chunks.append(line_break) - elif not breaks: - chunks.append(u' ') - chunks.extend(breaks) - else: - chunks.append(whitespaces) - return chunks - - def scan_flow_scalar_breaks(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - # Instead of checking indentation, we check for document - # separators. - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected document separator", self.get_mark()) - while self.peek() in u' \t': - self.forward() - if self.peek() in u'\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - else: - return chunks - - def scan_plain(self): - # See the specification for details. - # We add an additional restriction for the flow context: - # plain scalars in the flow context cannot contain ',', ':' and '?'. - # We also keep track of the `allow_simple_key` flag here. - # Indentation rules are loosed for the flow context. 
- chunks = [] - start_mark = self.get_mark() - end_mark = start_mark - indent = self.indent+1 - # We allow zero indentation for scalars, but then we need to check for - # document separators at the beginning of the line. - #if indent == 0: - # indent = 1 - spaces = [] - while True: - length = 0 - if self.peek() == u'#': - break - while True: - ch = self.peek(length) - if ch in u'\0 \t\r\n\x85\u2028\u2029' \ - or (not self.flow_level and ch == u':' and - self.peek(length+1) in u'\0 \t\r\n\x85\u2028\u2029') \ - or (self.flow_level and ch in u',:?[]{}'): - break - length += 1 - # It's not clear what we should do with ':' in the flow context. - if (self.flow_level and ch == u':' - and self.peek(length+1) not in u'\0 \t\r\n\x85\u2028\u2029,[]{}'): - self.forward(length) - raise ScannerError("while scanning a plain scalar", start_mark, - "found unexpected ':'", self.get_mark(), - "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.") - if length == 0: - break - self.allow_simple_key = False - chunks.extend(spaces) - chunks.append(self.prefix(length)) - self.forward(length) - end_mark = self.get_mark() - spaces = self.scan_plain_spaces(indent, start_mark) - if not spaces or self.peek() == u'#' \ - or (not self.flow_level and self.column < indent): - break - return ScalarToken(u''.join(chunks), True, start_mark, end_mark) - - def scan_plain_spaces(self, indent, start_mark): - # See the specification for details. - # The specification is really confusing about tabs in plain scalars. - # We just forbid them completely. Do not use tabs in YAML! 
- chunks = [] - length = 0 - while self.peek(length) in u' ': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch in u'\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - self.allow_simple_key = True - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return - breaks = [] - while self.peek() in u' \r\n\x85\u2028\u2029': - if self.peek() == ' ': - self.forward() - else: - breaks.append(self.scan_line_break()) - prefix = self.prefix(3) - if (prefix == u'---' or prefix == u'...') \ - and self.peek(3) in u'\0 \t\r\n\x85\u2028\u2029': - return - if line_break != u'\n': - chunks.append(line_break) - elif not breaks: - chunks.append(u' ') - chunks.extend(breaks) - elif whitespaces: - chunks.append(whitespaces) - return chunks - - def scan_tag_handle(self, name, start_mark): - # See the specification for details. - # For some strange reasons, the specification does not allow '_' in - # tag handles. I have allowed it anyway. - ch = self.peek() - if ch != u'!': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch.encode('utf-8'), - self.get_mark()) - length = 1 - ch = self.peek(length) - if ch != u' ': - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-_': - length += 1 - ch = self.peek(length) - if ch != u'!': - self.forward(length) - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch.encode('utf-8'), - self.get_mark()) - length += 1 - value = self.prefix(length) - self.forward(length) - return value - - def scan_tag_uri(self, name, start_mark): - # See the specification for details. - # Note: we do not check if URI is well-formed. 
- chunks = [] - length = 0 - ch = self.peek(length) - while u'0' <= ch <= u'9' or u'A' <= ch <= u'Z' or u'a' <= ch <= u'z' \ - or ch in u'-;/?:@&=+$,_.!~*\'()[]%': - if ch == u'%': - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - chunks.append(self.scan_uri_escapes(name, start_mark)) - else: - length += 1 - ch = self.peek(length) - if length: - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - if not chunks: - raise ScannerError("while parsing a %s" % name, start_mark, - "expected URI, but found %r" % ch.encode('utf-8'), - self.get_mark()) - return u''.join(chunks) - - def scan_uri_escapes(self, name, start_mark): - # See the specification for details. - bytes = [] - mark = self.get_mark() - while self.peek() == u'%': - self.forward() - for k in range(2): - if self.peek(k) not in u'0123456789ABCDEFabcdef': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected URI escape sequence of 2 hexdecimal numbers, but found %r" % - (self.peek(k).encode('utf-8')), self.get_mark()) - bytes.append(chr(int(self.prefix(2), 16))) - self.forward(2) - try: - value = unicode(''.join(bytes), 'utf-8') - except UnicodeDecodeError, exc: - raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) - return value - - def scan_line_break(self): - # Transforms: - # '\r\n' : '\n' - # '\r' : '\n' - # '\n' : '\n' - # '\x85' : '\n' - # '\u2028' : '\u2028' - # '\u2029 : '\u2029' - # default : '' - ch = self.peek() - if ch in u'\r\n\x85': - if self.prefix(2) == u'\r\n': - self.forward(2) - else: - self.forward() - return u'\n' - elif ch in u'\u2028\u2029': - self.forward() - return ch - return u'' - -#try: -# import psyco -# psyco.bind(Scanner) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/lib/yaml/serializer.py b/libs/PyYAML-3.10/lib/yaml/serializer.py deleted file mode 100644 index 0bf1e96..0000000 --- a/libs/PyYAML-3.10/lib/yaml/serializer.py +++ /dev/null @@ -1,111 +0,0 @@ - -__all__ = 
['Serializer', 'SerializerError'] - -from error import YAMLError -from events import * -from nodes import * - -class SerializerError(YAMLError): - pass - -class Serializer(object): - - ANCHOR_TEMPLATE = u'id%03d' - - def __init__(self, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - self.use_encoding = encoding - self.use_explicit_start = explicit_start - self.use_explicit_end = explicit_end - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - self.closed = None - - def open(self): - if self.closed is None: - self.emit(StreamStartEvent(encoding=self.use_encoding)) - self.closed = False - elif self.closed: - raise SerializerError("serializer is closed") - else: - raise SerializerError("serializer is already opened") - - def close(self): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif not self.closed: - self.emit(StreamEndEvent()) - self.closed = True - - #def __del__(self): - # self.close() - - def serialize(self, node): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif self.closed: - raise SerializerError("serializer is closed") - self.emit(DocumentStartEvent(explicit=self.use_explicit_start, - version=self.use_version, tags=self.use_tags)) - self.anchor_node(node) - self.serialize_node(node, None, None) - self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - - def anchor_node(self, node): - if node in self.anchors: - if self.anchors[node] is None: - self.anchors[node] = self.generate_anchor(node) - else: - self.anchors[node] = None - if isinstance(node, SequenceNode): - for item in node.value: - self.anchor_node(item) - elif isinstance(node, MappingNode): - for key, value in node.value: - self.anchor_node(key) - self.anchor_node(value) - - def generate_anchor(self, node): - self.last_anchor_id += 1 - 
return self.ANCHOR_TEMPLATE % self.last_anchor_id - - def serialize_node(self, node, parent, index): - alias = self.anchors[node] - if node in self.serialized_nodes: - self.emit(AliasEvent(alias)) - else: - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if isinstance(node, ScalarNode): - detected_tag = self.resolve(ScalarNode, node.value, (True, False)) - default_tag = self.resolve(ScalarNode, node.value, (False, True)) - implicit = (node.tag == detected_tag), (node.tag == default_tag) - self.emit(ScalarEvent(alias, node.tag, implicit, node.value, - style=node.style)) - elif isinstance(node, SequenceNode): - implicit = (node.tag - == self.resolve(SequenceNode, node.value, True)) - self.emit(SequenceStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - index = 0 - for item in node.value: - self.serialize_node(item, node, index) - index += 1 - self.emit(SequenceEndEvent()) - elif isinstance(node, MappingNode): - implicit = (node.tag - == self.resolve(MappingNode, node.value, True)) - self.emit(MappingStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - for key, value in node.value: - self.serialize_node(key, node, None) - self.serialize_node(value, node, key) - self.emit(MappingEndEvent()) - self.ascend_resolver() - diff --git a/libs/PyYAML-3.10/lib/yaml/tokens.py b/libs/PyYAML-3.10/lib/yaml/tokens.py deleted file mode 100644 index 4d0b48a..0000000 --- a/libs/PyYAML-3.10/lib/yaml/tokens.py +++ /dev/null @@ -1,104 +0,0 @@ - -class Token(object): - def __init__(self, start_mark, end_mark): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] - attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -#class BOMToken(Token): -# id = '' - -class DirectiveToken(Token): - id = '' - def __init__(self, 
name, value, start_mark, end_mark): - self.name = name - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class DocumentStartToken(Token): - id = '' - -class DocumentEndToken(Token): - id = '' - -class StreamStartToken(Token): - id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndToken(Token): - id = '' - -class BlockSequenceStartToken(Token): - id = '' - -class BlockMappingStartToken(Token): - id = '' - -class BlockEndToken(Token): - id = '' - -class FlowSequenceStartToken(Token): - id = '[' - -class FlowMappingStartToken(Token): - id = '{' - -class FlowSequenceEndToken(Token): - id = ']' - -class FlowMappingEndToken(Token): - id = '}' - -class KeyToken(Token): - id = '?' - -class ValueToken(Token): - id = ':' - -class BlockEntryToken(Token): - id = '-' - -class FlowEntryToken(Token): - id = ',' - -class AliasToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class AnchorToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class TagToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class ScalarToken(Token): - id = '' - def __init__(self, value, plain, start_mark, end_mark, style=None): - self.value = value - self.plain = plain - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - diff --git a/libs/PyYAML-3.10/lib3/yaml/__init__.py b/libs/PyYAML-3.10/lib3/yaml/__init__.py deleted file mode 100644 index 0033d9c..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/__init__.py +++ /dev/null @@ -1,312 +0,0 @@ - -from .error import * - -from .tokens import * -from .events import * -from 
.nodes import * - -from .loader import * -from .dumper import * - -__version__ = '3.10' -try: - from .cyaml import * - __with_libyaml__ = True -except ImportError: - __with_libyaml__ = False - -import io - -def scan(stream, Loader=Loader): - """ - Scan a YAML stream and produce scanning tokens. - """ - loader = Loader(stream) - try: - while loader.check_token(): - yield loader.get_token() - finally: - loader.dispose() - -def parse(stream, Loader=Loader): - """ - Parse a YAML stream and produce parsing events. - """ - loader = Loader(stream) - try: - while loader.check_event(): - yield loader.get_event() - finally: - loader.dispose() - -def compose(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding representation tree. - """ - loader = Loader(stream) - try: - return loader.get_single_node() - finally: - loader.dispose() - -def compose_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding representation trees. - """ - loader = Loader(stream) - try: - while loader.check_node(): - yield loader.get_node() - finally: - loader.dispose() - -def load(stream, Loader=Loader): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - """ - loader = Loader(stream) - try: - return loader.get_single_data() - finally: - loader.dispose() - -def load_all(stream, Loader=Loader): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - """ - loader = Loader(stream) - try: - while loader.check_data(): - yield loader.get_data() - finally: - loader.dispose() - -def safe_load(stream): - """ - Parse the first YAML document in a stream - and produce the corresponding Python object. - Resolve only basic YAML tags. - """ - return load(stream, SafeLoader) - -def safe_load_all(stream): - """ - Parse all YAML documents in a stream - and produce corresponding Python objects. - Resolve only basic YAML tags. 
- """ - return load_all(stream, SafeLoader) - -def emit(events, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - """ - Emit YAML parsing events into a stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - stream = io.StringIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - try: - for event in events: - dumper.emit(event) - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize_all(nodes, stream=None, Dumper=Dumper, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of representation trees into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for node in nodes: - dumper.serialize(node) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def serialize(node, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a representation tree into a YAML stream. - If stream is None, return the produced string instead. 
- """ - return serialize_all([node], stream, Dumper=Dumper, **kwds) - -def dump_all(documents, stream=None, Dumper=Dumper, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - """ - Serialize a sequence of Python objects into a YAML stream. - If stream is None, return the produced string instead. - """ - getvalue = None - if stream is None: - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - getvalue = stream.getvalue - dumper = Dumper(stream, default_style=default_style, - default_flow_style=default_flow_style, - canonical=canonical, indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break, - encoding=encoding, version=version, tags=tags, - explicit_start=explicit_start, explicit_end=explicit_end) - try: - dumper.open() - for data in documents: - dumper.represent(data) - dumper.close() - finally: - dumper.dispose() - if getvalue: - return getvalue() - -def dump(data, stream=None, Dumper=Dumper, **kwds): - """ - Serialize a Python object into a YAML stream. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=Dumper, **kwds) - -def safe_dump_all(documents, stream=None, **kwds): - """ - Serialize a sequence of Python objects into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all(documents, stream, Dumper=SafeDumper, **kwds) - -def safe_dump(data, stream=None, **kwds): - """ - Serialize a Python object into a YAML stream. - Produce only basic YAML tags. - If stream is None, return the produced string instead. - """ - return dump_all([data], stream, Dumper=SafeDumper, **kwds) - -def add_implicit_resolver(tag, regexp, first=None, - Loader=Loader, Dumper=Dumper): - """ - Add an implicit scalar detector. 
- If an implicit scalar value matches the given regexp, - the corresponding tag is assigned to the scalar. - first is a sequence of possible initial characters or None. - """ - Loader.add_implicit_resolver(tag, regexp, first) - Dumper.add_implicit_resolver(tag, regexp, first) - -def add_path_resolver(tag, path, kind=None, Loader=Loader, Dumper=Dumper): - """ - Add a path based resolver for the given tag. - A path is a list of keys that forms a path - to a node in the representation tree. - Keys can be string values, integers, or None. - """ - Loader.add_path_resolver(tag, path, kind) - Dumper.add_path_resolver(tag, path, kind) - -def add_constructor(tag, constructor, Loader=Loader): - """ - Add a constructor for the given tag. - Constructor is a function that accepts a Loader instance - and a node object and produces the corresponding Python object. - """ - Loader.add_constructor(tag, constructor) - -def add_multi_constructor(tag_prefix, multi_constructor, Loader=Loader): - """ - Add a multi-constructor for the given tag prefix. - Multi-constructor is called for a node if its tag starts with tag_prefix. - Multi-constructor accepts a Loader instance, a tag suffix, - and a node object and produces the corresponding Python object. - """ - Loader.add_multi_constructor(tag_prefix, multi_constructor) - -def add_representer(data_type, representer, Dumper=Dumper): - """ - Add a representer for the given type. - Representer is a function accepting a Dumper instance - and an instance of the given data type - and producing the corresponding representation node. - """ - Dumper.add_representer(data_type, representer) - -def add_multi_representer(data_type, multi_representer, Dumper=Dumper): - """ - Add a representer for the given type. - Multi-representer is a function accepting a Dumper instance - and an instance of the given data type or subtype - and producing the corresponding representation node. 
- """ - Dumper.add_multi_representer(data_type, multi_representer) - -class YAMLObjectMetaclass(type): - """ - The metaclass for YAMLObject. - """ - def __init__(cls, name, bases, kwds): - super(YAMLObjectMetaclass, cls).__init__(name, bases, kwds) - if 'yaml_tag' in kwds and kwds['yaml_tag'] is not None: - cls.yaml_loader.add_constructor(cls.yaml_tag, cls.from_yaml) - cls.yaml_dumper.add_representer(cls, cls.to_yaml) - -class YAMLObject(metaclass=YAMLObjectMetaclass): - """ - An object that can dump itself to a YAML stream - and load itself from a YAML stream. - """ - - __slots__ = () # no direct instantiation, so allow immutable subclasses - - yaml_loader = Loader - yaml_dumper = Dumper - - yaml_tag = None - yaml_flow_style = None - - @classmethod - def from_yaml(cls, loader, node): - """ - Convert a representation node to a Python object. - """ - return loader.construct_yaml_object(node, cls) - - @classmethod - def to_yaml(cls, dumper, data): - """ - Convert a Python object to a representation node. - """ - return dumper.represent_yaml_object(cls.yaml_tag, data, cls, - flow_style=cls.yaml_flow_style) - diff --git a/libs/PyYAML-3.10/lib3/yaml/composer.py b/libs/PyYAML-3.10/lib3/yaml/composer.py deleted file mode 100644 index d5c6a7a..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/composer.py +++ /dev/null @@ -1,139 +0,0 @@ - -__all__ = ['Composer', 'ComposerError'] - -from .error import MarkedYAMLError -from .events import * -from .nodes import * - -class ComposerError(MarkedYAMLError): - pass - -class Composer: - - def __init__(self): - self.anchors = {} - - def check_node(self): - # Drop the STREAM-START event. - if self.check_event(StreamStartEvent): - self.get_event() - - # If there are more documents available? - return not self.check_event(StreamEndEvent) - - def get_node(self): - # Get the root node of the next document. - if not self.check_event(StreamEndEvent): - return self.compose_document() - - def get_single_node(self): - # Drop the STREAM-START event. 
- self.get_event() - - # Compose a document if the stream is not empty. - document = None - if not self.check_event(StreamEndEvent): - document = self.compose_document() - - # Ensure that the stream contains no more documents. - if not self.check_event(StreamEndEvent): - event = self.get_event() - raise ComposerError("expected a single document in the stream", - document.start_mark, "but found another document", - event.start_mark) - - # Drop the STREAM-END event. - self.get_event() - - return document - - def compose_document(self): - # Drop the DOCUMENT-START event. - self.get_event() - - # Compose the root node. - node = self.compose_node(None, None) - - # Drop the DOCUMENT-END event. - self.get_event() - - self.anchors = {} - return node - - def compose_node(self, parent, index): - if self.check_event(AliasEvent): - event = self.get_event() - anchor = event.anchor - if anchor not in self.anchors: - raise ComposerError(None, None, "found undefined alias %r" - % anchor, event.start_mark) - return self.anchors[anchor] - event = self.peek_event() - anchor = event.anchor - if anchor is not None: - if anchor in self.anchors: - raise ComposerError("found duplicate anchor %r; first occurence" - % anchor, self.anchors[anchor].start_mark, - "second occurence", event.start_mark) - self.descend_resolver(parent, index) - if self.check_event(ScalarEvent): - node = self.compose_scalar_node(anchor) - elif self.check_event(SequenceStartEvent): - node = self.compose_sequence_node(anchor) - elif self.check_event(MappingStartEvent): - node = self.compose_mapping_node(anchor) - self.ascend_resolver() - return node - - def compose_scalar_node(self, anchor): - event = self.get_event() - tag = event.tag - if tag is None or tag == '!': - tag = self.resolve(ScalarNode, event.value, event.implicit) - node = ScalarNode(tag, event.value, - event.start_mark, event.end_mark, style=event.style) - if anchor is not None: - self.anchors[anchor] = node - return node - - def 
compose_sequence_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == '!': - tag = self.resolve(SequenceNode, None, start_event.implicit) - node = SequenceNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - index = 0 - while not self.check_event(SequenceEndEvent): - node.value.append(self.compose_node(node, index)) - index += 1 - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - - def compose_mapping_node(self, anchor): - start_event = self.get_event() - tag = start_event.tag - if tag is None or tag == '!': - tag = self.resolve(MappingNode, None, start_event.implicit) - node = MappingNode(tag, [], - start_event.start_mark, None, - flow_style=start_event.flow_style) - if anchor is not None: - self.anchors[anchor] = node - while not self.check_event(MappingEndEvent): - #key_event = self.peek_event() - item_key = self.compose_node(node, None) - #if item_key in node.value: - # raise ComposerError("while composing a mapping", start_event.start_mark, - # "found duplicate key", key_event.start_mark) - item_value = self.compose_node(node, item_key) - #node.value[item_key] = item_value - node.value.append((item_key, item_value)) - end_event = self.get_event() - node.end_mark = end_event.end_mark - return node - diff --git a/libs/PyYAML-3.10/lib3/yaml/constructor.py b/libs/PyYAML-3.10/lib3/yaml/constructor.py deleted file mode 100644 index 981543a..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/constructor.py +++ /dev/null @@ -1,686 +0,0 @@ - -__all__ = ['BaseConstructor', 'SafeConstructor', 'Constructor', - 'ConstructorError'] - -from .error import * -from .nodes import * - -import collections, datetime, base64, binascii, re, sys, types - -class ConstructorError(MarkedYAMLError): - pass - -class BaseConstructor: - - yaml_constructors = {} - yaml_multi_constructors = {} - - def __init__(self): - 
self.constructed_objects = {} - self.recursive_objects = {} - self.state_generators = [] - self.deep_construct = False - - def check_data(self): - # If there are more documents available? - return self.check_node() - - def get_data(self): - # Construct and return the next document. - if self.check_node(): - return self.construct_document(self.get_node()) - - def get_single_data(self): - # Ensure that the stream contains a single document and construct it. - node = self.get_single_node() - if node is not None: - return self.construct_document(node) - return None - - def construct_document(self, node): - data = self.construct_object(node) - while self.state_generators: - state_generators = self.state_generators - self.state_generators = [] - for generator in state_generators: - for dummy in generator: - pass - self.constructed_objects = {} - self.recursive_objects = {} - self.deep_construct = False - return data - - def construct_object(self, node, deep=False): - if node in self.constructed_objects: - return self.constructed_objects[node] - if deep: - old_deep = self.deep_construct - self.deep_construct = True - if node in self.recursive_objects: - raise ConstructorError(None, None, - "found unconstructable recursive node", node.start_mark) - self.recursive_objects[node] = None - constructor = None - tag_suffix = None - if node.tag in self.yaml_constructors: - constructor = self.yaml_constructors[node.tag] - else: - for tag_prefix in self.yaml_multi_constructors: - if node.tag.startswith(tag_prefix): - tag_suffix = node.tag[len(tag_prefix):] - constructor = self.yaml_multi_constructors[tag_prefix] - break - else: - if None in self.yaml_multi_constructors: - tag_suffix = node.tag - constructor = self.yaml_multi_constructors[None] - elif None in self.yaml_constructors: - constructor = self.yaml_constructors[None] - elif isinstance(node, ScalarNode): - constructor = self.__class__.construct_scalar - elif isinstance(node, SequenceNode): - constructor = 
self.__class__.construct_sequence - elif isinstance(node, MappingNode): - constructor = self.__class__.construct_mapping - if tag_suffix is None: - data = constructor(self, node) - else: - data = constructor(self, tag_suffix, node) - if isinstance(data, types.GeneratorType): - generator = data - data = next(generator) - if self.deep_construct: - for dummy in generator: - pass - else: - self.state_generators.append(generator) - self.constructed_objects[node] = data - del self.recursive_objects[node] - if deep: - self.deep_construct = old_deep - return data - - def construct_scalar(self, node): - if not isinstance(node, ScalarNode): - raise ConstructorError(None, None, - "expected a scalar node, but found %s" % node.id, - node.start_mark) - return node.value - - def construct_sequence(self, node, deep=False): - if not isinstance(node, SequenceNode): - raise ConstructorError(None, None, - "expected a sequence node, but found %s" % node.id, - node.start_mark) - return [self.construct_object(child, deep=deep) - for child in node.value] - - def construct_mapping(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - mapping = {} - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - if not isinstance(key, collections.Hashable): - raise ConstructorError("while constructing a mapping", node.start_mark, - "found unhashable key", key_node.start_mark) - value = self.construct_object(value_node, deep=deep) - mapping[key] = value - return mapping - - def construct_pairs(self, node, deep=False): - if not isinstance(node, MappingNode): - raise ConstructorError(None, None, - "expected a mapping node, but found %s" % node.id, - node.start_mark) - pairs = [] - for key_node, value_node in node.value: - key = self.construct_object(key_node, deep=deep) - value = self.construct_object(value_node, deep=deep) - pairs.append((key, 
value)) - return pairs - - @classmethod - def add_constructor(cls, tag, constructor): - if not 'yaml_constructors' in cls.__dict__: - cls.yaml_constructors = cls.yaml_constructors.copy() - cls.yaml_constructors[tag] = constructor - - @classmethod - def add_multi_constructor(cls, tag_prefix, multi_constructor): - if not 'yaml_multi_constructors' in cls.__dict__: - cls.yaml_multi_constructors = cls.yaml_multi_constructors.copy() - cls.yaml_multi_constructors[tag_prefix] = multi_constructor - -class SafeConstructor(BaseConstructor): - - def construct_scalar(self, node): - if isinstance(node, MappingNode): - for key_node, value_node in node.value: - if key_node.tag == 'tag:yaml.org,2002:value': - return self.construct_scalar(value_node) - return super().construct_scalar(node) - - def flatten_mapping(self, node): - merge = [] - index = 0 - while index < len(node.value): - key_node, value_node = node.value[index] - if key_node.tag == 'tag:yaml.org,2002:merge': - del node.value[index] - if isinstance(value_node, MappingNode): - self.flatten_mapping(value_node) - merge.extend(value_node.value) - elif isinstance(value_node, SequenceNode): - submerge = [] - for subnode in value_node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing a mapping", - node.start_mark, - "expected a mapping for merging, but found %s" - % subnode.id, subnode.start_mark) - self.flatten_mapping(subnode) - submerge.append(subnode.value) - submerge.reverse() - for value in submerge: - merge.extend(value) - else: - raise ConstructorError("while constructing a mapping", node.start_mark, - "expected a mapping or list of mappings for merging, but found %s" - % value_node.id, value_node.start_mark) - elif key_node.tag == 'tag:yaml.org,2002:value': - key_node.tag = 'tag:yaml.org,2002:str' - index += 1 - else: - index += 1 - if merge: - node.value = merge + node.value - - def construct_mapping(self, node, deep=False): - if isinstance(node, MappingNode): - 
self.flatten_mapping(node) - return super().construct_mapping(node, deep=deep) - - def construct_yaml_null(self, node): - self.construct_scalar(node) - return None - - bool_values = { - 'yes': True, - 'no': False, - 'true': True, - 'false': False, - 'on': True, - 'off': False, - } - - def construct_yaml_bool(self, node): - value = self.construct_scalar(node) - return self.bool_values[value.lower()] - - def construct_yaml_int(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '') - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '0': - return 0 - elif value.startswith('0b'): - return sign*int(value[2:], 2) - elif value.startswith('0x'): - return sign*int(value[2:], 16) - elif value[0] == '0': - return sign*int(value, 8) - elif ':' in value: - digits = [int(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*int(value) - - inf_value = 1e300 - while inf_value != inf_value*inf_value: - inf_value *= inf_value - nan_value = -inf_value/inf_value # Trying to make a quiet NaN (like C99). 
- - def construct_yaml_float(self, node): - value = self.construct_scalar(node) - value = value.replace('_', '').lower() - sign = +1 - if value[0] == '-': - sign = -1 - if value[0] in '+-': - value = value[1:] - if value == '.inf': - return sign*self.inf_value - elif value == '.nan': - return self.nan_value - elif ':' in value: - digits = [float(part) for part in value.split(':')] - digits.reverse() - base = 1 - value = 0.0 - for digit in digits: - value += digit*base - base *= 60 - return sign*value - else: - return sign*float(value) - - def construct_yaml_binary(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - timestamp_regexp = re.compile( - r'''^(?P[0-9][0-9][0-9][0-9]) - -(?P[0-9][0-9]?) - -(?P[0-9][0-9]?) - (?:(?:[Tt]|[ \t]+) - (?P[0-9][0-9]?) - :(?P[0-9][0-9]) - :(?P[0-9][0-9]) - (?:\.(?P[0-9]*))? - (?:[ \t]*(?PZ|(?P[-+])(?P[0-9][0-9]?) 
- (?::(?P[0-9][0-9]))?))?)?$''', re.X) - - def construct_yaml_timestamp(self, node): - value = self.construct_scalar(node) - match = self.timestamp_regexp.match(node.value) - values = match.groupdict() - year = int(values['year']) - month = int(values['month']) - day = int(values['day']) - if not values['hour']: - return datetime.date(year, month, day) - hour = int(values['hour']) - minute = int(values['minute']) - second = int(values['second']) - fraction = 0 - if values['fraction']: - fraction = values['fraction'][:6] - while len(fraction) < 6: - fraction += '0' - fraction = int(fraction) - delta = None - if values['tz_sign']: - tz_hour = int(values['tz_hour']) - tz_minute = int(values['tz_minute'] or 0) - delta = datetime.timedelta(hours=tz_hour, minutes=tz_minute) - if values['tz_sign'] == '-': - delta = -delta - data = datetime.datetime(year, month, day, hour, minute, second, fraction) - if delta: - data -= delta - return data - - def construct_yaml_omap(self, node): - # Note: we do not check for duplicate keys, because it's too - # CPU-expensive. - omap = [] - yield omap - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing an ordered map", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - omap.append((key, value)) - - def construct_yaml_pairs(self, node): - # Note: the same code as `construct_yaml_omap`. 
- pairs = [] - yield pairs - if not isinstance(node, SequenceNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a sequence, but found %s" % node.id, node.start_mark) - for subnode in node.value: - if not isinstance(subnode, MappingNode): - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a mapping of length 1, but found %s" % subnode.id, - subnode.start_mark) - if len(subnode.value) != 1: - raise ConstructorError("while constructing pairs", node.start_mark, - "expected a single mapping item, but found %d items" % len(subnode.value), - subnode.start_mark) - key_node, value_node = subnode.value[0] - key = self.construct_object(key_node) - value = self.construct_object(value_node) - pairs.append((key, value)) - - def construct_yaml_set(self, node): - data = set() - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_str(self, node): - return self.construct_scalar(node) - - def construct_yaml_seq(self, node): - data = [] - yield data - data.extend(self.construct_sequence(node)) - - def construct_yaml_map(self, node): - data = {} - yield data - value = self.construct_mapping(node) - data.update(value) - - def construct_yaml_object(self, node, cls): - data = cls.__new__(cls) - yield data - if hasattr(data, '__setstate__'): - state = self.construct_mapping(node, deep=True) - data.__setstate__(state) - else: - state = self.construct_mapping(node) - data.__dict__.update(state) - - def construct_undefined(self, node): - raise ConstructorError(None, None, - "could not determine a constructor for the tag %r" % node.tag, - node.start_mark) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:null', - SafeConstructor.construct_yaml_null) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:bool', - SafeConstructor.construct_yaml_bool) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:int', - SafeConstructor.construct_yaml_int) - 
-SafeConstructor.add_constructor( - 'tag:yaml.org,2002:float', - SafeConstructor.construct_yaml_float) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:binary', - SafeConstructor.construct_yaml_binary) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:timestamp', - SafeConstructor.construct_yaml_timestamp) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:omap', - SafeConstructor.construct_yaml_omap) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:pairs', - SafeConstructor.construct_yaml_pairs) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:set', - SafeConstructor.construct_yaml_set) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:str', - SafeConstructor.construct_yaml_str) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:seq', - SafeConstructor.construct_yaml_seq) - -SafeConstructor.add_constructor( - 'tag:yaml.org,2002:map', - SafeConstructor.construct_yaml_map) - -SafeConstructor.add_constructor(None, - SafeConstructor.construct_undefined) - -class Constructor(SafeConstructor): - - def construct_python_str(self, node): - return self.construct_scalar(node) - - def construct_python_unicode(self, node): - return self.construct_scalar(node) - - def construct_python_bytes(self, node): - try: - value = self.construct_scalar(node).encode('ascii') - except UnicodeEncodeError as exc: - raise ConstructorError(None, None, - "failed to convert base64 data into ascii: %s" % exc, - node.start_mark) - try: - if hasattr(base64, 'decodebytes'): - return base64.decodebytes(value) - else: - return base64.decodestring(value) - except binascii.Error as exc: - raise ConstructorError(None, None, - "failed to decode base64 data: %s" % exc, node.start_mark) - - def construct_python_long(self, node): - return self.construct_yaml_int(node) - - def construct_python_complex(self, node): - return complex(self.construct_scalar(node)) - - def construct_python_tuple(self, node): - return tuple(self.construct_sequence(node)) - - def 
find_python_module(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python module", mark, - "expected non-empty name appended to the tag", mark) - try: - __import__(name) - except ImportError as exc: - raise ConstructorError("while constructing a Python module", mark, - "cannot find module %r (%s)" % (name, exc), mark) - return sys.modules[name] - - def find_python_name(self, name, mark): - if not name: - raise ConstructorError("while constructing a Python object", mark, - "expected non-empty name appended to the tag", mark) - if '.' in name: - module_name, object_name = name.rsplit('.', 1) - else: - module_name = 'builtins' - object_name = name - try: - __import__(module_name) - except ImportError as exc: - raise ConstructorError("while constructing a Python object", mark, - "cannot find module %r (%s)" % (module_name, exc), mark) - module = sys.modules[module_name] - if not hasattr(module, object_name): - raise ConstructorError("while constructing a Python object", mark, - "cannot find %r in the module %r" - % (object_name, module.__name__), mark) - return getattr(module, object_name) - - def construct_python_name(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python name", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_name(suffix, node.start_mark) - - def construct_python_module(self, suffix, node): - value = self.construct_scalar(node) - if value: - raise ConstructorError("while constructing a Python module", node.start_mark, - "expected the empty value, but found %r" % value, node.start_mark) - return self.find_python_module(suffix, node.start_mark) - - def make_python_instance(self, suffix, node, - args=None, kwds=None, newobj=False): - if not args: - args = [] - if not kwds: - kwds = {} - cls = self.find_python_name(suffix, node.start_mark) - if newobj and isinstance(cls, type): - return 
cls.__new__(cls, *args, **kwds) - else: - return cls(*args, **kwds) - - def set_python_instance_state(self, instance, state): - if hasattr(instance, '__setstate__'): - instance.__setstate__(state) - else: - slotstate = {} - if isinstance(state, tuple) and len(state) == 2: - state, slotstate = state - if hasattr(instance, '__dict__'): - instance.__dict__.update(state) - elif state: - slotstate.update(state) - for key, value in slotstate.items(): - setattr(object, key, value) - - def construct_python_object(self, suffix, node): - # Format: - # !!python/object:module.name { ... state ... } - instance = self.make_python_instance(suffix, node, newobj=True) - yield instance - deep = hasattr(instance, '__setstate__') - state = self.construct_mapping(node, deep=deep) - self.set_python_instance_state(instance, state) - - def construct_python_object_apply(self, suffix, node, newobj=False): - # Format: - # !!python/object/apply # (or !!python/object/new) - # args: [ ... arguments ... ] - # kwds: { ... keywords ... } - # state: ... state ... - # listitems: [ ... listitems ... ] - # dictitems: { ... dictitems ... } - # or short format: - # !!python/object/apply [ ... arguments ... ] - # The difference between !!python/object/apply and !!python/object/new - # is how an object is created, check make_python_instance for details. 
- if isinstance(node, SequenceNode): - args = self.construct_sequence(node, deep=True) - kwds = {} - state = {} - listitems = [] - dictitems = {} - else: - value = self.construct_mapping(node, deep=True) - args = value.get('args', []) - kwds = value.get('kwds', {}) - state = value.get('state', {}) - listitems = value.get('listitems', []) - dictitems = value.get('dictitems', {}) - instance = self.make_python_instance(suffix, node, args, kwds, newobj) - if state: - self.set_python_instance_state(instance, state) - if listitems: - instance.extend(listitems) - if dictitems: - for key in dictitems: - instance[key] = dictitems[key] - return instance - - def construct_python_object_new(self, suffix, node): - return self.construct_python_object_apply(suffix, node, newobj=True) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/none', - Constructor.construct_yaml_null) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/bool', - Constructor.construct_yaml_bool) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/str', - Constructor.construct_python_str) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/unicode', - Constructor.construct_python_unicode) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/bytes', - Constructor.construct_python_bytes) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/int', - Constructor.construct_yaml_int) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/long', - Constructor.construct_python_long) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/float', - Constructor.construct_yaml_float) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/complex', - Constructor.construct_python_complex) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/list', - Constructor.construct_yaml_seq) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/tuple', - Constructor.construct_python_tuple) - -Constructor.add_constructor( - 'tag:yaml.org,2002:python/dict', - 
Constructor.construct_yaml_map) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/name:', - Constructor.construct_python_name) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/module:', - Constructor.construct_python_module) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object:', - Constructor.construct_python_object) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/apply:', - Constructor.construct_python_object_apply) - -Constructor.add_multi_constructor( - 'tag:yaml.org,2002:python/object/new:', - Constructor.construct_python_object_new) - diff --git a/libs/PyYAML-3.10/lib3/yaml/cyaml.py b/libs/PyYAML-3.10/lib3/yaml/cyaml.py deleted file mode 100644 index d5cb87e..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/cyaml.py +++ /dev/null @@ -1,85 +0,0 @@ - -__all__ = ['CBaseLoader', 'CSafeLoader', 'CLoader', - 'CBaseDumper', 'CSafeDumper', 'CDumper'] - -from _yaml import CParser, CEmitter - -from .constructor import * - -from .serializer import * -from .representer import * - -from .resolver import * - -class CBaseLoader(CParser, BaseConstructor, BaseResolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class CSafeLoader(CParser, SafeConstructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class CLoader(CParser, Constructor, Resolver): - - def __init__(self, stream): - CParser.__init__(self, stream) - Constructor.__init__(self) - Resolver.__init__(self) - -class CBaseDumper(CEmitter, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - 
indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CSafeDumper(CEmitter, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class CDumper(CEmitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - CEmitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, encoding=encoding, - allow_unicode=allow_unicode, line_break=line_break, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib3/yaml/dumper.py b/libs/PyYAML-3.10/lib3/yaml/dumper.py deleted file mode 100644 index 0b69128..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/dumper.py +++ /dev/null @@ -1,62 +0,0 @@ - -__all__ = ['BaseDumper', 'SafeDumper', 'Dumper'] - -from .emitter import * -from .serializer import * 
-from .representer import * -from .resolver import * - -class BaseDumper(Emitter, Serializer, BaseRepresenter, BaseResolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class SafeDumper(Emitter, Serializer, SafeRepresenter, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - SafeRepresenter.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - -class Dumper(Emitter, Serializer, Representer, Resolver): - - def __init__(self, stream, - default_style=None, default_flow_style=None, - canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None, - encoding=None, explicit_start=None, explicit_end=None, - version=None, tags=None): - Emitter.__init__(self, stream, canonical=canonical, - indent=indent, width=width, - allow_unicode=allow_unicode, line_break=line_break) - Serializer.__init__(self, encoding=encoding, - 
explicit_start=explicit_start, explicit_end=explicit_end, - version=version, tags=tags) - Representer.__init__(self, default_style=default_style, - default_flow_style=default_flow_style) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib3/yaml/emitter.py b/libs/PyYAML-3.10/lib3/yaml/emitter.py deleted file mode 100644 index 34cb145..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/emitter.py +++ /dev/null @@ -1,1137 +0,0 @@ - -# Emitter expects events obeying the following grammar: -# stream ::= STREAM-START document* STREAM-END -# document ::= DOCUMENT-START node DOCUMENT-END -# node ::= SCALAR | sequence | mapping -# sequence ::= SEQUENCE-START node* SEQUENCE-END -# mapping ::= MAPPING-START (node node)* MAPPING-END - -__all__ = ['Emitter', 'EmitterError'] - -from .error import YAMLError -from .events import * - -class EmitterError(YAMLError): - pass - -class ScalarAnalysis: - def __init__(self, scalar, empty, multiline, - allow_flow_plain, allow_block_plain, - allow_single_quoted, allow_double_quoted, - allow_block): - self.scalar = scalar - self.empty = empty - self.multiline = multiline - self.allow_flow_plain = allow_flow_plain - self.allow_block_plain = allow_block_plain - self.allow_single_quoted = allow_single_quoted - self.allow_double_quoted = allow_double_quoted - self.allow_block = allow_block - -class Emitter: - - DEFAULT_TAG_PREFIXES = { - '!' : '!', - 'tag:yaml.org,2002:' : '!!', - } - - def __init__(self, stream, canonical=None, indent=None, width=None, - allow_unicode=None, line_break=None): - - # The stream should have the methods `write` and possibly `flush`. - self.stream = stream - - # Encoding can be overriden by STREAM-START. - self.encoding = None - - # Emitter is a state machine with a stack of states to handle nested - # structures. - self.states = [] - self.state = self.expect_stream_start - - # Current event and the event queue. 
- self.events = [] - self.event = None - - # The current indentation level and the stack of previous indents. - self.indents = [] - self.indent = None - - # Flow level. - self.flow_level = 0 - - # Contexts. - self.root_context = False - self.sequence_context = False - self.mapping_context = False - self.simple_key_context = False - - # Characteristics of the last emitted character: - # - current position. - # - is it a whitespace? - # - is it an indention character - # (indentation space, '-', '?', or ':')? - self.line = 0 - self.column = 0 - self.whitespace = True - self.indention = True - - # Whether the document requires an explicit document indicator - self.open_ended = False - - # Formatting details. - self.canonical = canonical - self.allow_unicode = allow_unicode - self.best_indent = 2 - if indent and 1 < indent < 10: - self.best_indent = indent - self.best_width = 80 - if width and width > self.best_indent*2: - self.best_width = width - self.best_line_break = '\n' - if line_break in ['\r', '\n', '\r\n']: - self.best_line_break = line_break - - # Tag prefixes. - self.tag_prefixes = None - - # Prepared anchor and tag. - self.prepared_anchor = None - self.prepared_tag = None - - # Scalar analysis and style. - self.analysis = None - self.style = None - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def emit(self, event): - self.events.append(event) - while not self.need_more_events(): - self.event = self.events.pop(0) - self.state() - self.event = None - - # In some cases, we wait for a few next events before emitting. 
- - def need_more_events(self): - if not self.events: - return True - event = self.events[0] - if isinstance(event, DocumentStartEvent): - return self.need_events(1) - elif isinstance(event, SequenceStartEvent): - return self.need_events(2) - elif isinstance(event, MappingStartEvent): - return self.need_events(3) - else: - return False - - def need_events(self, count): - level = 0 - for event in self.events[1:]: - if isinstance(event, (DocumentStartEvent, CollectionStartEvent)): - level += 1 - elif isinstance(event, (DocumentEndEvent, CollectionEndEvent)): - level -= 1 - elif isinstance(event, StreamEndEvent): - level = -1 - if level < 0: - return False - return (len(self.events) < count+1) - - def increase_indent(self, flow=False, indentless=False): - self.indents.append(self.indent) - if self.indent is None: - if flow: - self.indent = self.best_indent - else: - self.indent = 0 - elif not indentless: - self.indent += self.best_indent - - # States. - - # Stream handlers. - - def expect_stream_start(self): - if isinstance(self.event, StreamStartEvent): - if self.event.encoding and not hasattr(self.stream, 'encoding'): - self.encoding = self.event.encoding - self.write_stream_start() - self.state = self.expect_first_document_start - else: - raise EmitterError("expected StreamStartEvent, but got %s" - % self.event) - - def expect_nothing(self): - raise EmitterError("expected nothing, but got %s" % self.event) - - # Document handlers. 
- - def expect_first_document_start(self): - return self.expect_document_start(first=True) - - def expect_document_start(self, first=False): - if isinstance(self.event, DocumentStartEvent): - if (self.event.version or self.event.tags) and self.open_ended: - self.write_indicator('...', True) - self.write_indent() - if self.event.version: - version_text = self.prepare_version(self.event.version) - self.write_version_directive(version_text) - self.tag_prefixes = self.DEFAULT_TAG_PREFIXES.copy() - if self.event.tags: - handles = sorted(self.event.tags.keys()) - for handle in handles: - prefix = self.event.tags[handle] - self.tag_prefixes[prefix] = handle - handle_text = self.prepare_tag_handle(handle) - prefix_text = self.prepare_tag_prefix(prefix) - self.write_tag_directive(handle_text, prefix_text) - implicit = (first and not self.event.explicit and not self.canonical - and not self.event.version and not self.event.tags - and not self.check_empty_document()) - if not implicit: - self.write_indent() - self.write_indicator('---', True) - if self.canonical: - self.write_indent() - self.state = self.expect_document_root - elif isinstance(self.event, StreamEndEvent): - if self.open_ended: - self.write_indicator('...', True) - self.write_indent() - self.write_stream_end() - self.state = self.expect_nothing - else: - raise EmitterError("expected DocumentStartEvent, but got %s" - % self.event) - - def expect_document_end(self): - if isinstance(self.event, DocumentEndEvent): - self.write_indent() - if self.event.explicit: - self.write_indicator('...', True) - self.write_indent() - self.flush_stream() - self.state = self.expect_document_start - else: - raise EmitterError("expected DocumentEndEvent, but got %s" - % self.event) - - def expect_document_root(self): - self.states.append(self.expect_document_end) - self.expect_node(root=True) - - # Node handlers. 
- - def expect_node(self, root=False, sequence=False, mapping=False, - simple_key=False): - self.root_context = root - self.sequence_context = sequence - self.mapping_context = mapping - self.simple_key_context = simple_key - if isinstance(self.event, AliasEvent): - self.expect_alias() - elif isinstance(self.event, (ScalarEvent, CollectionStartEvent)): - self.process_anchor('&') - self.process_tag() - if isinstance(self.event, ScalarEvent): - self.expect_scalar() - elif isinstance(self.event, SequenceStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_sequence(): - self.expect_flow_sequence() - else: - self.expect_block_sequence() - elif isinstance(self.event, MappingStartEvent): - if self.flow_level or self.canonical or self.event.flow_style \ - or self.check_empty_mapping(): - self.expect_flow_mapping() - else: - self.expect_block_mapping() - else: - raise EmitterError("expected NodeEvent, but got %s" % self.event) - - def expect_alias(self): - if self.event.anchor is None: - raise EmitterError("anchor is not specified for alias") - self.process_anchor('*') - self.state = self.states.pop() - - def expect_scalar(self): - self.increase_indent(flow=True) - self.process_scalar() - self.indent = self.indents.pop() - self.state = self.states.pop() - - # Flow sequence handlers. 
- - def expect_flow_sequence(self): - self.write_indicator('[', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_sequence_item - - def expect_first_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator(']', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - def expect_flow_sequence_item(self): - if isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator(']', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - self.states.append(self.expect_flow_sequence_item) - self.expect_node(sequence=True) - - # Flow mapping handlers. 
- - def expect_flow_mapping(self): - self.write_indicator('{', True, whitespace=True) - self.flow_level += 1 - self.increase_indent(flow=True) - self.state = self.expect_first_flow_mapping_key - - def expect_first_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - self.write_indicator('}', False) - self.state = self.states.pop() - else: - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_key(self): - if isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.flow_level -= 1 - if self.canonical: - self.write_indicator(',', False) - self.write_indent() - self.write_indicator('}', False) - self.state = self.states.pop() - else: - self.write_indicator(',', False) - if self.canonical or self.column > self.best_width: - self.write_indent() - if not self.canonical and self.check_simple_key(): - self.states.append(self.expect_flow_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True) - self.states.append(self.expect_flow_mapping_value) - self.expect_node(mapping=True) - - def expect_flow_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - def expect_flow_mapping_value(self): - if self.canonical or self.column > self.best_width: - self.write_indent() - self.write_indicator(':', True) - self.states.append(self.expect_flow_mapping_key) - self.expect_node(mapping=True) - - # Block sequence handlers. 
- - def expect_block_sequence(self): - indentless = (self.mapping_context and not self.indention) - self.increase_indent(flow=False, indentless=indentless) - self.state = self.expect_first_block_sequence_item - - def expect_first_block_sequence_item(self): - return self.expect_block_sequence_item(first=True) - - def expect_block_sequence_item(self, first=False): - if not first and isinstance(self.event, SequenceEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - self.write_indicator('-', True, indention=True) - self.states.append(self.expect_block_sequence_item) - self.expect_node(sequence=True) - - # Block mapping handlers. - - def expect_block_mapping(self): - self.increase_indent(flow=False) - self.state = self.expect_first_block_mapping_key - - def expect_first_block_mapping_key(self): - return self.expect_block_mapping_key(first=True) - - def expect_block_mapping_key(self, first=False): - if not first and isinstance(self.event, MappingEndEvent): - self.indent = self.indents.pop() - self.state = self.states.pop() - else: - self.write_indent() - if self.check_simple_key(): - self.states.append(self.expect_block_mapping_simple_value) - self.expect_node(mapping=True, simple_key=True) - else: - self.write_indicator('?', True, indention=True) - self.states.append(self.expect_block_mapping_value) - self.expect_node(mapping=True) - - def expect_block_mapping_simple_value(self): - self.write_indicator(':', False) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - def expect_block_mapping_value(self): - self.write_indent() - self.write_indicator(':', True, indention=True) - self.states.append(self.expect_block_mapping_key) - self.expect_node(mapping=True) - - # Checkers. 
- - def check_empty_sequence(self): - return (isinstance(self.event, SequenceStartEvent) and self.events - and isinstance(self.events[0], SequenceEndEvent)) - - def check_empty_mapping(self): - return (isinstance(self.event, MappingStartEvent) and self.events - and isinstance(self.events[0], MappingEndEvent)) - - def check_empty_document(self): - if not isinstance(self.event, DocumentStartEvent) or not self.events: - return False - event = self.events[0] - return (isinstance(event, ScalarEvent) and event.anchor is None - and event.tag is None and event.implicit and event.value == '') - - def check_simple_key(self): - length = 0 - if isinstance(self.event, NodeEvent) and self.event.anchor is not None: - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - length += len(self.prepared_anchor) - if isinstance(self.event, (ScalarEvent, CollectionStartEvent)) \ - and self.event.tag is not None: - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(self.event.tag) - length += len(self.prepared_tag) - if isinstance(self.event, ScalarEvent): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - length += len(self.analysis.scalar) - return (length < 128 and (isinstance(self.event, AliasEvent) - or (isinstance(self.event, ScalarEvent) - and not self.analysis.empty and not self.analysis.multiline) - or self.check_empty_sequence() or self.check_empty_mapping())) - - # Anchor, Tag, and Scalar processors. 
- - def process_anchor(self, indicator): - if self.event.anchor is None: - self.prepared_anchor = None - return - if self.prepared_anchor is None: - self.prepared_anchor = self.prepare_anchor(self.event.anchor) - if self.prepared_anchor: - self.write_indicator(indicator+self.prepared_anchor, True) - self.prepared_anchor = None - - def process_tag(self): - tag = self.event.tag - if isinstance(self.event, ScalarEvent): - if self.style is None: - self.style = self.choose_scalar_style() - if ((not self.canonical or tag is None) and - ((self.style == '' and self.event.implicit[0]) - or (self.style != '' and self.event.implicit[1]))): - self.prepared_tag = None - return - if self.event.implicit[0] and tag is None: - tag = '!' - self.prepared_tag = None - else: - if (not self.canonical or tag is None) and self.event.implicit: - self.prepared_tag = None - return - if tag is None: - raise EmitterError("tag is not specified") - if self.prepared_tag is None: - self.prepared_tag = self.prepare_tag(tag) - if self.prepared_tag: - self.write_indicator(self.prepared_tag, True) - self.prepared_tag = None - - def choose_scalar_style(self): - if self.analysis is None: - self.analysis = self.analyze_scalar(self.event.value) - if self.event.style == '"' or self.canonical: - return '"' - if not self.event.style and self.event.implicit[0]: - if (not (self.simple_key_context and - (self.analysis.empty or self.analysis.multiline)) - and (self.flow_level and self.analysis.allow_flow_plain - or (not self.flow_level and self.analysis.allow_block_plain))): - return '' - if self.event.style and self.event.style in '|>': - if (not self.flow_level and not self.simple_key_context - and self.analysis.allow_block): - return self.event.style - if not self.event.style or self.event.style == '\'': - if (self.analysis.allow_single_quoted and - not (self.simple_key_context and self.analysis.multiline)): - return '\'' - return '"' - - def process_scalar(self): - if self.analysis is None: - self.analysis = 
self.analyze_scalar(self.event.value) - if self.style is None: - self.style = self.choose_scalar_style() - split = (not self.simple_key_context) - #if self.analysis.multiline and split \ - # and (not self.style or self.style in '\'\"'): - # self.write_indent() - if self.style == '"': - self.write_double_quoted(self.analysis.scalar, split) - elif self.style == '\'': - self.write_single_quoted(self.analysis.scalar, split) - elif self.style == '>': - self.write_folded(self.analysis.scalar) - elif self.style == '|': - self.write_literal(self.analysis.scalar) - else: - self.write_plain(self.analysis.scalar, split) - self.analysis = None - self.style = None - - # Analyzers. - - def prepare_version(self, version): - major, minor = version - if major != 1: - raise EmitterError("unsupported YAML version: %d.%d" % (major, minor)) - return '%d.%d' % (major, minor) - - def prepare_tag_handle(self, handle): - if not handle: - raise EmitterError("tag handle must not be empty") - if handle[0] != '!' or handle[-1] != '!': - raise EmitterError("tag handle must start and end with '!': %r" % handle) - for ch in handle[1:-1]: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the tag handle: %r" - % (ch, handle)) - return handle - - def prepare_tag_prefix(self, prefix): - if not prefix: - raise EmitterError("tag prefix must not be empty") - chunks = [] - start = end = 0 - if prefix[0] == '!': - end = 1 - while end < len(prefix): - ch = prefix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?!:@&=+$,_.~*\'()[]': - end += 1 - else: - if start < end: - chunks.append(prefix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ord(ch)) - if start < end: - chunks.append(prefix[start:end]) - return ''.join(chunks) - - def prepare_tag(self, tag): - if not tag: - raise EmitterError("tag must not be empty") - if tag == '!': - 
return tag - handle = None - suffix = tag - prefixes = sorted(self.tag_prefixes.keys()) - for prefix in prefixes: - if tag.startswith(prefix) \ - and (prefix == '!' or len(prefix) < len(tag)): - handle = self.tag_prefixes[prefix] - suffix = tag[len(prefix):] - chunks = [] - start = end = 0 - while end < len(suffix): - ch = suffix[end] - if '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.~*\'()[]' \ - or (ch == '!' and handle != '!'): - end += 1 - else: - if start < end: - chunks.append(suffix[start:end]) - start = end = end+1 - data = ch.encode('utf-8') - for ch in data: - chunks.append('%%%02X' % ord(ch)) - if start < end: - chunks.append(suffix[start:end]) - suffix_text = ''.join(chunks) - if handle: - return '%s%s' % (handle, suffix_text) - else: - return '!<%s>' % suffix_text - - def prepare_anchor(self, anchor): - if not anchor: - raise EmitterError("anchor must not be empty") - for ch in anchor: - if not ('0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_'): - raise EmitterError("invalid character %r in the anchor: %r" - % (ch, anchor)) - return anchor - - def analyze_scalar(self, scalar): - - # Empty scalar is a special case. - if not scalar: - return ScalarAnalysis(scalar=scalar, empty=True, multiline=False, - allow_flow_plain=False, allow_block_plain=True, - allow_single_quoted=True, allow_double_quoted=True, - allow_block=False) - - # Indicators and special characters. - block_indicators = False - flow_indicators = False - line_breaks = False - special_characters = False - - # Important whitespace combinations. - leading_space = False - leading_break = False - trailing_space = False - trailing_break = False - break_space = False - space_break = False - - # Check document indicators. - if scalar.startswith('---') or scalar.startswith('...'): - block_indicators = True - flow_indicators = True - - # First character or preceded by a whitespace. 
- preceeded_by_whitespace = True - - # Last character or followed by a whitespace. - followed_by_whitespace = (len(scalar) == 1 or - scalar[1] in '\0 \t\r\n\x85\u2028\u2029') - - # The previous character is a space. - previous_space = False - - # The previous character is a break. - previous_break = False - - index = 0 - while index < len(scalar): - ch = scalar[index] - - # Check for indicators. - if index == 0: - # Leading indicators are special characters. - if ch in '#,[]{}&*!|>\'\"%@`': - flow_indicators = True - block_indicators = True - if ch in '?:': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '-' and followed_by_whitespace: - flow_indicators = True - block_indicators = True - else: - # Some indicators cannot appear within a scalar as well. - if ch in ',?[]{}': - flow_indicators = True - if ch == ':': - flow_indicators = True - if followed_by_whitespace: - block_indicators = True - if ch == '#' and preceeded_by_whitespace: - flow_indicators = True - block_indicators = True - - # Check for line breaks, special, and unicode characters. - if ch in '\n\x85\u2028\u2029': - line_breaks = True - if not (ch == '\n' or '\x20' <= ch <= '\x7E'): - if (ch == '\x85' or '\xA0' <= ch <= '\uD7FF' - or '\uE000' <= ch <= '\uFFFD') and ch != '\uFEFF': - unicode_characters = True - if not self.allow_unicode: - special_characters = True - else: - special_characters = True - - # Detect important whitespace combinations. - if ch == ' ': - if index == 0: - leading_space = True - if index == len(scalar)-1: - trailing_space = True - if previous_break: - break_space = True - previous_space = True - previous_break = False - elif ch in '\n\x85\u2028\u2029': - if index == 0: - leading_break = True - if index == len(scalar)-1: - trailing_break = True - if previous_space: - space_break = True - previous_space = False - previous_break = True - else: - previous_space = False - previous_break = False - - # Prepare for the next character. 
- index += 1 - preceeded_by_whitespace = (ch in '\0 \t\r\n\x85\u2028\u2029') - followed_by_whitespace = (index+1 >= len(scalar) or - scalar[index+1] in '\0 \t\r\n\x85\u2028\u2029') - - # Let's decide what styles are allowed. - allow_flow_plain = True - allow_block_plain = True - allow_single_quoted = True - allow_double_quoted = True - allow_block = True - - # Leading and trailing whitespaces are bad for plain scalars. - if (leading_space or leading_break - or trailing_space or trailing_break): - allow_flow_plain = allow_block_plain = False - - # We do not permit trailing spaces for block scalars. - if trailing_space: - allow_block = False - - # Spaces at the beginning of a new line are only acceptable for block - # scalars. - if break_space: - allow_flow_plain = allow_block_plain = allow_single_quoted = False - - # Spaces followed by breaks, as well as special character are only - # allowed for double quoted scalars. - if space_break or special_characters: - allow_flow_plain = allow_block_plain = \ - allow_single_quoted = allow_block = False - - # Although the plain scalar writer supports breaks, we never emit - # multiline plain scalars. - if line_breaks: - allow_flow_plain = allow_block_plain = False - - # Flow indicators are forbidden for flow plain scalars. - if flow_indicators: - allow_flow_plain = False - - # Block indicators are forbidden for block plain scalars. - if block_indicators: - allow_block_plain = False - - return ScalarAnalysis(scalar=scalar, - empty=False, multiline=line_breaks, - allow_flow_plain=allow_flow_plain, - allow_block_plain=allow_block_plain, - allow_single_quoted=allow_single_quoted, - allow_double_quoted=allow_double_quoted, - allow_block=allow_block) - - # Writers. - - def flush_stream(self): - if hasattr(self.stream, 'flush'): - self.stream.flush() - - def write_stream_start(self): - # Write BOM if needed. 
- if self.encoding and self.encoding.startswith('utf-16'): - self.stream.write('\uFEFF'.encode(self.encoding)) - - def write_stream_end(self): - self.flush_stream() - - def write_indicator(self, indicator, need_whitespace, - whitespace=False, indention=False): - if self.whitespace or not need_whitespace: - data = indicator - else: - data = ' '+indicator - self.whitespace = whitespace - self.indention = self.indention and indention - self.column += len(data) - self.open_ended = False - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_indent(self): - indent = self.indent or 0 - if not self.indention or self.column > indent \ - or (self.column == indent and not self.whitespace): - self.write_line_break() - if self.column < indent: - self.whitespace = True - data = ' '*(indent-self.column) - self.column = indent - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_line_break(self, data=None): - if data is None: - data = self.best_line_break - self.whitespace = True - self.indention = True - self.line += 1 - self.column = 0 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - - def write_version_directive(self, version_text): - data = '%%YAML %s' % version_text - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - def write_tag_directive(self, handle_text, prefix_text): - data = '%%TAG %s %s' % (handle_text, prefix_text) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_line_break() - - # Scalar streams. 
- - def write_single_quoted(self, text, split=True): - self.write_indicator('\'', True) - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch is None or ch != ' ': - if start+1 == end and self.column > self.best_width and split \ - and start != 0 and end != len(text): - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029' or ch == '\'': - if start < end: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch == '\'': - data = '\'\'' - self.column += 2 - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end + 1 - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - self.write_indicator('\'', False) - - ESCAPE_REPLACEMENTS = { - '\0': '0', - '\x07': 'a', - '\x08': 'b', - '\x09': 't', - '\x0A': 'n', - '\x0B': 'v', - '\x0C': 'f', - '\x0D': 'r', - '\x1B': 'e', - '\"': '\"', - '\\': '\\', - '\x85': 'N', - '\xA0': '_', - '\u2028': 'L', - '\u2029': 'P', - } - - def write_double_quoted(self, text, split=True): - self.write_indicator('"', True) - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if ch is None or ch in '"\\\x85\u2028\u2029\uFEFF' \ - or not ('\x20' <= ch <= '\x7E' - or (self.allow_unicode - and ('\xA0' <= ch <= '\uD7FF' - or '\uE000' <= ch <= '\uFFFD'))): - if start < end: - data = text[start:end] - 
self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - if ch in self.ESCAPE_REPLACEMENTS: - data = '\\'+self.ESCAPE_REPLACEMENTS[ch] - elif ch <= '\xFF': - data = '\\x%02X' % ord(ch) - elif ch <= '\uFFFF': - data = '\\u%04X' % ord(ch) - else: - data = '\\U%08X' % ord(ch) - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end+1 - if 0 < end < len(text)-1 and (ch == ' ' or start >= end) \ - and self.column+(end-start) > self.best_width and split: - data = text[start:end]+'\\' - if start < end: - start = end - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.write_indent() - self.whitespace = False - self.indention = False - if text[start] == ' ': - data = '\\' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - end += 1 - self.write_indicator('"', False) - - def determine_block_hints(self, text): - hints = '' - if text: - if text[0] in ' \n\x85\u2028\u2029': - hints += str(self.best_indent) - if text[-1] not in '\n\x85\u2028\u2029': - hints += '-' - elif len(text) == 1 or text[-2] in '\n\x85\u2028\u2029': - hints += '+' - return hints - - def write_folded(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('>'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - leading_space = True - spaces = False - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - if not leading_space and ch is not None and ch != ' ' \ - and text[start] == '\n': - self.write_line_break() - leading_space = (ch == ' ') - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is 
not None: - self.write_indent() - start = end - elif spaces: - if ch != ' ': - if start+1 == end and self.column > self.best_width: - self.write_indent() - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - spaces = (ch == ' ') - end += 1 - - def write_literal(self, text): - hints = self.determine_block_hints(text) - self.write_indicator('|'+hints, True) - if hints[-1:] == '+': - self.open_ended = True - self.write_line_break() - breaks = True - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if breaks: - if ch is None or ch not in '\n\x85\u2028\u2029': - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - if ch is not None: - self.write_indent() - start = end - else: - if ch is None or ch in '\n\x85\u2028\u2029': - data = text[start:end] - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - if ch is None: - self.write_line_break() - start = end - if ch is not None: - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - - def write_plain(self, text, split=True): - if self.root_context: - self.open_ended = True - if not text: - return - if not self.whitespace: - data = ' ' - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - self.whitespace = False - self.indention = False - spaces = False - breaks = False - start = end = 0 - while end <= len(text): - ch = None - if end < len(text): - ch = text[end] - if spaces: - if ch != ' ': - if start+1 == end and self.column > 
self.best_width and split: - self.write_indent() - self.whitespace = False - self.indention = False - else: - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - elif breaks: - if ch not in '\n\x85\u2028\u2029': - if text[start] == '\n': - self.write_line_break() - for br in text[start:end]: - if br == '\n': - self.write_line_break() - else: - self.write_line_break(br) - self.write_indent() - self.whitespace = False - self.indention = False - start = end - else: - if ch is None or ch in ' \n\x85\u2028\u2029': - data = text[start:end] - self.column += len(data) - if self.encoding: - data = data.encode(self.encoding) - self.stream.write(data) - start = end - if ch is not None: - spaces = (ch == ' ') - breaks = (ch in '\n\x85\u2028\u2029') - end += 1 - diff --git a/libs/PyYAML-3.10/lib3/yaml/error.py b/libs/PyYAML-3.10/lib3/yaml/error.py deleted file mode 100644 index b796b4d..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/error.py +++ /dev/null @@ -1,75 +0,0 @@ - -__all__ = ['Mark', 'YAMLError', 'MarkedYAMLError'] - -class Mark: - - def __init__(self, name, index, line, column, buffer, pointer): - self.name = name - self.index = index - self.line = line - self.column = column - self.buffer = buffer - self.pointer = pointer - - def get_snippet(self, indent=4, max_length=75): - if self.buffer is None: - return None - head = '' - start = self.pointer - while start > 0 and self.buffer[start-1] not in '\0\r\n\x85\u2028\u2029': - start -= 1 - if self.pointer-start > max_length/2-1: - head = ' ... ' - start += 5 - break - tail = '' - end = self.pointer - while end < len(self.buffer) and self.buffer[end] not in '\0\r\n\x85\u2028\u2029': - end += 1 - if end-self.pointer > max_length/2-1: - tail = ' ... 
' - end -= 5 - break - snippet = self.buffer[start:end] - return ' '*indent + head + snippet + tail + '\n' \ - + ' '*(indent+self.pointer-start+len(head)) + '^' - - def __str__(self): - snippet = self.get_snippet() - where = " in \"%s\", line %d, column %d" \ - % (self.name, self.line+1, self.column+1) - if snippet is not None: - where += ":\n"+snippet - return where - -class YAMLError(Exception): - pass - -class MarkedYAMLError(YAMLError): - - def __init__(self, context=None, context_mark=None, - problem=None, problem_mark=None, note=None): - self.context = context - self.context_mark = context_mark - self.problem = problem - self.problem_mark = problem_mark - self.note = note - - def __str__(self): - lines = [] - if self.context is not None: - lines.append(self.context) - if self.context_mark is not None \ - and (self.problem is None or self.problem_mark is None - or self.context_mark.name != self.problem_mark.name - or self.context_mark.line != self.problem_mark.line - or self.context_mark.column != self.problem_mark.column): - lines.append(str(self.context_mark)) - if self.problem is not None: - lines.append(self.problem) - if self.problem_mark is not None: - lines.append(str(self.problem_mark)) - if self.note is not None: - lines.append(self.note) - return '\n'.join(lines) - diff --git a/libs/PyYAML-3.10/lib3/yaml/events.py b/libs/PyYAML-3.10/lib3/yaml/events.py deleted file mode 100644 index f79ad38..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/events.py +++ /dev/null @@ -1,86 +0,0 @@ - -# Abstract classes. 
- -class Event(object): - def __init__(self, start_mark=None, end_mark=None): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] - if hasattr(self, key)] - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -class NodeEvent(Event): - def __init__(self, anchor, start_mark=None, end_mark=None): - self.anchor = anchor - self.start_mark = start_mark - self.end_mark = end_mark - -class CollectionStartEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, - flow_style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class CollectionEndEvent(Event): - pass - -# Implementations. - -class StreamStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndEvent(Event): - pass - -class DocumentStartEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None, version=None, tags=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - self.version = version - self.tags = tags - -class DocumentEndEvent(Event): - def __init__(self, start_mark=None, end_mark=None, - explicit=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.explicit = explicit - -class AliasEvent(NodeEvent): - pass - -class ScalarEvent(NodeEvent): - def __init__(self, anchor, tag, implicit, value, - start_mark=None, end_mark=None, style=None): - self.anchor = anchor - self.tag = tag - self.implicit = implicit - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class SequenceStartEvent(CollectionStartEvent): - pass 
- -class SequenceEndEvent(CollectionEndEvent): - pass - -class MappingStartEvent(CollectionStartEvent): - pass - -class MappingEndEvent(CollectionEndEvent): - pass - diff --git a/libs/PyYAML-3.10/lib3/yaml/loader.py b/libs/PyYAML-3.10/lib3/yaml/loader.py deleted file mode 100644 index 08c8f01..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/loader.py +++ /dev/null @@ -1,40 +0,0 @@ - -__all__ = ['BaseLoader', 'SafeLoader', 'Loader'] - -from .reader import * -from .scanner import * -from .parser import * -from .composer import * -from .constructor import * -from .resolver import * - -class BaseLoader(Reader, Scanner, Parser, Composer, BaseConstructor, BaseResolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - BaseConstructor.__init__(self) - BaseResolver.__init__(self) - -class SafeLoader(Reader, Scanner, Parser, Composer, SafeConstructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - SafeConstructor.__init__(self) - Resolver.__init__(self) - -class Loader(Reader, Scanner, Parser, Composer, Constructor, Resolver): - - def __init__(self, stream): - Reader.__init__(self, stream) - Scanner.__init__(self) - Parser.__init__(self) - Composer.__init__(self) - Constructor.__init__(self) - Resolver.__init__(self) - diff --git a/libs/PyYAML-3.10/lib3/yaml/nodes.py b/libs/PyYAML-3.10/lib3/yaml/nodes.py deleted file mode 100644 index c4f070c..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/nodes.py +++ /dev/null @@ -1,49 +0,0 @@ - -class Node(object): - def __init__(self, tag, value, start_mark, end_mark): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - value = self.value - #if isinstance(value, list): - # if len(value) == 0: - # value = '' - # elif len(value) == 1: - # value = '<1 item>' - # else: - # value = '<%d items>' 
% len(value) - #else: - # if len(value) > 75: - # value = repr(value[:70]+u' ... ') - # else: - # value = repr(value) - value = repr(value) - return '%s(tag=%r, value=%s)' % (self.__class__.__name__, self.tag, value) - -class ScalarNode(Node): - id = 'scalar' - def __init__(self, tag, value, - start_mark=None, end_mark=None, style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - -class CollectionNode(Node): - def __init__(self, tag, value, - start_mark=None, end_mark=None, flow_style=None): - self.tag = tag - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - self.flow_style = flow_style - -class SequenceNode(CollectionNode): - id = 'sequence' - -class MappingNode(CollectionNode): - id = 'mapping' - diff --git a/libs/PyYAML-3.10/lib3/yaml/parser.py b/libs/PyYAML-3.10/lib3/yaml/parser.py deleted file mode 100644 index 13a5995..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/parser.py +++ /dev/null @@ -1,589 +0,0 @@ - -# The following YAML grammar is LL(1) and is parsed by a recursive descent -# parser. -# -# stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -# implicit_document ::= block_node DOCUMENT-END* -# explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -# block_node_or_indentless_sequence ::= -# ALIAS -# | properties (block_content | indentless_block_sequence)? -# | block_content -# | indentless_block_sequence -# block_node ::= ALIAS -# | properties block_content? -# | block_content -# flow_node ::= ALIAS -# | properties flow_content? -# | flow_content -# properties ::= TAG ANCHOR? | ANCHOR TAG? 
-# block_content ::= block_collection | flow_collection | SCALAR -# flow_content ::= flow_collection | SCALAR -# block_collection ::= block_sequence | block_mapping -# flow_collection ::= flow_sequence | flow_mapping -# block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -# indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -# block_mapping ::= BLOCK-MAPPING_START -# ((KEY block_node_or_indentless_sequence?)? -# (VALUE block_node_or_indentless_sequence?)?)* -# BLOCK-END -# flow_sequence ::= FLOW-SEQUENCE-START -# (flow_sequence_entry FLOW-ENTRY)* -# flow_sequence_entry? -# FLOW-SEQUENCE-END -# flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# flow_mapping ::= FLOW-MAPPING-START -# (flow_mapping_entry FLOW-ENTRY)* -# flow_mapping_entry? -# FLOW-MAPPING-END -# flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -# -# FIRST sets: -# -# stream: { STREAM-START } -# explicit_document: { DIRECTIVE DOCUMENT-START } -# implicit_document: FIRST(block_node) -# block_node: { ALIAS TAG ANCHOR SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_node: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_content: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# flow_content: { FLOW-SEQUENCE-START FLOW-MAPPING-START SCALAR } -# block_collection: { BLOCK-SEQUENCE-START BLOCK-MAPPING-START } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# block_sequence: { BLOCK-SEQUENCE-START } -# block_mapping: { BLOCK-MAPPING-START } -# block_node_or_indentless_sequence: { ALIAS ANCHOR TAG SCALAR BLOCK-SEQUENCE-START BLOCK-MAPPING-START FLOW-SEQUENCE-START FLOW-MAPPING-START BLOCK-ENTRY } -# indentless_sequence: { ENTRY } -# flow_collection: { FLOW-SEQUENCE-START FLOW-MAPPING-START } -# flow_sequence: { FLOW-SEQUENCE-START } -# flow_mapping: { FLOW-MAPPING-START } -# flow_sequence_entry: { ALIAS ANCHOR 
TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } -# flow_mapping_entry: { ALIAS ANCHOR TAG SCALAR FLOW-SEQUENCE-START FLOW-MAPPING-START KEY } - -__all__ = ['Parser', 'ParserError'] - -from .error import MarkedYAMLError -from .tokens import * -from .events import * -from .scanner import * - -class ParserError(MarkedYAMLError): - pass - -class Parser: - # Since writing a recursive-descendant parser is a straightforward task, we - # do not give many comments here. - - DEFAULT_TAGS = { - '!': '!', - '!!': 'tag:yaml.org,2002:', - } - - def __init__(self): - self.current_event = None - self.yaml_version = None - self.tag_handles = {} - self.states = [] - self.marks = [] - self.state = self.parse_stream_start - - def dispose(self): - # Reset the state attributes (to clear self-references) - self.states = [] - self.state = None - - def check_event(self, *choices): - # Check the type of the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - if self.current_event is not None: - if not choices: - return True - for choice in choices: - if isinstance(self.current_event, choice): - return True - return False - - def peek_event(self): - # Get the next event. - if self.current_event is None: - if self.state: - self.current_event = self.state() - return self.current_event - - def get_event(self): - # Get the next event and proceed further. - if self.current_event is None: - if self.state: - self.current_event = self.state() - value = self.current_event - self.current_event = None - return value - - # stream ::= STREAM-START implicit_document? explicit_document* STREAM-END - # implicit_document ::= block_node DOCUMENT-END* - # explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* - - def parse_stream_start(self): - - # Parse the stream start. - token = self.get_token() - event = StreamStartEvent(token.start_mark, token.end_mark, - encoding=token.encoding) - - # Prepare the next state. 
- self.state = self.parse_implicit_document_start - - return event - - def parse_implicit_document_start(self): - - # Parse an implicit document. - if not self.check_token(DirectiveToken, DocumentStartToken, - StreamEndToken): - self.tag_handles = self.DEFAULT_TAGS - token = self.peek_token() - start_mark = end_mark = token.start_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=False) - - # Prepare the next state. - self.states.append(self.parse_document_end) - self.state = self.parse_block_node - - return event - - else: - return self.parse_document_start() - - def parse_document_start(self): - - # Parse any extra document end indicators. - while self.check_token(DocumentEndToken): - self.get_token() - - # Parse an explicit document. - if not self.check_token(StreamEndToken): - token = self.peek_token() - start_mark = token.start_mark - version, tags = self.process_directives() - if not self.check_token(DocumentStartToken): - raise ParserError(None, None, - "expected '', but found %r" - % self.peek_token().id, - self.peek_token().start_mark) - token = self.get_token() - end_mark = token.end_mark - event = DocumentStartEvent(start_mark, end_mark, - explicit=True, version=version, tags=tags) - self.states.append(self.parse_document_end) - self.state = self.parse_document_content - else: - # Parse the end of the stream. - token = self.get_token() - event = StreamEndEvent(token.start_mark, token.end_mark) - assert not self.states - assert not self.marks - self.state = None - return event - - def parse_document_end(self): - - # Parse the document end. - token = self.peek_token() - start_mark = end_mark = token.start_mark - explicit = False - if self.check_token(DocumentEndToken): - token = self.get_token() - end_mark = token.end_mark - explicit = True - event = DocumentEndEvent(start_mark, end_mark, - explicit=explicit) - - # Prepare the next state. 
- self.state = self.parse_document_start - - return event - - def parse_document_content(self): - if self.check_token(DirectiveToken, - DocumentStartToken, DocumentEndToken, StreamEndToken): - event = self.process_empty_scalar(self.peek_token().start_mark) - self.state = self.states.pop() - return event - else: - return self.parse_block_node() - - def process_directives(self): - self.yaml_version = None - self.tag_handles = {} - while self.check_token(DirectiveToken): - token = self.get_token() - if token.name == 'YAML': - if self.yaml_version is not None: - raise ParserError(None, None, - "found duplicate YAML directive", token.start_mark) - major, minor = token.value - if major != 1: - raise ParserError(None, None, - "found incompatible YAML document (version 1.* is required)", - token.start_mark) - self.yaml_version = token.value - elif token.name == 'TAG': - handle, prefix = token.value - if handle in self.tag_handles: - raise ParserError(None, None, - "duplicate tag handle %r" % handle, - token.start_mark) - self.tag_handles[handle] = prefix - if self.tag_handles: - value = self.yaml_version, self.tag_handles.copy() - else: - value = self.yaml_version, None - for key in self.DEFAULT_TAGS: - if key not in self.tag_handles: - self.tag_handles[key] = self.DEFAULT_TAGS[key] - return value - - # block_node_or_indentless_sequence ::= ALIAS - # | properties (block_content | indentless_block_sequence)? - # | block_content - # | indentless_block_sequence - # block_node ::= ALIAS - # | properties block_content? - # | block_content - # flow_node ::= ALIAS - # | properties flow_content? - # | flow_content - # properties ::= TAG ANCHOR? | ANCHOR TAG? 
- # block_content ::= block_collection | flow_collection | SCALAR - # flow_content ::= flow_collection | SCALAR - # block_collection ::= block_sequence | block_mapping - # flow_collection ::= flow_sequence | flow_mapping - - def parse_block_node(self): - return self.parse_node(block=True) - - def parse_flow_node(self): - return self.parse_node() - - def parse_block_node_or_indentless_sequence(self): - return self.parse_node(block=True, indentless_sequence=True) - - def parse_node(self, block=False, indentless_sequence=False): - if self.check_token(AliasToken): - token = self.get_token() - event = AliasEvent(token.value, token.start_mark, token.end_mark) - self.state = self.states.pop() - else: - anchor = None - tag = None - start_mark = end_mark = tag_mark = None - if self.check_token(AnchorToken): - token = self.get_token() - start_mark = token.start_mark - end_mark = token.end_mark - anchor = token.value - if self.check_token(TagToken): - token = self.get_token() - tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - elif self.check_token(TagToken): - token = self.get_token() - start_mark = tag_mark = token.start_mark - end_mark = token.end_mark - tag = token.value - if self.check_token(AnchorToken): - token = self.get_token() - end_mark = token.end_mark - anchor = token.value - if tag is not None: - handle, suffix = tag - if handle is not None: - if handle not in self.tag_handles: - raise ParserError("while parsing a node", start_mark, - "found undefined tag handle %r" % handle, - tag_mark) - tag = self.tag_handles[handle]+suffix - else: - tag = suffix - #if tag == '!': - # raise ParserError("while parsing a node", start_mark, - # "found non-specific tag '!'", tag_mark, - # "Please check 'http://pyyaml.org/wiki/YAMLNonSpecificTag' and share your opinion.") - if start_mark is None: - start_mark = end_mark = self.peek_token().start_mark - event = None - implicit = (tag is None or tag == '!') - if indentless_sequence and 
self.check_token(BlockEntryToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark) - self.state = self.parse_indentless_sequence_entry - else: - if self.check_token(ScalarToken): - token = self.get_token() - end_mark = token.end_mark - if (token.plain and tag is None) or tag == '!': - implicit = (True, False) - elif tag is None: - implicit = (False, True) - else: - implicit = (False, False) - event = ScalarEvent(anchor, tag, implicit, token.value, - start_mark, end_mark, style=token.style) - self.state = self.states.pop() - elif self.check_token(FlowSequenceStartToken): - end_mark = self.peek_token().end_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_sequence_first_entry - elif self.check_token(FlowMappingStartToken): - end_mark = self.peek_token().end_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=True) - self.state = self.parse_flow_mapping_first_key - elif block and self.check_token(BlockSequenceStartToken): - end_mark = self.peek_token().start_mark - event = SequenceStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_sequence_first_entry - elif block and self.check_token(BlockMappingStartToken): - end_mark = self.peek_token().start_mark - event = MappingStartEvent(anchor, tag, implicit, - start_mark, end_mark, flow_style=False) - self.state = self.parse_block_mapping_first_key - elif anchor is not None or tag is not None: - # Empty scalars are allowed even if a tag or an anchor is - # specified. 
- event = ScalarEvent(anchor, tag, (implicit, False), '', - start_mark, end_mark) - self.state = self.states.pop() - else: - if block: - node = 'block' - else: - node = 'flow' - token = self.peek_token() - raise ParserError("while parsing a %s node" % node, start_mark, - "expected the node content, but found %r" % token.id, - token.start_mark) - return event - - # block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END - - def parse_block_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_sequence_entry() - - def parse_block_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, BlockEndToken): - self.states.append(self.parse_block_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_block_sequence_entry - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block collection", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - # indentless_sequence ::= (BLOCK-ENTRY block_node?)+ - - def parse_indentless_sequence_entry(self): - if self.check_token(BlockEntryToken): - token = self.get_token() - if not self.check_token(BlockEntryToken, - KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_indentless_sequence_entry) - return self.parse_block_node() - else: - self.state = self.parse_indentless_sequence_entry - return self.process_empty_scalar(token.end_mark) - token = self.peek_token() - event = SequenceEndEvent(token.start_mark, token.start_mark) - self.state = self.states.pop() - return event - - # block_mapping ::= BLOCK-MAPPING_START - # ((KEY 
block_node_or_indentless_sequence?)? - # (VALUE block_node_or_indentless_sequence?)?)* - # BLOCK-END - - def parse_block_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_block_mapping_key() - - def parse_block_mapping_key(self): - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_value) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_value - return self.process_empty_scalar(token.end_mark) - if not self.check_token(BlockEndToken): - token = self.peek_token() - raise ParserError("while parsing a block mapping", self.marks[-1], - "expected , but found %r" % token.id, token.start_mark) - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_block_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(KeyToken, ValueToken, BlockEndToken): - self.states.append(self.parse_block_mapping_key) - return self.parse_block_node_or_indentless_sequence() - else: - self.state = self.parse_block_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_block_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - # flow_sequence ::= FLOW-SEQUENCE-START - # (flow_sequence_entry FLOW-ENTRY)* - # flow_sequence_entry? - # FLOW-SEQUENCE-END - # flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - # - # Note that while production rules for both flow_sequence_entry and - # flow_mapping_entry are equal, their interpretations are different. - # For `flow_sequence_entry`, the part `KEY flow_node? (VALUE flow_node?)?` - # generate an inline mapping (set syntax). 
- - def parse_flow_sequence_first_entry(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_sequence_entry(first=True) - - def parse_flow_sequence_entry(self, first=False): - if not self.check_token(FlowSequenceEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow sequence", self.marks[-1], - "expected ',' or ']', but got %r" % token.id, token.start_mark) - - if self.check_token(KeyToken): - token = self.peek_token() - event = MappingStartEvent(None, None, True, - token.start_mark, token.end_mark, - flow_style=True) - self.state = self.parse_flow_sequence_entry_mapping_key - return event - elif not self.check_token(FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry) - return self.parse_flow_node() - token = self.get_token() - event = SequenceEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_sequence_entry_mapping_key(self): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_value - return self.process_empty_scalar(token.end_mark) - - def parse_flow_sequence_entry_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowSequenceEndToken): - self.states.append(self.parse_flow_sequence_entry_mapping_end) - return self.parse_flow_node() - else: - self.state = self.parse_flow_sequence_entry_mapping_end - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_sequence_entry_mapping_end - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_sequence_entry_mapping_end(self): - self.state = self.parse_flow_sequence_entry - token = self.peek_token() - return MappingEndEvent(token.start_mark, token.start_mark) - - # flow_mapping ::= FLOW-MAPPING-START - # (flow_mapping_entry FLOW-ENTRY)* - # flow_mapping_entry? - # FLOW-MAPPING-END - # flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - - def parse_flow_mapping_first_key(self): - token = self.get_token() - self.marks.append(token.start_mark) - return self.parse_flow_mapping_key(first=True) - - def parse_flow_mapping_key(self, first=False): - if not self.check_token(FlowMappingEndToken): - if not first: - if self.check_token(FlowEntryToken): - self.get_token() - else: - token = self.peek_token() - raise ParserError("while parsing a flow mapping", self.marks[-1], - "expected ',' or '}', but got %r" % token.id, token.start_mark) - if self.check_token(KeyToken): - token = self.get_token() - if not self.check_token(ValueToken, - FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_value) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_value - return self.process_empty_scalar(token.end_mark) - elif not self.check_token(FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_empty_value) - return self.parse_flow_node() - token = self.get_token() - event = MappingEndEvent(token.start_mark, token.end_mark) - self.state = self.states.pop() - self.marks.pop() - return event - - def parse_flow_mapping_value(self): - if self.check_token(ValueToken): - token = self.get_token() - if not self.check_token(FlowEntryToken, FlowMappingEndToken): - self.states.append(self.parse_flow_mapping_key) - return self.parse_flow_node() - else: - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(token.end_mark) - else: - self.state = self.parse_flow_mapping_key - token = self.peek_token() - return self.process_empty_scalar(token.start_mark) - - def 
parse_flow_mapping_empty_value(self): - self.state = self.parse_flow_mapping_key - return self.process_empty_scalar(self.peek_token().start_mark) - - def process_empty_scalar(self, mark): - return ScalarEvent(None, None, (True, False), '', mark, mark) - diff --git a/libs/PyYAML-3.10/lib3/yaml/reader.py b/libs/PyYAML-3.10/lib3/yaml/reader.py deleted file mode 100644 index f70e920..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/reader.py +++ /dev/null @@ -1,192 +0,0 @@ -# This module contains abstractions for the input stream. You don't have to -# looks further, there are no pretty code. -# -# We define two classes here. -# -# Mark(source, line, column) -# It's just a record and its only use is producing nice error messages. -# Parser does not use it for any other purposes. -# -# Reader(source, data) -# Reader determines the encoding of `data` and converts it to unicode. -# Reader provides the following methods and attributes: -# reader.peek(length=1) - return the next `length` characters -# reader.forward(length=1) - move the current position to `length` characters. -# reader.index - the number of the current character. -# reader.line, stream.column - the line and the column of the current character. 
- -__all__ = ['Reader', 'ReaderError'] - -from .error import YAMLError, Mark - -import codecs, re - -class ReaderError(YAMLError): - - def __init__(self, name, position, character, encoding, reason): - self.name = name - self.character = character - self.position = position - self.encoding = encoding - self.reason = reason - - def __str__(self): - if isinstance(self.character, bytes): - return "'%s' codec can't decode byte #x%02x: %s\n" \ - " in \"%s\", position %d" \ - % (self.encoding, ord(self.character), self.reason, - self.name, self.position) - else: - return "unacceptable character #x%04x: %s\n" \ - " in \"%s\", position %d" \ - % (self.character, self.reason, - self.name, self.position) - -class Reader(object): - # Reader: - # - determines the data encoding and converts it to a unicode string, - # - checks if characters are in allowed range, - # - adds '\0' to the end. - - # Reader accepts - # - a `bytes` object, - # - a `str` object, - # - a file-like object with its `read` method returning `str`, - # - a file-like object with its `read` method returning `unicode`. - - # Yeah, it's ugly and slow. 
- - def __init__(self, stream): - self.name = None - self.stream = None - self.stream_pointer = 0 - self.eof = True - self.buffer = '' - self.pointer = 0 - self.raw_buffer = None - self.raw_decode = None - self.encoding = None - self.index = 0 - self.line = 0 - self.column = 0 - if isinstance(stream, str): - self.name = "" - self.check_printable(stream) - self.buffer = stream+'\0' - elif isinstance(stream, bytes): - self.name = "" - self.raw_buffer = stream - self.determine_encoding() - else: - self.stream = stream - self.name = getattr(stream, 'name', "") - self.eof = False - self.raw_buffer = None - self.determine_encoding() - - def peek(self, index=0): - try: - return self.buffer[self.pointer+index] - except IndexError: - self.update(index+1) - return self.buffer[self.pointer+index] - - def prefix(self, length=1): - if self.pointer+length >= len(self.buffer): - self.update(length) - return self.buffer[self.pointer:self.pointer+length] - - def forward(self, length=1): - if self.pointer+length+1 >= len(self.buffer): - self.update(length+1) - while length: - ch = self.buffer[self.pointer] - self.pointer += 1 - self.index += 1 - if ch in '\n\x85\u2028\u2029' \ - or (ch == '\r' and self.buffer[self.pointer] != '\n'): - self.line += 1 - self.column = 0 - elif ch != '\uFEFF': - self.column += 1 - length -= 1 - - def get_mark(self): - if self.stream is None: - return Mark(self.name, self.index, self.line, self.column, - self.buffer, self.pointer) - else: - return Mark(self.name, self.index, self.line, self.column, - None, None) - - def determine_encoding(self): - while not self.eof and (self.raw_buffer is None or len(self.raw_buffer) < 2): - self.update_raw() - if isinstance(self.raw_buffer, bytes): - if self.raw_buffer.startswith(codecs.BOM_UTF16_LE): - self.raw_decode = codecs.utf_16_le_decode - self.encoding = 'utf-16-le' - elif self.raw_buffer.startswith(codecs.BOM_UTF16_BE): - self.raw_decode = codecs.utf_16_be_decode - self.encoding = 'utf-16-be' - else: - 
self.raw_decode = codecs.utf_8_decode - self.encoding = 'utf-8' - self.update(1) - - NON_PRINTABLE = re.compile('[^\x09\x0A\x0D\x20-\x7E\x85\xA0-\uD7FF\uE000-\uFFFD]') - def check_printable(self, data): - match = self.NON_PRINTABLE.search(data) - if match: - character = match.group() - position = self.index+(len(self.buffer)-self.pointer)+match.start() - raise ReaderError(self.name, position, ord(character), - 'unicode', "special characters are not allowed") - - def update(self, length): - if self.raw_buffer is None: - return - self.buffer = self.buffer[self.pointer:] - self.pointer = 0 - while len(self.buffer) < length: - if not self.eof: - self.update_raw() - if self.raw_decode is not None: - try: - data, converted = self.raw_decode(self.raw_buffer, - 'strict', self.eof) - except UnicodeDecodeError as exc: - character = self.raw_buffer[exc.start] - if self.stream is not None: - position = self.stream_pointer-len(self.raw_buffer)+exc.start - else: - position = exc.start - raise ReaderError(self.name, position, character, - exc.encoding, exc.reason) - else: - data = self.raw_buffer - converted = len(data) - self.check_printable(data) - self.buffer += data - self.raw_buffer = self.raw_buffer[converted:] - if self.eof: - self.buffer += '\0' - self.raw_buffer = None - break - - def update_raw(self, size=4096): - data = self.stream.read(size) - if self.raw_buffer is None: - self.raw_buffer = data - else: - self.raw_buffer += data - self.stream_pointer += len(data) - if not data: - self.eof = True - -#try: -# import psyco -# psyco.bind(Reader) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/lib3/yaml/representer.py b/libs/PyYAML-3.10/lib3/yaml/representer.py deleted file mode 100644 index 67cd6fd..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/representer.py +++ /dev/null @@ -1,374 +0,0 @@ - -__all__ = ['BaseRepresenter', 'SafeRepresenter', 'Representer', - 'RepresenterError'] - -from .error import * -from .nodes import * - -import datetime, sys, copyreg, types, 
base64 - -class RepresenterError(YAMLError): - pass - -class BaseRepresenter: - - yaml_representers = {} - yaml_multi_representers = {} - - def __init__(self, default_style=None, default_flow_style=None): - self.default_style = default_style - self.default_flow_style = default_flow_style - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent(self, data): - node = self.represent_data(data) - self.serialize(node) - self.represented_objects = {} - self.object_keeper = [] - self.alias_key = None - - def represent_data(self, data): - if self.ignore_aliases(data): - self.alias_key = None - else: - self.alias_key = id(data) - if self.alias_key is not None: - if self.alias_key in self.represented_objects: - node = self.represented_objects[self.alias_key] - #if node is None: - # raise RepresenterError("recursive objects are not allowed: %r" % data) - return node - #self.represented_objects[alias_key] = None - self.object_keeper.append(data) - data_types = type(data).__mro__ - if data_types[0] in self.yaml_representers: - node = self.yaml_representers[data_types[0]](self, data) - else: - for data_type in data_types: - if data_type in self.yaml_multi_representers: - node = self.yaml_multi_representers[data_type](self, data) - break - else: - if None in self.yaml_multi_representers: - node = self.yaml_multi_representers[None](self, data) - elif None in self.yaml_representers: - node = self.yaml_representers[None](self, data) - else: - node = ScalarNode(None, str(data)) - #if alias_key is not None: - # self.represented_objects[alias_key] = node - return node - - @classmethod - def add_representer(cls, data_type, representer): - if not 'yaml_representers' in cls.__dict__: - cls.yaml_representers = cls.yaml_representers.copy() - cls.yaml_representers[data_type] = representer - - @classmethod - def add_multi_representer(cls, data_type, representer): - if not 'yaml_multi_representers' in cls.__dict__: - cls.yaml_multi_representers = 
cls.yaml_multi_representers.copy() - cls.yaml_multi_representers[data_type] = representer - - def represent_scalar(self, tag, value, style=None): - if style is None: - style = self.default_style - node = ScalarNode(tag, value, style=style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - return node - - def represent_sequence(self, tag, sequence, flow_style=None): - value = [] - node = SequenceNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - for item in sequence: - node_item = self.represent_data(item) - if not (isinstance(node_item, ScalarNode) and not node_item.style): - best_style = False - value.append(node_item) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def represent_mapping(self, tag, mapping, flow_style=None): - value = [] - node = MappingNode(tag, value, flow_style=flow_style) - if self.alias_key is not None: - self.represented_objects[self.alias_key] = node - best_style = True - if hasattr(mapping, 'items'): - mapping = list(mapping.items()) - try: - mapping = sorted(mapping) - except TypeError: - pass - for item_key, item_value in mapping: - node_key = self.represent_data(item_key) - node_value = self.represent_data(item_value) - if not (isinstance(node_key, ScalarNode) and not node_key.style): - best_style = False - if not (isinstance(node_value, ScalarNode) and not node_value.style): - best_style = False - value.append((node_key, node_value)) - if flow_style is None: - if self.default_flow_style is not None: - node.flow_style = self.default_flow_style - else: - node.flow_style = best_style - return node - - def ignore_aliases(self, data): - return False - -class SafeRepresenter(BaseRepresenter): - - def ignore_aliases(self, data): - if data in [None, ()]: - return True - if isinstance(data, (str, 
bytes, bool, int, float)): - return True - - def represent_none(self, data): - return self.represent_scalar('tag:yaml.org,2002:null', 'null') - - def represent_str(self, data): - return self.represent_scalar('tag:yaml.org,2002:str', data) - - def represent_binary(self, data): - if hasattr(base64, 'encodebytes'): - data = base64.encodebytes(data).decode('ascii') - else: - data = base64.encodestring(data).decode('ascii') - return self.represent_scalar('tag:yaml.org,2002:binary', data, style='|') - - def represent_bool(self, data): - if data: - value = 'true' - else: - value = 'false' - return self.represent_scalar('tag:yaml.org,2002:bool', value) - - def represent_int(self, data): - return self.represent_scalar('tag:yaml.org,2002:int', str(data)) - - inf_value = 1e300 - while repr(inf_value) != repr(inf_value*inf_value): - inf_value *= inf_value - - def represent_float(self, data): - if data != data or (data == 0.0 and data == 1.0): - value = '.nan' - elif data == self.inf_value: - value = '.inf' - elif data == -self.inf_value: - value = '-.inf' - else: - value = repr(data).lower() - # Note that in some cases `repr(data)` represents a float number - # without the decimal parts. For instance: - # >>> repr(1e17) - # '1e17' - # Unfortunately, this is not a valid float representation according - # to the definition of the `!!float` tag. We fix this by adding - # '.0' before the 'e' symbol. - if '.' 
not in value and 'e' in value: - value = value.replace('e', '.0e', 1) - return self.represent_scalar('tag:yaml.org,2002:float', value) - - def represent_list(self, data): - #pairs = (len(data) > 0 and isinstance(data, list)) - #if pairs: - # for item in data: - # if not isinstance(item, tuple) or len(item) != 2: - # pairs = False - # break - #if not pairs: - return self.represent_sequence('tag:yaml.org,2002:seq', data) - #value = [] - #for item_key, item_value in data: - # value.append(self.represent_mapping(u'tag:yaml.org,2002:map', - # [(item_key, item_value)])) - #return SequenceNode(u'tag:yaml.org,2002:pairs', value) - - def represent_dict(self, data): - return self.represent_mapping('tag:yaml.org,2002:map', data) - - def represent_set(self, data): - value = {} - for key in data: - value[key] = None - return self.represent_mapping('tag:yaml.org,2002:set', value) - - def represent_date(self, data): - value = data.isoformat() - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_datetime(self, data): - value = data.isoformat(' ') - return self.represent_scalar('tag:yaml.org,2002:timestamp', value) - - def represent_yaml_object(self, tag, data, cls, flow_style=None): - if hasattr(data, '__getstate__'): - state = data.__getstate__() - else: - state = data.__dict__.copy() - return self.represent_mapping(tag, state, flow_style=flow_style) - - def represent_undefined(self, data): - raise RepresenterError("cannot represent an object: %s" % data) - -SafeRepresenter.add_representer(type(None), - SafeRepresenter.represent_none) - -SafeRepresenter.add_representer(str, - SafeRepresenter.represent_str) - -SafeRepresenter.add_representer(bytes, - SafeRepresenter.represent_binary) - -SafeRepresenter.add_representer(bool, - SafeRepresenter.represent_bool) - -SafeRepresenter.add_representer(int, - SafeRepresenter.represent_int) - -SafeRepresenter.add_representer(float, - SafeRepresenter.represent_float) - -SafeRepresenter.add_representer(list, - 
SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(tuple, - SafeRepresenter.represent_list) - -SafeRepresenter.add_representer(dict, - SafeRepresenter.represent_dict) - -SafeRepresenter.add_representer(set, - SafeRepresenter.represent_set) - -SafeRepresenter.add_representer(datetime.date, - SafeRepresenter.represent_date) - -SafeRepresenter.add_representer(datetime.datetime, - SafeRepresenter.represent_datetime) - -SafeRepresenter.add_representer(None, - SafeRepresenter.represent_undefined) - -class Representer(SafeRepresenter): - - def represent_complex(self, data): - if data.imag == 0.0: - data = '%r' % data.real - elif data.real == 0.0: - data = '%rj' % data.imag - elif data.imag > 0: - data = '%r+%rj' % (data.real, data.imag) - else: - data = '%r%rj' % (data.real, data.imag) - return self.represent_scalar('tag:yaml.org,2002:python/complex', data) - - def represent_tuple(self, data): - return self.represent_sequence('tag:yaml.org,2002:python/tuple', data) - - def represent_name(self, data): - name = '%s.%s' % (data.__module__, data.__name__) - return self.represent_scalar('tag:yaml.org,2002:python/name:'+name, '') - - def represent_module(self, data): - return self.represent_scalar( - 'tag:yaml.org,2002:python/module:'+data.__name__, '') - - def represent_object(self, data): - # We use __reduce__ API to save the data. data.__reduce__ returns - # a tuple of length 2-5: - # (function, args, state, listitems, dictitems) - - # For reconstructing, we calls function(*args), then set its state, - # listitems, and dictitems if they are not None. - - # A special case is when function.__name__ == '__newobj__'. In this - # case we create the object with args[0].__new__(*args). - - # Another special case is when __reduce__ returns a string - we don't - # support it. - - # We produce a !!python/object, !!python/object/new or - # !!python/object/apply node. 
- - cls = type(data) - if cls in copyreg.dispatch_table: - reduce = copyreg.dispatch_table[cls](data) - elif hasattr(data, '__reduce_ex__'): - reduce = data.__reduce_ex__(2) - elif hasattr(data, '__reduce__'): - reduce = data.__reduce__() - else: - raise RepresenterError("cannot represent object: %r" % data) - reduce = (list(reduce)+[None]*5)[:5] - function, args, state, listitems, dictitems = reduce - args = list(args) - if state is None: - state = {} - if listitems is not None: - listitems = list(listitems) - if dictitems is not None: - dictitems = dict(dictitems) - if function.__name__ == '__newobj__': - function = args[0] - args = args[1:] - tag = 'tag:yaml.org,2002:python/object/new:' - newobj = True - else: - tag = 'tag:yaml.org,2002:python/object/apply:' - newobj = False - function_name = '%s.%s' % (function.__module__, function.__name__) - if not args and not listitems and not dictitems \ - and isinstance(state, dict) and newobj: - return self.represent_mapping( - 'tag:yaml.org,2002:python/object:'+function_name, state) - if not listitems and not dictitems \ - and isinstance(state, dict) and not state: - return self.represent_sequence(tag+function_name, args) - value = {} - if args: - value['args'] = args - if state or not isinstance(state, dict): - value['state'] = state - if listitems: - value['listitems'] = listitems - if dictitems: - value['dictitems'] = dictitems - return self.represent_mapping(tag+function_name, value) - -Representer.add_representer(complex, - Representer.represent_complex) - -Representer.add_representer(tuple, - Representer.represent_tuple) - -Representer.add_representer(type, - Representer.represent_name) - -Representer.add_representer(types.FunctionType, - Representer.represent_name) - -Representer.add_representer(types.BuiltinFunctionType, - Representer.represent_name) - -Representer.add_representer(types.ModuleType, - Representer.represent_module) - -Representer.add_multi_representer(object, - Representer.represent_object) - diff 
--git a/libs/PyYAML-3.10/lib3/yaml/resolver.py b/libs/PyYAML-3.10/lib3/yaml/resolver.py deleted file mode 100644 index 0eece25..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/resolver.py +++ /dev/null @@ -1,224 +0,0 @@ - -__all__ = ['BaseResolver', 'Resolver'] - -from .error import * -from .nodes import * - -import re - -class ResolverError(YAMLError): - pass - -class BaseResolver: - - DEFAULT_SCALAR_TAG = 'tag:yaml.org,2002:str' - DEFAULT_SEQUENCE_TAG = 'tag:yaml.org,2002:seq' - DEFAULT_MAPPING_TAG = 'tag:yaml.org,2002:map' - - yaml_implicit_resolvers = {} - yaml_path_resolvers = {} - - def __init__(self): - self.resolver_exact_paths = [] - self.resolver_prefix_paths = [] - - @classmethod - def add_implicit_resolver(cls, tag, regexp, first): - if not 'yaml_implicit_resolvers' in cls.__dict__: - cls.yaml_implicit_resolvers = cls.yaml_implicit_resolvers.copy() - if first is None: - first = [None] - for ch in first: - cls.yaml_implicit_resolvers.setdefault(ch, []).append((tag, regexp)) - - @classmethod - def add_path_resolver(cls, tag, path, kind=None): - # Note: `add_path_resolver` is experimental. The API could be changed. - # `new_path` is a pattern that is matched against the path from the - # root to the node that is being considered. `node_path` elements are - # tuples `(node_check, index_check)`. `node_check` is a node class: - # `ScalarNode`, `SequenceNode`, `MappingNode` or `None`. `None` - # matches any kind of a node. `index_check` could be `None`, a boolean - # value, a string value, or a number. `None` and `False` match against - # any _value_ of sequence and mapping nodes. `True` matches against - # any _key_ of a mapping node. A string `index_check` matches against - # a mapping value that corresponds to a scalar key which content is - # equal to the `index_check` value. An integer `index_check` matches - # against a sequence value with the index equal to `index_check`. 
- if not 'yaml_path_resolvers' in cls.__dict__: - cls.yaml_path_resolvers = cls.yaml_path_resolvers.copy() - new_path = [] - for element in path: - if isinstance(element, (list, tuple)): - if len(element) == 2: - node_check, index_check = element - elif len(element) == 1: - node_check = element[0] - index_check = True - else: - raise ResolverError("Invalid path element: %s" % element) - else: - node_check = None - index_check = element - if node_check is str: - node_check = ScalarNode - elif node_check is list: - node_check = SequenceNode - elif node_check is dict: - node_check = MappingNode - elif node_check not in [ScalarNode, SequenceNode, MappingNode] \ - and not isinstance(node_check, str) \ - and node_check is not None: - raise ResolverError("Invalid node checker: %s" % node_check) - if not isinstance(index_check, (str, int)) \ - and index_check is not None: - raise ResolverError("Invalid index checker: %s" % index_check) - new_path.append((node_check, index_check)) - if kind is str: - kind = ScalarNode - elif kind is list: - kind = SequenceNode - elif kind is dict: - kind = MappingNode - elif kind not in [ScalarNode, SequenceNode, MappingNode] \ - and kind is not None: - raise ResolverError("Invalid node kind: %s" % kind) - cls.yaml_path_resolvers[tuple(new_path), kind] = tag - - def descend_resolver(self, current_node, current_index): - if not self.yaml_path_resolvers: - return - exact_paths = {} - prefix_paths = [] - if current_node: - depth = len(self.resolver_prefix_paths) - for path, kind in self.resolver_prefix_paths[-1]: - if self.check_resolver_prefix(depth, path, kind, - current_node, current_index): - if len(path) > depth: - prefix_paths.append((path, kind)) - else: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - for path, kind in self.yaml_path_resolvers: - if not path: - exact_paths[kind] = self.yaml_path_resolvers[path, kind] - else: - prefix_paths.append((path, kind)) - self.resolver_exact_paths.append(exact_paths) - 
self.resolver_prefix_paths.append(prefix_paths) - - def ascend_resolver(self): - if not self.yaml_path_resolvers: - return - self.resolver_exact_paths.pop() - self.resolver_prefix_paths.pop() - - def check_resolver_prefix(self, depth, path, kind, - current_node, current_index): - node_check, index_check = path[depth-1] - if isinstance(node_check, str): - if current_node.tag != node_check: - return - elif node_check is not None: - if not isinstance(current_node, node_check): - return - if index_check is True and current_index is not None: - return - if (index_check is False or index_check is None) \ - and current_index is None: - return - if isinstance(index_check, str): - if not (isinstance(current_index, ScalarNode) - and index_check == current_index.value): - return - elif isinstance(index_check, int) and not isinstance(index_check, bool): - if index_check != current_index: - return - return True - - def resolve(self, kind, value, implicit): - if kind is ScalarNode and implicit[0]: - if value == '': - resolvers = self.yaml_implicit_resolvers.get('', []) - else: - resolvers = self.yaml_implicit_resolvers.get(value[0], []) - resolvers += self.yaml_implicit_resolvers.get(None, []) - for tag, regexp in resolvers: - if regexp.match(value): - return tag - implicit = implicit[1] - if self.yaml_path_resolvers: - exact_paths = self.resolver_exact_paths[-1] - if kind in exact_paths: - return exact_paths[kind] - if None in exact_paths: - return exact_paths[None] - if kind is ScalarNode: - return self.DEFAULT_SCALAR_TAG - elif kind is SequenceNode: - return self.DEFAULT_SEQUENCE_TAG - elif kind is MappingNode: - return self.DEFAULT_MAPPING_TAG - -class Resolver(BaseResolver): - pass - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:bool', - re.compile(r'''^(?:yes|Yes|YES|no|No|NO - |true|True|TRUE|false|False|FALSE - |on|On|ON|off|Off|OFF)$''', re.X), - list('yYnNtTfFoO')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:float', - 
re.compile(r'''^(?:[-+]?(?:[0-9][0-9_]*)\.[0-9_]*(?:[eE][-+][0-9]+)? - |\.[0-9_]+(?:[eE][-+][0-9]+)? - |[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]* - |[-+]?\.(?:inf|Inf|INF) - |\.(?:nan|NaN|NAN))$''', re.X), - list('-+0123456789.')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:int', - re.compile(r'''^(?:[-+]?0b[0-1_]+ - |[-+]?0[0-7_]+ - |[-+]?(?:0|[1-9][0-9_]*) - |[-+]?0x[0-9a-fA-F_]+ - |[-+]?[1-9][0-9_]*(?::[0-5]?[0-9])+)$''', re.X), - list('-+0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:merge', - re.compile(r'^(?:<<)$'), - ['<']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:null', - re.compile(r'''^(?: ~ - |null|Null|NULL - | )$''', re.X), - ['~', 'n', 'N', '']) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:timestamp', - re.compile(r'''^(?:[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9] - |[0-9][0-9][0-9][0-9] -[0-9][0-9]? -[0-9][0-9]? - (?:[Tt]|[ \t]+)[0-9][0-9]? - :[0-9][0-9] :[0-9][0-9] (?:\.[0-9]*)? - (?:[ \t]*(?:Z|[-+][0-9][0-9]?(?::[0-9][0-9])?))?)$''', re.X), - list('0123456789')) - -Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:value', - re.compile(r'^(?:=)$'), - ['=']) - -# The following resolver is only for documentation purposes. It cannot work -# because plain scalars cannot start with '!', '&', or '*'. 
-Resolver.add_implicit_resolver( - 'tag:yaml.org,2002:yaml', - re.compile(r'^(?:!|&|\*)$'), - list('!&*')) - diff --git a/libs/PyYAML-3.10/lib3/yaml/scanner.py b/libs/PyYAML-3.10/lib3/yaml/scanner.py deleted file mode 100644 index 494d975..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/scanner.py +++ /dev/null @@ -1,1448 +0,0 @@ - -# Scanner produces tokens of the following types: -# STREAM-START -# STREAM-END -# DIRECTIVE(name, value) -# DOCUMENT-START -# DOCUMENT-END -# BLOCK-SEQUENCE-START -# BLOCK-MAPPING-START -# BLOCK-END -# FLOW-SEQUENCE-START -# FLOW-MAPPING-START -# FLOW-SEQUENCE-END -# FLOW-MAPPING-END -# BLOCK-ENTRY -# FLOW-ENTRY -# KEY -# VALUE -# ALIAS(value) -# ANCHOR(value) -# TAG(value) -# SCALAR(value, plain, style) -# -# Read comments in the Scanner code for more details. -# - -__all__ = ['Scanner', 'ScannerError'] - -from .error import MarkedYAMLError -from .tokens import * - -class ScannerError(MarkedYAMLError): - pass - -class SimpleKey: - # See below simple keys treatment. - - def __init__(self, token_number, required, index, line, column, mark): - self.token_number = token_number - self.required = required - self.index = index - self.line = line - self.column = column - self.mark = mark - -class Scanner: - - def __init__(self): - """Initialize the scanner.""" - # It is assumed that Scanner and Reader will have a common descendant. - # Reader do the dirty work of checking for BOM and converting the - # input data to Unicode. It also adds NUL to the end. - # - # Reader supports the following methods - # self.peek(i=0) # peek the next i-th character - # self.prefix(l=1) # peek the next l characters - # self.forward(l=1) # read the next l characters and move the pointer. - - # Had we reached the end of the stream? - self.done = False - - # The number of unclosed '{' and '['. `flow_level == 0` means block - # context. - self.flow_level = 0 - - # List of processed tokens that are not yet emitted. - self.tokens = [] - - # Add the STREAM-START token. 
- self.fetch_stream_start() - - # Number of tokens that were emitted through the `get_token` method. - self.tokens_taken = 0 - - # The current indentation level. - self.indent = -1 - - # Past indentation levels. - self.indents = [] - - # Variables related to simple keys treatment. - - # A simple key is a key that is not denoted by the '?' indicator. - # Example of simple keys: - # --- - # block simple key: value - # ? not a simple key: - # : { flow simple key: value } - # We emit the KEY token before all keys, so when we find a potential - # simple key, we try to locate the corresponding ':' indicator. - # Simple keys should be limited to a single line and 1024 characters. - - # Can a simple key start at the current position? A simple key may - # start: - # - at the beginning of the line, not counting indentation spaces - # (in block context), - # - after '{', '[', ',' (in the flow context), - # - after '?', ':', '-' (in the block context). - # In the block context, this flag also signifies if a block collection - # may start at the current position. - self.allow_simple_key = True - - # Keep track of possible simple keys. This is a dictionary. The key - # is `flow_level`; there can be no more that one possible simple key - # for each level. The value is a SimpleKey record: - # (token_number, required, index, line, column, mark) - # A simple key may start with ALIAS, ANCHOR, TAG, SCALAR(flow), - # '[', or '{' tokens. - self.possible_simple_keys = {} - - # Public methods. - - def check_token(self, *choices): - # Check if the next token is one of the given types. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - # Return the next token, but do not delete if from the queue. 
- while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - return self.tokens[0] - - def get_token(self): - # Return the next token. - while self.need_more_tokens(): - self.fetch_more_tokens() - if self.tokens: - self.tokens_taken += 1 - return self.tokens.pop(0) - - # Private methods. - - def need_more_tokens(self): - if self.done: - return False - if not self.tokens: - return True - # The current token may be a potential simple key, so we - # need to look further. - self.stale_possible_simple_keys() - if self.next_possible_simple_key() == self.tokens_taken: - return True - - def fetch_more_tokens(self): - - # Eat whitespaces and comments until we reach the next token. - self.scan_to_next_token() - - # Remove obsolete possible simple keys. - self.stale_possible_simple_keys() - - # Compare the current indentation and column. It may add some tokens - # and decrease the current indentation level. - self.unwind_indent(self.column) - - # Peek the next character. - ch = self.peek() - - # Is it the end of stream? - if ch == '\0': - return self.fetch_stream_end() - - # Is it a directive? - if ch == '%' and self.check_directive(): - return self.fetch_directive() - - # Is it the document start? - if ch == '-' and self.check_document_start(): - return self.fetch_document_start() - - # Is it the document end? - if ch == '.' and self.check_document_end(): - return self.fetch_document_end() - - # TODO: support for BOM within a stream. - #if ch == '\uFEFF': - # return self.fetch_bom() <-- issue BOMToken - - # Note: the order of the following checks is NOT significant. - - # Is it the flow sequence start indicator? - if ch == '[': - return self.fetch_flow_sequence_start() - - # Is it the flow mapping start indicator? - if ch == '{': - return self.fetch_flow_mapping_start() - - # Is it the flow sequence end indicator? - if ch == ']': - return self.fetch_flow_sequence_end() - - # Is it the flow mapping end indicator? 
- if ch == '}': - return self.fetch_flow_mapping_end() - - # Is it the flow entry indicator? - if ch == ',': - return self.fetch_flow_entry() - - # Is it the block entry indicator? - if ch == '-' and self.check_block_entry(): - return self.fetch_block_entry() - - # Is it the key indicator? - if ch == '?' and self.check_key(): - return self.fetch_key() - - # Is it the value indicator? - if ch == ':' and self.check_value(): - return self.fetch_value() - - # Is it an alias? - if ch == '*': - return self.fetch_alias() - - # Is it an anchor? - if ch == '&': - return self.fetch_anchor() - - # Is it a tag? - if ch == '!': - return self.fetch_tag() - - # Is it a literal scalar? - if ch == '|' and not self.flow_level: - return self.fetch_literal() - - # Is it a folded scalar? - if ch == '>' and not self.flow_level: - return self.fetch_folded() - - # Is it a single quoted scalar? - if ch == '\'': - return self.fetch_single() - - # Is it a double quoted scalar? - if ch == '\"': - return self.fetch_double() - - # It must be a plain scalar then. - if self.check_plain(): - return self.fetch_plain() - - # No? It's an error. Let's produce a nice error message. - raise ScannerError("while scanning for the next token", None, - "found character %r that cannot start any token" % ch, - self.get_mark()) - - # Simple keys treatment. - - def next_possible_simple_key(self): - # Return the number of the nearest possible simple key. Actually we - # don't need to loop through the whole dictionary. 
We may replace it - # with the following code: - # if not self.possible_simple_keys: - # return None - # return self.possible_simple_keys[ - # min(self.possible_simple_keys.keys())].token_number - min_token_number = None - for level in self.possible_simple_keys: - key = self.possible_simple_keys[level] - if min_token_number is None or key.token_number < min_token_number: - min_token_number = key.token_number - return min_token_number - - def stale_possible_simple_keys(self): - # Remove entries that are no longer possible simple keys. According to - # the YAML specification, simple keys - # - should be limited to a single line, - # - should be no longer than 1024 characters. - # Disabling this procedure will allow simple keys of any length and - # height (may cause problems if indentation is broken though). - for level in list(self.possible_simple_keys): - key = self.possible_simple_keys[level] - if key.line != self.line \ - or self.index-key.index > 1024: - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - del self.possible_simple_keys[level] - - def save_possible_simple_key(self): - # The next token may start a simple key. We check if it's possible - # and save its position. This function is called for - # ALIAS, ANCHOR, TAG, SCALAR(flow), '[', and '{'. - - # Check if a simple key is required at the current position. - required = not self.flow_level and self.indent == self.column - - # A simple key is required only if it is the first token in the current - # line. Therefore it is always allowed. - assert self.allow_simple_key or not required - - # The next token might be a simple key. Let's save it's number and - # position. 
- if self.allow_simple_key: - self.remove_possible_simple_key() - token_number = self.tokens_taken+len(self.tokens) - key = SimpleKey(token_number, required, - self.index, self.line, self.column, self.get_mark()) - self.possible_simple_keys[self.flow_level] = key - - def remove_possible_simple_key(self): - # Remove the saved possible key position at the current flow level. - if self.flow_level in self.possible_simple_keys: - key = self.possible_simple_keys[self.flow_level] - - if key.required: - raise ScannerError("while scanning a simple key", key.mark, - "could not found expected ':'", self.get_mark()) - - del self.possible_simple_keys[self.flow_level] - - # Indentation functions. - - def unwind_indent(self, column): - - ## In flow context, tokens should respect indentation. - ## Actually the condition should be `self.indent >= column` according to - ## the spec. But this condition will prohibit intuitively correct - ## constructions such as - ## key : { - ## } - #if self.flow_level and self.indent > column: - # raise ScannerError(None, None, - # "invalid intendation or unclosed '[' or '{'", - # self.get_mark()) - - # In the flow context, indentation is ignored. We make the scanner less - # restrictive then specification requires. - if self.flow_level: - return - - # In block context, we may need to issue the BLOCK-END tokens. - while self.indent > column: - mark = self.get_mark() - self.indent = self.indents.pop() - self.tokens.append(BlockEndToken(mark, mark)) - - def add_indent(self, column): - # Check if we need to increase indentation. - if self.indent < column: - self.indents.append(self.indent) - self.indent = column - return True - return False - - # Fetchers. - - def fetch_stream_start(self): - # We always add STREAM-START as the first token and STREAM-END as the - # last token. - - # Read the token. - mark = self.get_mark() - - # Add STREAM-START. 
- self.tokens.append(StreamStartToken(mark, mark, - encoding=self.encoding)) - - - def fetch_stream_end(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - self.possible_simple_keys = {} - - # Read the token. - mark = self.get_mark() - - # Add STREAM-END. - self.tokens.append(StreamEndToken(mark, mark)) - - # The steam is finished. - self.done = True - - def fetch_directive(self): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Scan and add DIRECTIVE. - self.tokens.append(self.scan_directive()) - - def fetch_document_start(self): - self.fetch_document_indicator(DocumentStartToken) - - def fetch_document_end(self): - self.fetch_document_indicator(DocumentEndToken) - - def fetch_document_indicator(self, TokenClass): - - # Set the current intendation to -1. - self.unwind_indent(-1) - - # Reset simple keys. Note that there could not be a block collection - # after '---'. - self.remove_possible_simple_key() - self.allow_simple_key = False - - # Add DOCUMENT-START or DOCUMENT-END. - start_mark = self.get_mark() - self.forward(3) - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_start(self): - self.fetch_flow_collection_start(FlowSequenceStartToken) - - def fetch_flow_mapping_start(self): - self.fetch_flow_collection_start(FlowMappingStartToken) - - def fetch_flow_collection_start(self, TokenClass): - - # '[' and '{' may start a simple key. - self.save_possible_simple_key() - - # Increase the flow level. - self.flow_level += 1 - - # Simple keys are allowed after '[' and '{'. - self.allow_simple_key = True - - # Add FLOW-SEQUENCE-START or FLOW-MAPPING-START. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_sequence_end(self): - self.fetch_flow_collection_end(FlowSequenceEndToken) - - def fetch_flow_mapping_end(self): - self.fetch_flow_collection_end(FlowMappingEndToken) - - def fetch_flow_collection_end(self, TokenClass): - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Decrease the flow level. - self.flow_level -= 1 - - # No simple keys after ']' or '}'. - self.allow_simple_key = False - - # Add FLOW-SEQUENCE-END or FLOW-MAPPING-END. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(TokenClass(start_mark, end_mark)) - - def fetch_flow_entry(self): - - # Simple keys are allowed after ','. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add FLOW-ENTRY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(FlowEntryToken(start_mark, end_mark)) - - def fetch_block_entry(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a new entry? - if not self.allow_simple_key: - raise ScannerError(None, None, - "sequence entries are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-SEQUENCE-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockSequenceStartToken(mark, mark)) - - # It's an error for the block entry to occur in the flow context, - # but we let the parser detect this. - else: - pass - - # Simple keys are allowed after '-'. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add BLOCK-ENTRY. 
- start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(BlockEntryToken(start_mark, end_mark)) - - def fetch_key(self): - - # Block context needs additional checks. - if not self.flow_level: - - # Are we allowed to start a key (not nessesary a simple)? - if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping keys are not allowed here", - self.get_mark()) - - # We may need to add BLOCK-MAPPING-START. - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after '?' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add KEY. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(KeyToken(start_mark, end_mark)) - - def fetch_value(self): - - # Do we determine a simple key? - if self.flow_level in self.possible_simple_keys: - - # Add KEY. - key = self.possible_simple_keys[self.flow_level] - del self.possible_simple_keys[self.flow_level] - self.tokens.insert(key.token_number-self.tokens_taken, - KeyToken(key.mark, key.mark)) - - # If this key starts a new block mapping, we need to add - # BLOCK-MAPPING-START. - if not self.flow_level: - if self.add_indent(key.column): - self.tokens.insert(key.token_number-self.tokens_taken, - BlockMappingStartToken(key.mark, key.mark)) - - # There cannot be two simple keys one after another. - self.allow_simple_key = False - - # It must be a part of a complex key. - else: - - # Block context needs additional checks. - # (Do we really need them? They will be catched by the parser - # anyway.) - if not self.flow_level: - - # We are allowed to start a complex value if and only if - # we can start a simple key. 
- if not self.allow_simple_key: - raise ScannerError(None, None, - "mapping values are not allowed here", - self.get_mark()) - - # If this value starts a new block mapping, we need to add - # BLOCK-MAPPING-START. It will be detected as an error later by - # the parser. - if not self.flow_level: - if self.add_indent(self.column): - mark = self.get_mark() - self.tokens.append(BlockMappingStartToken(mark, mark)) - - # Simple keys are allowed after ':' in the block context. - self.allow_simple_key = not self.flow_level - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Add VALUE. - start_mark = self.get_mark() - self.forward() - end_mark = self.get_mark() - self.tokens.append(ValueToken(start_mark, end_mark)) - - def fetch_alias(self): - - # ALIAS could be a simple key. - self.save_possible_simple_key() - - # No simple keys after ALIAS. - self.allow_simple_key = False - - # Scan and add ALIAS. - self.tokens.append(self.scan_anchor(AliasToken)) - - def fetch_anchor(self): - - # ANCHOR could start a simple key. - self.save_possible_simple_key() - - # No simple keys after ANCHOR. - self.allow_simple_key = False - - # Scan and add ANCHOR. - self.tokens.append(self.scan_anchor(AnchorToken)) - - def fetch_tag(self): - - # TAG could start a simple key. - self.save_possible_simple_key() - - # No simple keys after TAG. - self.allow_simple_key = False - - # Scan and add TAG. - self.tokens.append(self.scan_tag()) - - def fetch_literal(self): - self.fetch_block_scalar(style='|') - - def fetch_folded(self): - self.fetch_block_scalar(style='>') - - def fetch_block_scalar(self, style): - - # A simple key may follow a block scalar. - self.allow_simple_key = True - - # Reset possible simple key on the current level. - self.remove_possible_simple_key() - - # Scan and add SCALAR. 
- self.tokens.append(self.scan_block_scalar(style)) - - def fetch_single(self): - self.fetch_flow_scalar(style='\'') - - def fetch_double(self): - self.fetch_flow_scalar(style='"') - - def fetch_flow_scalar(self, style): - - # A flow scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after flow scalars. - self.allow_simple_key = False - - # Scan and add SCALAR. - self.tokens.append(self.scan_flow_scalar(style)) - - def fetch_plain(self): - - # A plain scalar could be a simple key. - self.save_possible_simple_key() - - # No simple keys after plain scalars. But note that `scan_plain` will - # change this flag if the scan is finished at the beginning of the - # line. - self.allow_simple_key = False - - # Scan and add SCALAR. May change `allow_simple_key`. - self.tokens.append(self.scan_plain()) - - # Checkers. - - def check_directive(self): - - # DIRECTIVE: ^ '%' ... - # The '%' indicator is already checked. - if self.column == 0: - return True - - def check_document_start(self): - - # DOCUMENT-START: ^ '---' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '---' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_document_end(self): - - # DOCUMENT-END: ^ '...' (' '|'\n') - if self.column == 0: - if self.prefix(3) == '...' \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return True - - def check_block_entry(self): - - # BLOCK-ENTRY: '-' (' '|'\n') - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_key(self): - - # KEY(flow context): '?' - if self.flow_level: - return True - - # KEY(block context): '?' 
(' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_value(self): - - # VALUE(flow context): ':' - if self.flow_level: - return True - - # VALUE(block context): ':' (' '|'\n') - else: - return self.peek(1) in '\0 \t\r\n\x85\u2028\u2029' - - def check_plain(self): - - # A plain scalar may start with any non-space character except: - # '-', '?', ':', ',', '[', ']', '{', '}', - # '#', '&', '*', '!', '|', '>', '\'', '\"', - # '%', '@', '`'. - # - # It may also start with - # '-', '?', ':' - # if it is followed by a non-space character. - # - # Note that we limit the last rule to the block context (except the - # '-' character) because we want the flow context to be space - # independent. - ch = self.peek() - return ch not in '\0 \t\r\n\x85\u2028\u2029-?:,[]{}#&*!|>\'\"%@`' \ - or (self.peek(1) not in '\0 \t\r\n\x85\u2028\u2029' - and (ch == '-' or (not self.flow_level and ch in '?:'))) - - # Scanners. - - def scan_to_next_token(self): - # We ignore spaces, line breaks and comments. - # If we find a line break in the block context, we set the flag - # `allow_simple_key` on. - # The byte order mark is stripped if it's the first character in the - # stream. We do not yet support BOM inside the stream as the - # specification requires. Any such mark will be considered as a part - # of the document. - # - # TODO: We need to make tab handling rules more sane. A good rule is - # Tabs cannot precede tokens - # BLOCK-SEQUENCE-START, BLOCK-MAPPING-START, BLOCK-END, - # KEY(block), VALUE(block), BLOCK-ENTRY - # So the checking code is - # if : - # self.allow_simple_keys = False - # We also need to add the check for `allow_simple_keys == True` to - # `unwind_indent` before issuing BLOCK-END. - # Scanners for block, flow, and plain scalars need to be modified. 
- - if self.index == 0 and self.peek() == '\uFEFF': - self.forward() - found = False - while not found: - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - if self.scan_line_break(): - if not self.flow_level: - self.allow_simple_key = True - else: - found = True - - def scan_directive(self): - # See the specification for details. - start_mark = self.get_mark() - self.forward() - name = self.scan_directive_name(start_mark) - value = None - if name == 'YAML': - value = self.scan_yaml_directive_value(start_mark) - end_mark = self.get_mark() - elif name == 'TAG': - value = self.scan_tag_directive_value(start_mark) - end_mark = self.get_mark() - else: - end_mark = self.get_mark() - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - self.scan_directive_ignored_line(start_mark) - return DirectiveToken(name, value, start_mark, end_mark) - - def scan_directive_name(self, start_mark): - # See the specification for details. - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - return value - - def scan_yaml_directive_value(self, start_mark): - # See the specification for details. 
- while self.peek() == ' ': - self.forward() - major = self.scan_yaml_directive_number(start_mark) - if self.peek() != '.': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or '.', but found %r" % self.peek(), - self.get_mark()) - self.forward() - minor = self.scan_yaml_directive_number(start_mark) - if self.peek() not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a digit or ' ', but found %r" % self.peek(), - self.get_mark()) - return (major, minor) - - def scan_yaml_directive_number(self, start_mark): - # See the specification for details. - ch = self.peek() - if not ('0' <= ch <= '9'): - raise ScannerError("while scanning a directive", start_mark, - "expected a digit, but found %r" % ch, self.get_mark()) - length = 0 - while '0' <= self.peek(length) <= '9': - length += 1 - value = int(self.prefix(length)) - self.forward(length) - return value - - def scan_tag_directive_value(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - handle = self.scan_tag_directive_handle(start_mark) - while self.peek() == ' ': - self.forward() - prefix = self.scan_tag_directive_prefix(start_mark) - return (handle, prefix) - - def scan_tag_directive_handle(self, start_mark): - # See the specification for details. - value = self.scan_tag_handle('directive', start_mark) - ch = self.peek() - if ch != ' ': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_tag_directive_prefix(self, start_mark): - # See the specification for details. 
- value = self.scan_tag_uri('directive', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - return value - - def scan_directive_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a directive", start_mark, - "expected a comment or a line break, but found %r" - % ch, self.get_mark()) - self.scan_line_break() - - def scan_anchor(self, TokenClass): - # The specification does not restrict characters for anchors and - # aliases. This may lead to problems, for instance, the document: - # [ *alias, value ] - # can be interpteted in two ways, as - # [ "value" ] - # and - # [ *alias , "value" ] - # Therefore we restrict aliases to numbers and ASCII letters. - start_mark = self.get_mark() - indicator = self.peek() - if indicator == '*': - name = 'alias' - else: - name = 'anchor' - self.forward() - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if not length: - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - value = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch not in '\0 \t\r\n\x85\u2028\u2029?:,]}%@`': - raise ScannerError("while scanning an %s" % name, start_mark, - "expected alphabetic or numeric character, but found %r" - % ch, self.get_mark()) - end_mark = self.get_mark() - return TokenClass(value, start_mark, end_mark) - - def scan_tag(self): - # See the specification for details. 
- start_mark = self.get_mark() - ch = self.peek(1) - if ch == '<': - handle = None - self.forward(2) - suffix = self.scan_tag_uri('tag', start_mark) - if self.peek() != '>': - raise ScannerError("while parsing a tag", start_mark, - "expected '>', but found %r" % self.peek(), - self.get_mark()) - self.forward() - elif ch in '\0 \t\r\n\x85\u2028\u2029': - handle = None - suffix = '!' - self.forward() - else: - length = 1 - use_handle = False - while ch not in '\0 \r\n\x85\u2028\u2029': - if ch == '!': - use_handle = True - break - length += 1 - ch = self.peek(length) - handle = '!' - if use_handle: - handle = self.scan_tag_handle('tag', start_mark) - else: - handle = '!' - self.forward() - suffix = self.scan_tag_uri('tag', start_mark) - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a tag", start_mark, - "expected ' ', but found %r" % ch, self.get_mark()) - value = (handle, suffix) - end_mark = self.get_mark() - return TagToken(value, start_mark, end_mark) - - def scan_block_scalar(self, style): - # See the specification for details. - - if style == '>': - folded = True - else: - folded = False - - chunks = [] - start_mark = self.get_mark() - - # Scan the header. - self.forward() - chomping, increment = self.scan_block_scalar_indicators(start_mark) - self.scan_block_scalar_ignored_line(start_mark) - - # Determine the indentation level and go to the first non-empty line. - min_indent = self.indent+1 - if min_indent < 1: - min_indent = 1 - if increment is None: - breaks, max_indent, end_mark = self.scan_block_scalar_indentation() - indent = max(min_indent, max_indent) - else: - indent = min_indent+increment-1 - breaks, end_mark = self.scan_block_scalar_breaks(indent) - line_break = '' - - # Scan the inner part of the block scalar. 
- while self.column == indent and self.peek() != '\0': - chunks.extend(breaks) - leading_non_space = self.peek() not in ' \t' - length = 0 - while self.peek(length) not in '\0\r\n\x85\u2028\u2029': - length += 1 - chunks.append(self.prefix(length)) - self.forward(length) - line_break = self.scan_line_break() - breaks, end_mark = self.scan_block_scalar_breaks(indent) - if self.column == indent and self.peek() != '\0': - - # Unfortunately, folding rules are ambiguous. - # - # This is the folding according to the specification: - - if folded and line_break == '\n' \ - and leading_non_space and self.peek() not in ' \t': - if not breaks: - chunks.append(' ') - else: - chunks.append(line_break) - - # This is Clark Evans's interpretation (also in the spec - # examples): - # - #if folded and line_break == '\n': - # if not breaks: - # if self.peek() not in ' \t': - # chunks.append(' ') - # else: - # chunks.append(line_break) - #else: - # chunks.append(line_break) - else: - break - - # Chomp the tail. - if chomping is not False: - chunks.append(line_break) - if chomping is True: - chunks.extend(breaks) - - # We are done. - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - def scan_block_scalar_indicators(self, start_mark): - # See the specification for details. 
- chomping = None - increment = None - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - elif ch in '0123456789': - increment = int(ch) - if increment == 0: - raise ScannerError("while scanning a block scalar", start_mark, - "expected indentation indicator in the range 1-9, but found 0", - self.get_mark()) - self.forward() - ch = self.peek() - if ch in '+-': - if ch == '+': - chomping = True - else: - chomping = False - self.forward() - ch = self.peek() - if ch not in '\0 \r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected chomping or indentation indicators, but found %r" - % ch, self.get_mark()) - return chomping, increment - - def scan_block_scalar_ignored_line(self, start_mark): - # See the specification for details. - while self.peek() == ' ': - self.forward() - if self.peek() == '#': - while self.peek() not in '\0\r\n\x85\u2028\u2029': - self.forward() - ch = self.peek() - if ch not in '\0\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a block scalar", start_mark, - "expected a comment or a line break, but found %r" % ch, - self.get_mark()) - self.scan_line_break() - - def scan_block_scalar_indentation(self): - # See the specification for details. - chunks = [] - max_indent = 0 - end_mark = self.get_mark() - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() != ' ': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - else: - self.forward() - if self.column > max_indent: - max_indent = self.column - return chunks, max_indent, end_mark - - def scan_block_scalar_breaks(self, indent): - # See the specification for details. 
- chunks = [] - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - while self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - end_mark = self.get_mark() - while self.column < indent and self.peek() == ' ': - self.forward() - return chunks, end_mark - - def scan_flow_scalar(self, style): - # See the specification for details. - # Note that we loose indentation rules for quoted scalars. Quoted - # scalars don't need to adhere indentation because " and ' clearly - # mark the beginning and the end of them. Therefore we are less - # restrictive then the specification requires. We only need to check - # that document separators are not included in scalars. - if style == '"': - double = True - else: - double = False - chunks = [] - start_mark = self.get_mark() - quote = self.peek() - self.forward() - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - while self.peek() != quote: - chunks.extend(self.scan_flow_scalar_spaces(double, start_mark)) - chunks.extend(self.scan_flow_scalar_non_spaces(double, start_mark)) - self.forward() - end_mark = self.get_mark() - return ScalarToken(''.join(chunks), False, start_mark, end_mark, - style) - - ESCAPE_REPLACEMENTS = { - '0': '\0', - 'a': '\x07', - 'b': '\x08', - 't': '\x09', - '\t': '\x09', - 'n': '\x0A', - 'v': '\x0B', - 'f': '\x0C', - 'r': '\x0D', - 'e': '\x1B', - ' ': '\x20', - '\"': '\"', - '\\': '\\', - 'N': '\x85', - '_': '\xA0', - 'L': '\u2028', - 'P': '\u2029', - } - - ESCAPE_CODES = { - 'x': 2, - 'u': 4, - 'U': 8, - } - - def scan_flow_scalar_non_spaces(self, double, start_mark): - # See the specification for details. 
- chunks = [] - while True: - length = 0 - while self.peek(length) not in '\'\"\\\0 \t\r\n\x85\u2028\u2029': - length += 1 - if length: - chunks.append(self.prefix(length)) - self.forward(length) - ch = self.peek() - if not double and ch == '\'' and self.peek(1) == '\'': - chunks.append('\'') - self.forward(2) - elif (double and ch == '\'') or (not double and ch in '\"\\'): - chunks.append(ch) - self.forward() - elif double and ch == '\\': - self.forward() - ch = self.peek() - if ch in self.ESCAPE_REPLACEMENTS: - chunks.append(self.ESCAPE_REPLACEMENTS[ch]) - self.forward() - elif ch in self.ESCAPE_CODES: - length = self.ESCAPE_CODES[ch] - self.forward() - for k in range(length): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "expected escape sequence of %d hexdecimal numbers, but found %r" % - (length, self.peek(k)), self.get_mark()) - code = int(self.prefix(length), 16) - chunks.append(chr(code)) - self.forward(length) - elif ch in '\r\n\x85\u2028\u2029': - self.scan_line_break() - chunks.extend(self.scan_flow_scalar_breaks(double, start_mark)) - else: - raise ScannerError("while scanning a double-quoted scalar", start_mark, - "found unknown escape character %r" % ch, self.get_mark()) - else: - return chunks - - def scan_flow_scalar_spaces(self, double, start_mark): - # See the specification for details. 
- chunks = [] - length = 0 - while self.peek(length) in ' \t': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch == '\0': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected end of stream", self.get_mark()) - elif ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - breaks = self.scan_flow_scalar_breaks(double, start_mark) - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - else: - chunks.append(whitespaces) - return chunks - - def scan_flow_scalar_breaks(self, double, start_mark): - # See the specification for details. - chunks = [] - while True: - # Instead of checking indentation, we check for document - # separators. - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - raise ScannerError("while scanning a quoted scalar", start_mark, - "found unexpected document separator", self.get_mark()) - while self.peek() in ' \t': - self.forward() - if self.peek() in '\r\n\x85\u2028\u2029': - chunks.append(self.scan_line_break()) - else: - return chunks - - def scan_plain(self): - # See the specification for details. - # We add an additional restriction for the flow context: - # plain scalars in the flow context cannot contain ',', ':' and '?'. - # We also keep track of the `allow_simple_key` flag here. - # Indentation rules are loosed for the flow context. - chunks = [] - start_mark = self.get_mark() - end_mark = start_mark - indent = self.indent+1 - # We allow zero indentation for scalars, but then we need to check for - # document separators at the beginning of the line. 
- #if indent == 0: - # indent = 1 - spaces = [] - while True: - length = 0 - if self.peek() == '#': - break - while True: - ch = self.peek(length) - if ch in '\0 \t\r\n\x85\u2028\u2029' \ - or (not self.flow_level and ch == ':' and - self.peek(length+1) in '\0 \t\r\n\x85\u2028\u2029') \ - or (self.flow_level and ch in ',:?[]{}'): - break - length += 1 - # It's not clear what we should do with ':' in the flow context. - if (self.flow_level and ch == ':' - and self.peek(length+1) not in '\0 \t\r\n\x85\u2028\u2029,[]{}'): - self.forward(length) - raise ScannerError("while scanning a plain scalar", start_mark, - "found unexpected ':'", self.get_mark(), - "Please check http://pyyaml.org/wiki/YAMLColonInFlowContext for details.") - if length == 0: - break - self.allow_simple_key = False - chunks.extend(spaces) - chunks.append(self.prefix(length)) - self.forward(length) - end_mark = self.get_mark() - spaces = self.scan_plain_spaces(indent, start_mark) - if not spaces or self.peek() == '#' \ - or (not self.flow_level and self.column < indent): - break - return ScalarToken(''.join(chunks), True, start_mark, end_mark) - - def scan_plain_spaces(self, indent, start_mark): - # See the specification for details. - # The specification is really confusing about tabs in plain scalars. - # We just forbid them completely. Do not use tabs in YAML! 
- chunks = [] - length = 0 - while self.peek(length) in ' ': - length += 1 - whitespaces = self.prefix(length) - self.forward(length) - ch = self.peek() - if ch in '\r\n\x85\u2028\u2029': - line_break = self.scan_line_break() - self.allow_simple_key = True - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - breaks = [] - while self.peek() in ' \r\n\x85\u2028\u2029': - if self.peek() == ' ': - self.forward() - else: - breaks.append(self.scan_line_break()) - prefix = self.prefix(3) - if (prefix == '---' or prefix == '...') \ - and self.peek(3) in '\0 \t\r\n\x85\u2028\u2029': - return - if line_break != '\n': - chunks.append(line_break) - elif not breaks: - chunks.append(' ') - chunks.extend(breaks) - elif whitespaces: - chunks.append(whitespaces) - return chunks - - def scan_tag_handle(self, name, start_mark): - # See the specification for details. - # For some strange reasons, the specification does not allow '_' in - # tag handles. I have allowed it anyway. - ch = self.peek() - if ch != '!': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length = 1 - ch = self.peek(length) - if ch != ' ': - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-_': - length += 1 - ch = self.peek(length) - if ch != '!': - self.forward(length) - raise ScannerError("while scanning a %s" % name, start_mark, - "expected '!', but found %r" % ch, self.get_mark()) - length += 1 - value = self.prefix(length) - self.forward(length) - return value - - def scan_tag_uri(self, name, start_mark): - # See the specification for details. - # Note: we do not check if URI is well-formed. 
- chunks = [] - length = 0 - ch = self.peek(length) - while '0' <= ch <= '9' or 'A' <= ch <= 'Z' or 'a' <= ch <= 'z' \ - or ch in '-;/?:@&=+$,_.!~*\'()[]%': - if ch == '%': - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - chunks.append(self.scan_uri_escapes(name, start_mark)) - else: - length += 1 - ch = self.peek(length) - if length: - chunks.append(self.prefix(length)) - self.forward(length) - length = 0 - if not chunks: - raise ScannerError("while parsing a %s" % name, start_mark, - "expected URI, but found %r" % ch, self.get_mark()) - return ''.join(chunks) - - def scan_uri_escapes(self, name, start_mark): - # See the specification for details. - codes = [] - mark = self.get_mark() - while self.peek() == '%': - self.forward() - for k in range(2): - if self.peek(k) not in '0123456789ABCDEFabcdef': - raise ScannerError("while scanning a %s" % name, start_mark, - "expected URI escape sequence of 2 hexdecimal numbers, but found %r" - % self.peek(k), self.get_mark()) - codes.append(int(self.prefix(2), 16)) - self.forward(2) - try: - value = bytes(codes).decode('utf-8') - except UnicodeDecodeError as exc: - raise ScannerError("while scanning a %s" % name, start_mark, str(exc), mark) - return value - - def scan_line_break(self): - # Transforms: - # '\r\n' : '\n' - # '\r' : '\n' - # '\n' : '\n' - # '\x85' : '\n' - # '\u2028' : '\u2028' - # '\u2029 : '\u2029' - # default : '' - ch = self.peek() - if ch in '\r\n\x85': - if self.prefix(2) == '\r\n': - self.forward(2) - else: - self.forward() - return '\n' - elif ch in '\u2028\u2029': - self.forward() - return ch - return '' - -#try: -# import psyco -# psyco.bind(Scanner) -#except ImportError: -# pass - diff --git a/libs/PyYAML-3.10/lib3/yaml/serializer.py b/libs/PyYAML-3.10/lib3/yaml/serializer.py deleted file mode 100644 index fe911e6..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/serializer.py +++ /dev/null @@ -1,111 +0,0 @@ - -__all__ = ['Serializer', 'SerializerError'] - -from .error import 
YAMLError -from .events import * -from .nodes import * - -class SerializerError(YAMLError): - pass - -class Serializer: - - ANCHOR_TEMPLATE = 'id%03d' - - def __init__(self, encoding=None, - explicit_start=None, explicit_end=None, version=None, tags=None): - self.use_encoding = encoding - self.use_explicit_start = explicit_start - self.use_explicit_end = explicit_end - self.use_version = version - self.use_tags = tags - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - self.closed = None - - def open(self): - if self.closed is None: - self.emit(StreamStartEvent(encoding=self.use_encoding)) - self.closed = False - elif self.closed: - raise SerializerError("serializer is closed") - else: - raise SerializerError("serializer is already opened") - - def close(self): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif not self.closed: - self.emit(StreamEndEvent()) - self.closed = True - - #def __del__(self): - # self.close() - - def serialize(self, node): - if self.closed is None: - raise SerializerError("serializer is not opened") - elif self.closed: - raise SerializerError("serializer is closed") - self.emit(DocumentStartEvent(explicit=self.use_explicit_start, - version=self.use_version, tags=self.use_tags)) - self.anchor_node(node) - self.serialize_node(node, None, None) - self.emit(DocumentEndEvent(explicit=self.use_explicit_end)) - self.serialized_nodes = {} - self.anchors = {} - self.last_anchor_id = 0 - - def anchor_node(self, node): - if node in self.anchors: - if self.anchors[node] is None: - self.anchors[node] = self.generate_anchor(node) - else: - self.anchors[node] = None - if isinstance(node, SequenceNode): - for item in node.value: - self.anchor_node(item) - elif isinstance(node, MappingNode): - for key, value in node.value: - self.anchor_node(key) - self.anchor_node(value) - - def generate_anchor(self, node): - self.last_anchor_id += 1 - return self.ANCHOR_TEMPLATE % self.last_anchor_id - - def 
serialize_node(self, node, parent, index): - alias = self.anchors[node] - if node in self.serialized_nodes: - self.emit(AliasEvent(alias)) - else: - self.serialized_nodes[node] = True - self.descend_resolver(parent, index) - if isinstance(node, ScalarNode): - detected_tag = self.resolve(ScalarNode, node.value, (True, False)) - default_tag = self.resolve(ScalarNode, node.value, (False, True)) - implicit = (node.tag == detected_tag), (node.tag == default_tag) - self.emit(ScalarEvent(alias, node.tag, implicit, node.value, - style=node.style)) - elif isinstance(node, SequenceNode): - implicit = (node.tag - == self.resolve(SequenceNode, node.value, True)) - self.emit(SequenceStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - index = 0 - for item in node.value: - self.serialize_node(item, node, index) - index += 1 - self.emit(SequenceEndEvent()) - elif isinstance(node, MappingNode): - implicit = (node.tag - == self.resolve(MappingNode, node.value, True)) - self.emit(MappingStartEvent(alias, node.tag, implicit, - flow_style=node.flow_style)) - for key, value in node.value: - self.serialize_node(key, node, None) - self.serialize_node(value, node, key) - self.emit(MappingEndEvent()) - self.ascend_resolver() - diff --git a/libs/PyYAML-3.10/lib3/yaml/tokens.py b/libs/PyYAML-3.10/lib3/yaml/tokens.py deleted file mode 100644 index 4d0b48a..0000000 --- a/libs/PyYAML-3.10/lib3/yaml/tokens.py +++ /dev/null @@ -1,104 +0,0 @@ - -class Token(object): - def __init__(self, start_mark, end_mark): - self.start_mark = start_mark - self.end_mark = end_mark - def __repr__(self): - attributes = [key for key in self.__dict__ - if not key.endswith('_mark')] - attributes.sort() - arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) - for key in attributes]) - return '%s(%s)' % (self.__class__.__name__, arguments) - -#class BOMToken(Token): -# id = '' - -class DirectiveToken(Token): - id = '' - def __init__(self, name, value, start_mark, end_mark): - self.name = name - 
self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class DocumentStartToken(Token): - id = '' - -class DocumentEndToken(Token): - id = '' - -class StreamStartToken(Token): - id = '' - def __init__(self, start_mark=None, end_mark=None, - encoding=None): - self.start_mark = start_mark - self.end_mark = end_mark - self.encoding = encoding - -class StreamEndToken(Token): - id = '' - -class BlockSequenceStartToken(Token): - id = '' - -class BlockMappingStartToken(Token): - id = '' - -class BlockEndToken(Token): - id = '' - -class FlowSequenceStartToken(Token): - id = '[' - -class FlowMappingStartToken(Token): - id = '{' - -class FlowSequenceEndToken(Token): - id = ']' - -class FlowMappingEndToken(Token): - id = '}' - -class KeyToken(Token): - id = '?' - -class ValueToken(Token): - id = ':' - -class BlockEntryToken(Token): - id = '-' - -class FlowEntryToken(Token): - id = ',' - -class AliasToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class AnchorToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class TagToken(Token): - id = '' - def __init__(self, value, start_mark, end_mark): - self.value = value - self.start_mark = start_mark - self.end_mark = end_mark - -class ScalarToken(Token): - id = '' - def __init__(self, value, plain, start_mark, end_mark, style=None): - self.value = value - self.plain = plain - self.start_mark = start_mark - self.end_mark = end_mark - self.style = style - diff --git a/libs/PyYAML-3.10/setup.cfg b/libs/PyYAML-3.10/setup.cfg deleted file mode 100644 index 0444d87..0000000 --- a/libs/PyYAML-3.10/setup.cfg +++ /dev/null @@ -1,35 +0,0 @@ - -# The INCLUDE and LIB directories to build the '_yaml' extension. -# You may also set them using the options '-I' and '-L'. 
-[build_ext] - -# List of directories to search for 'yaml.h' (separated by ':'). -#include_dirs=/usr/local/include:../../include - -# List of directories to search for 'libyaml.a' (separated by ':'). -#library_dirs=/usr/local/lib:../../lib - -# An alternative compiler to build the extention. -#compiler=mingw32 - -# Additional preprocessor definitions might be required. -#define=YAML_DECLARE_STATIC - -# The following options are used to build PyYAML Windows installer -# for Python 2.3 on my PC: -#include_dirs=../../../libyaml/tags/0.1.4/include -#library_dirs=../../../libyaml/tags/0.1.4/win32/vc6/output/release/lib -#define=YAML_DECLARE_STATIC - -# The following options are used to build PyYAML Windows installer -# for Python 2.4 and Python 2.5 on my PC: -#include_dirs=../../../libyaml/tags/0.1.4/include -#library_dirs=../../../libyaml/tags/0.1.4/win32/vs2003/output/release/lib -#define=YAML_DECLARE_STATIC - -# The following options are used to build PyYAML Windows installer -# for Python 2.6, 2.7, 3.0, 3.1 and 3.2 on my PC: -#include_dirs=../../../libyaml/tags/0.1.4/include -#library_dirs=../../../libyaml/tags/0.1.4/win32/vs2008/output/release/lib -#define=YAML_DECLARE_STATIC - diff --git a/libs/PyYAML-3.10/setup.py b/libs/PyYAML-3.10/setup.py deleted file mode 100644 index 1bda151..0000000 --- a/libs/PyYAML-3.10/setup.py +++ /dev/null @@ -1,347 +0,0 @@ - -NAME = 'PyYAML' -VERSION = '3.10' -DESCRIPTION = "YAML parser and emitter for Python" -LONG_DESCRIPTION = """\ -YAML is a data serialization format designed for human readability -and interaction with scripting languages. PyYAML is a YAML parser -and emitter for Python. - -PyYAML features a complete YAML 1.1 parser, Unicode support, pickle -support, capable extension API, and sensible error messages. PyYAML -supports standard YAML tags and provides Python-specific tags that -allow to represent an arbitrary Python object. 
- -PyYAML is applicable for a broad range of tasks from complex -configuration files to object serialization and persistance.""" -AUTHOR = "Kirill Simonov" -AUTHOR_EMAIL = 'xi@resolvent.net' -LICENSE = "MIT" -PLATFORMS = "Any" -URL = "http://pyyaml.org/wiki/PyYAML" -DOWNLOAD_URL = "http://pyyaml.org/download/pyyaml/%s-%s.tar.gz" % (NAME, VERSION) -CLASSIFIERS = [ - "Development Status :: 5 - Production/Stable", - "Intended Audience :: Developers", - "License :: OSI Approved :: MIT License", - "Operating System :: OS Independent", - "Programming Language :: Python", - "Programming Language :: Python :: 2", - "Programming Language :: Python :: 2.3", - "Programming Language :: Python :: 2.4", - "Programming Language :: Python :: 2.5", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.0", - "Programming Language :: Python :: 3.1", - "Programming Language :: Python :: 3.2", - "Topic :: Software Development :: Libraries :: Python Modules", - "Topic :: Text Processing :: Markup", -] - - -LIBYAML_CHECK = """ -#include - -int main(void) { - yaml_parser_t parser; - yaml_emitter_t emitter; - - yaml_parser_initialize(&parser); - yaml_parser_delete(&parser); - - yaml_emitter_initialize(&emitter); - yaml_emitter_delete(&emitter); - - return 0; -} -""" - - -import sys, os.path - -from distutils import log -from distutils.core import setup, Command -from distutils.core import Distribution as _Distribution -from distutils.core import Extension as _Extension -from distutils.dir_util import mkpath -from distutils.command.build_ext import build_ext as _build_ext -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -from distutils.errors import CompileError, LinkError, DistutilsPlatformError - -if 'setuptools.extension' in sys.modules: - _Extension = sys.modules['setuptools.extension']._Extension - sys.modules['distutils.core'].Extension = _Extension - 
sys.modules['distutils.extension'].Extension = _Extension - sys.modules['distutils.command.build_ext'].Extension = _Extension - -with_pyrex = None -if sys.version_info[0] < 3: - try: - from Cython.Distutils.extension import Extension as _Extension - from Cython.Distutils import build_ext as _build_ext - with_pyrex = 'cython' - except ImportError: - try: - # Pyrex cannot build _yaml.c at the moment, - # but it may get fixed eventually. - from Pyrex.Distutils import Extension as _Extension - from Pyrex.Distutils import build_ext as _build_ext - with_pyrex = 'pyrex' - except ImportError: - pass - - -class Distribution(_Distribution): - - def __init__(self, attrs=None): - _Distribution.__init__(self, attrs) - if not self.ext_modules: - return - for idx in range(len(self.ext_modules)-1, -1, -1): - ext = self.ext_modules[idx] - if not isinstance(ext, Extension): - continue - setattr(self, ext.attr_name, None) - self.global_options = [ - (ext.option_name, None, - "include %s (default if %s is available)" - % (ext.feature_description, ext.feature_name)), - (ext.neg_option_name, None, - "exclude %s" % ext.feature_description), - ] + self.global_options - self.negative_opt = self.negative_opt.copy() - self.negative_opt[ext.neg_option_name] = ext.option_name - - def has_ext_modules(self): - if not self.ext_modules: - return False - for ext in self.ext_modules: - with_ext = self.ext_status(ext) - if with_ext is None or with_ext: - return True - return False - - def ext_status(self, ext): - if 'Java' in sys.version or 'IronPython' in sys.version or 'PyPy' in sys.version: - return False - if isinstance(ext, Extension): - with_ext = getattr(self, ext.attr_name) - return with_ext - else: - return True - - -class Extension(_Extension): - - def __init__(self, name, sources, feature_name, feature_description, - feature_check, **kwds): - if not with_pyrex: - for filename in sources[:]: - base, ext = os.path.splitext(filename) - if ext == '.pyx': - sources.remove(filename) - 
sources.append('%s.c' % base) - _Extension.__init__(self, name, sources, **kwds) - self.feature_name = feature_name - self.feature_description = feature_description - self.feature_check = feature_check - self.attr_name = 'with_' + feature_name.replace('-', '_') - self.option_name = 'with-' + feature_name - self.neg_option_name = 'without-' + feature_name - - -class build_ext(_build_ext): - - def run(self): - optional = True - disabled = True - for ext in self.extensions: - with_ext = self.distribution.ext_status(ext) - if with_ext is None: - disabled = False - elif with_ext: - optional = False - disabled = False - break - if disabled: - return - try: - _build_ext.run(self) - except DistutilsPlatformError: - exc = sys.exc_info()[1] - if optional: - log.warn(str(exc)) - log.warn("skipping build_ext") - else: - raise - - def get_source_files(self): - self.check_extensions_list(self.extensions) - filenames = [] - for ext in self.extensions: - if with_pyrex == 'pyrex': - self.pyrex_sources(ext.sources, ext) - elif with_pyrex == 'cython': - self.cython_sources(ext.sources, ext) - for filename in ext.sources: - filenames.append(filename) - base = os.path.splitext(filename)[0] - for ext in ['c', 'h', 'pyx', 'pxd']: - filename = '%s.%s' % (base, ext) - if filename not in filenames and os.path.isfile(filename): - filenames.append(filename) - return filenames - - def get_outputs(self): - self.check_extensions_list(self.extensions) - outputs = [] - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = os.path.join(self.build_lib, - self.get_ext_filename(fullname)) - if os.path.isfile(filename): - outputs.append(filename) - return outputs - - def build_extensions(self): - self.check_extensions_list(self.extensions) - for ext in self.extensions: - with_ext = self.distribution.ext_status(ext) - if with_ext is None: - with_ext = self.check_extension_availability(ext) - if not with_ext: - continue - if with_pyrex == 'pyrex': - ext.sources = 
self.pyrex_sources(ext.sources, ext) - elif with_pyrex == 'cython': - ext.sources = self.cython_sources(ext.sources, ext) - self.build_extension(ext) - - def check_extension_availability(self, ext): - cache = os.path.join(self.build_temp, 'check_%s.out' % ext.feature_name) - if not self.force and os.path.isfile(cache): - data = open(cache).read().strip() - if data == '1': - return True - elif data == '0': - return False - mkpath(self.build_temp) - src = os.path.join(self.build_temp, 'check_%s.c' % ext.feature_name) - open(src, 'w').write(ext.feature_check) - log.info("checking if %s is compilable" % ext.feature_name) - try: - [obj] = self.compiler.compile([src], - macros=ext.define_macros+[(undef,) for undef in ext.undef_macros], - include_dirs=ext.include_dirs, - extra_postargs=(ext.extra_compile_args or []), - depends=ext.depends) - except CompileError: - log.warn("") - log.warn("%s is not found or a compiler error: forcing --%s" - % (ext.feature_name, ext.neg_option_name)) - log.warn("(if %s is installed correctly, you may need to" - % ext.feature_name) - log.warn(" specify the option --include-dirs or uncomment and") - log.warn(" modify the parameter include_dirs in setup.cfg)") - open(cache, 'w').write('0\n') - return False - prog = 'check_%s' % ext.feature_name - log.info("checking if %s is linkable" % ext.feature_name) - try: - self.compiler.link_executable([obj], prog, - output_dir=self.build_temp, - libraries=ext.libraries, - library_dirs=ext.library_dirs, - runtime_library_dirs=ext.runtime_library_dirs, - extra_postargs=(ext.extra_link_args or [])) - except LinkError: - log.warn("") - log.warn("%s is not found or a linker error: forcing --%s" - % (ext.feature_name, ext.neg_option_name)) - log.warn("(if %s is installed correctly, you may need to" - % ext.feature_name) - log.warn(" specify the option --library-dirs or uncomment and") - log.warn(" modify the parameter library_dirs in setup.cfg)") - open(cache, 'w').write('0\n') - return False - open(cache, 
'w').write('1\n') - return True - - -class bdist_rpm(_bdist_rpm): - - def _make_spec_file(self): - argv0 = sys.argv[0] - features = [] - for ext in self.distribution.ext_modules: - if not isinstance(ext, Extension): - continue - with_ext = getattr(self.distribution, ext.attr_name) - if with_ext is None: - continue - if with_ext: - features.append('--'+ext.option_name) - else: - features.append('--'+ext.neg_option_name) - sys.argv[0] = ' '.join([argv0]+features) - spec_file = _bdist_rpm._make_spec_file(self) - sys.argv[0] = argv0 - return spec_file - - -class test(Command): - - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - build_cmd = self.get_finalized_command('build') - build_cmd.run() - sys.path.insert(0, build_cmd.build_lib) - if sys.version_info[0] < 3: - sys.path.insert(0, 'tests/lib') - else: - sys.path.insert(0, 'tests/lib3') - import test_all - test_all.main([]) - - -if __name__ == '__main__': - - setup( - name=NAME, - version=VERSION, - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - author=AUTHOR, - author_email=AUTHOR_EMAIL, - license=LICENSE, - platforms=PLATFORMS, - url=URL, - download_url=DOWNLOAD_URL, - classifiers=CLASSIFIERS, - - package_dir={'': {2: 'lib', 3: 'lib3'}[sys.version_info[0]]}, - packages=['yaml'], - ext_modules=[ - Extension('_yaml', ['ext/_yaml.pyx'], - 'libyaml', "LibYAML bindings", LIBYAML_CHECK, - libraries=['yaml']), - ], - - distclass=Distribution, - - cmdclass={ - 'build_ext': build_ext, - 'bdist_rpm': bdist_rpm, - 'test': test, - }, - ) - diff --git a/libs/PyYAML-3.10/tests/data/a-nasty-libyaml-bug.loader-error b/libs/PyYAML-3.10/tests/data/a-nasty-libyaml-bug.loader-error deleted file mode 100644 index f97d49f..0000000 --- a/libs/PyYAML-3.10/tests/data/a-nasty-libyaml-bug.loader-error +++ /dev/null @@ -1 +0,0 @@ -[ [ \ No newline at end of file diff --git a/libs/PyYAML-3.10/tests/data/aliases-cdumper-bug.code 
b/libs/PyYAML-3.10/tests/data/aliases-cdumper-bug.code deleted file mode 100644 index 0168441..0000000 --- a/libs/PyYAML-3.10/tests/data/aliases-cdumper-bug.code +++ /dev/null @@ -1 +0,0 @@ -[ today, today ] diff --git a/libs/PyYAML-3.10/tests/data/aliases.events b/libs/PyYAML-3.10/tests/data/aliases.events deleted file mode 100644 index 9139b51..0000000 --- a/libs/PyYAML-3.10/tests/data/aliases.events +++ /dev/null @@ -1,8 +0,0 @@ -- !StreamStart -- !DocumentStart -- !SequenceStart -- !Scalar { anchor: 'myanchor', tag: '!mytag', value: 'data' } -- !Alias { anchor: 'myanchor' } -- !SequenceEnd -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/bool.data b/libs/PyYAML-3.10/tests/data/bool.data deleted file mode 100644 index 0988b63..0000000 --- a/libs/PyYAML-3.10/tests/data/bool.data +++ /dev/null @@ -1,4 +0,0 @@ -- yes -- NO -- True -- on diff --git a/libs/PyYAML-3.10/tests/data/bool.detect b/libs/PyYAML-3.10/tests/data/bool.detect deleted file mode 100644 index 947ebbb..0000000 --- a/libs/PyYAML-3.10/tests/data/bool.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:bool diff --git a/libs/PyYAML-3.10/tests/data/colon-in-flow-context.loader-error b/libs/PyYAML-3.10/tests/data/colon-in-flow-context.loader-error deleted file mode 100644 index 13d5087..0000000 --- a/libs/PyYAML-3.10/tests/data/colon-in-flow-context.loader-error +++ /dev/null @@ -1 +0,0 @@ -{ foo:bar } diff --git a/libs/PyYAML-3.10/tests/data/construct-binary-py2.code b/libs/PyYAML-3.10/tests/data/construct-binary-py2.code deleted file mode 100644 index 67ac0d5..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-binary-py2.code +++ /dev/null @@ -1,7 +0,0 @@ -{ - "canonical": - 
"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;", - "generic": - "GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;", - "description": "The binary value above is a tiny arrow encoded as a gif image.", -} diff --git a/libs/PyYAML-3.10/tests/data/construct-binary-py2.data b/libs/PyYAML-3.10/tests/data/construct-binary-py2.data deleted file mode 100644 index dcdb16f..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-binary-py2.data +++ /dev/null @@ -1,12 +0,0 @@ -canonical: !!binary "\ - R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\ - OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\ - +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\ - AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=" -generic: !!binary | - R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5 - OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+ - 
+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC - AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs= -description: - The binary value above is a tiny arrow encoded as a gif image. diff --git a/libs/PyYAML-3.10/tests/data/construct-binary-py3.code b/libs/PyYAML-3.10/tests/data/construct-binary-py3.code deleted file mode 100644 index 30bfc3f..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-binary-py3.code +++ /dev/null @@ -1,7 +0,0 @@ -{ - "canonical": - b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;", - "generic": - b"GIF89a\x0c\x00\x0c\x00\x84\x00\x00\xff\xff\xf7\xf5\xf5\xee\xe9\xe9\xe5fff\x00\x00\x00\xe7\xe7\xe7^^^\xf3\xf3\xed\x8e\x8e\x8e\xe0\xe0\xe0\x9f\x9f\x9f\x93\x93\x93\xa7\xa7\xa7\x9e\x9e\x9eiiiccc\xa3\xa3\xa3\x84\x84\x84\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9\xff\xfe\xf9!\xfe\x0eMade with GIMP\x00,\x00\x00\x00\x00\x0c\x00\x0c\x00\x00\x05, \x8e\x810\x9e\xe3@\x14\xe8i\x10\xc4\xd1\x8a\x08\x1c\xcf\x80M$z\xef\xff0\x85p\xb8\xb01f\r\x1b\xce\x01\xc3\x01\x1e\x10' \x82\n\x01\x00;", - "description": "The binary value above is a tiny arrow encoded as a gif image.", -} diff --git a/libs/PyYAML-3.10/tests/data/construct-binary-py3.data b/libs/PyYAML-3.10/tests/data/construct-binary-py3.data deleted file mode 100644 index dcdb16f..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-binary-py3.data 
+++ /dev/null @@ -1,12 +0,0 @@ -canonical: !!binary "\ - R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\ - OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\ - +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\ - AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=" -generic: !!binary | - R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5 - OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+ - +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC - AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs= -description: - The binary value above is a tiny arrow encoded as a gif image. diff --git a/libs/PyYAML-3.10/tests/data/construct-bool.code b/libs/PyYAML-3.10/tests/data/construct-bool.code deleted file mode 100644 index 3d02580..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-bool.code +++ /dev/null @@ -1,7 +0,0 @@ -{ - "canonical": True, - "answer": False, - "logical": True, - "option": True, - "but": { "y": "is a string", "n": "is a string" }, -} diff --git a/libs/PyYAML-3.10/tests/data/construct-bool.data b/libs/PyYAML-3.10/tests/data/construct-bool.data deleted file mode 100644 index 36d6519..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-bool.data +++ /dev/null @@ -1,9 +0,0 @@ -canonical: yes -answer: NO -logical: True -option: on - - -but: - y: is a string - n: is a string diff --git a/libs/PyYAML-3.10/tests/data/construct-custom.code b/libs/PyYAML-3.10/tests/data/construct-custom.code deleted file mode 100644 index 2d5f063..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-custom.code +++ /dev/null @@ -1,10 +0,0 @@ -[ - MyTestClass1(x=1), - MyTestClass1(x=1, y=2, z=3), - MyTestClass2(x=10), - MyTestClass2(x=10, y=20, z=30), - MyTestClass3(x=1), - MyTestClass3(x=1, y=2, z=3), - MyTestClass3(x=1, y=2, z=3), - YAMLObject1(my_parameter='foo', my_another_parameter=[1,2,3]) -] diff --git a/libs/PyYAML-3.10/tests/data/construct-custom.data 
b/libs/PyYAML-3.10/tests/data/construct-custom.data deleted file mode 100644 index 9db0f64..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-custom.data +++ /dev/null @@ -1,26 +0,0 @@ ---- -- !tag1 - x: 1 -- !tag1 - x: 1 - 'y': 2 - z: 3 -- !tag2 - 10 -- !tag2 - =: 10 - 'y': 20 - z: 30 -- !tag3 - x: 1 -- !tag3 - x: 1 - 'y': 2 - z: 3 -- !tag3 - =: 1 - 'y': 2 - z: 3 -- !foo - my-parameter: foo - my-another-parameter: [1,2,3] diff --git a/libs/PyYAML-3.10/tests/data/construct-float.code b/libs/PyYAML-3.10/tests/data/construct-float.code deleted file mode 100644 index 8493bf2..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-float.code +++ /dev/null @@ -1,8 +0,0 @@ -{ - "canonical": 685230.15, - "exponential": 685230.15, - "fixed": 685230.15, - "sexagesimal": 685230.15, - "negative infinity": -1e300000, - "not a number": 1e300000/1e300000, -} diff --git a/libs/PyYAML-3.10/tests/data/construct-float.data b/libs/PyYAML-3.10/tests/data/construct-float.data deleted file mode 100644 index b662c62..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-float.data +++ /dev/null @@ -1,6 +0,0 @@ -canonical: 6.8523015e+5 -exponential: 685.230_15e+03 -fixed: 685_230.15 -sexagesimal: 190:20:30.15 -negative infinity: -.inf -not a number: .NaN diff --git a/libs/PyYAML-3.10/tests/data/construct-int.code b/libs/PyYAML-3.10/tests/data/construct-int.code deleted file mode 100644 index 1058f7b..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-int.code +++ /dev/null @@ -1,8 +0,0 @@ -{ - "canonical": 685230, - "decimal": 685230, - "octal": 685230, - "hexadecimal": 685230, - "binary": 685230, - "sexagesimal": 685230, -} diff --git a/libs/PyYAML-3.10/tests/data/construct-int.data b/libs/PyYAML-3.10/tests/data/construct-int.data deleted file mode 100644 index 852c314..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-int.data +++ /dev/null @@ -1,6 +0,0 @@ -canonical: 685230 -decimal: +685_230 -octal: 02472256 -hexadecimal: 0x_0A_74_AE -binary: 0b1010_0111_0100_1010_1110 -sexagesimal: 
190:20:30 diff --git a/libs/PyYAML-3.10/tests/data/construct-map.code b/libs/PyYAML-3.10/tests/data/construct-map.code deleted file mode 100644 index 736ba48..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-map.code +++ /dev/null @@ -1,6 +0,0 @@ -{ - "Block style": - { "Clark" : "Evans", "Brian" : "Ingerson", "Oren" : "Ben-Kiki" }, - "Flow style": - { "Clark" : "Evans", "Brian" : "Ingerson", "Oren" : "Ben-Kiki" }, -} diff --git a/libs/PyYAML-3.10/tests/data/construct-map.data b/libs/PyYAML-3.10/tests/data/construct-map.data deleted file mode 100644 index 022446d..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-map.data +++ /dev/null @@ -1,6 +0,0 @@ -# Unordered set of key: value pairs. -Block style: !!map - Clark : Evans - Brian : Ingerson - Oren : Ben-Kiki -Flow style: !!map { Clark: Evans, Brian: Ingerson, Oren: Ben-Kiki } diff --git a/libs/PyYAML-3.10/tests/data/construct-merge.code b/libs/PyYAML-3.10/tests/data/construct-merge.code deleted file mode 100644 index 6cd419d..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-merge.code +++ /dev/null @@ -1,10 +0,0 @@ -[ - { "x": 1, "y": 2 }, - { "x": 0, "y": 2 }, - { "r": 10 }, - { "r": 1 }, - { "x": 1, "y": 2, "r": 10, "label": "center/big" }, - { "x": 1, "y": 2, "r": 10, "label": "center/big" }, - { "x": 1, "y": 2, "r": 10, "label": "center/big" }, - { "x": 1, "y": 2, "r": 10, "label": "center/big" }, -] diff --git a/libs/PyYAML-3.10/tests/data/construct-merge.data b/libs/PyYAML-3.10/tests/data/construct-merge.data deleted file mode 100644 index 3fdb2e2..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-merge.data +++ /dev/null @@ -1,27 +0,0 @@ ---- -- &CENTER { x: 1, 'y': 2 } -- &LEFT { x: 0, 'y': 2 } -- &BIG { r: 10 } -- &SMALL { r: 1 } - -# All the following maps are equal: - -- # Explicit keys - x: 1 - 'y': 2 - r: 10 - label: center/big - -- # Merge one map - << : *CENTER - r: 10 - label: center/big - -- # Merge multiple maps - << : [ *CENTER, *BIG ] - label: center/big - -- # Override - << : [ 
*BIG, *LEFT, *SMALL ] - x: 1 - label: center/big diff --git a/libs/PyYAML-3.10/tests/data/construct-null.code b/libs/PyYAML-3.10/tests/data/construct-null.code deleted file mode 100644 index a895eaa..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-null.code +++ /dev/null @@ -1,13 +0,0 @@ -[ - None, - { "empty": None, "canonical": None, "english": None, None: "null key" }, - { - "sparse": [ - None, - "2nd entry", - None, - "4th entry", - None, - ], - }, -] diff --git a/libs/PyYAML-3.10/tests/data/construct-null.data b/libs/PyYAML-3.10/tests/data/construct-null.data deleted file mode 100644 index 9ad0344..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-null.data +++ /dev/null @@ -1,18 +0,0 @@ -# A document may be null. ---- ---- -# This mapping has four keys, -# one has a value. -empty: -canonical: ~ -english: null -~: null key ---- -# This sequence has five -# entries, two have values. -sparse: - - ~ - - 2nd entry - - - - 4th entry - - Null diff --git a/libs/PyYAML-3.10/tests/data/construct-omap.code b/libs/PyYAML-3.10/tests/data/construct-omap.code deleted file mode 100644 index f4cf1b8..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-omap.code +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Bestiary": [ - ("aardvark", "African pig-like ant eater. Ugly."), - ("anteater", "South-American ant eater. Two species."), - ("anaconda", "South-American constrictor snake. Scaly."), - ], - "Numbers": [ ("one", 1), ("two", 2), ("three", 3) ], -} diff --git a/libs/PyYAML-3.10/tests/data/construct-omap.data b/libs/PyYAML-3.10/tests/data/construct-omap.data deleted file mode 100644 index 4fa0f45..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-omap.data +++ /dev/null @@ -1,8 +0,0 @@ -# Explicitly typed ordered map (dictionary). -Bestiary: !!omap - - aardvark: African pig-like ant eater. Ugly. - - anteater: South-American ant eater. Two species. - - anaconda: South-American constrictor snake. Scaly. - # Etc. 
-# Flow style -Numbers: !!omap [ one: 1, two: 2, three : 3 ] diff --git a/libs/PyYAML-3.10/tests/data/construct-pairs.code b/libs/PyYAML-3.10/tests/data/construct-pairs.code deleted file mode 100644 index 64f86ee..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-pairs.code +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Block tasks": [ - ("meeting", "with team."), - ("meeting", "with boss."), - ("break", "lunch."), - ("meeting", "with client."), - ], - "Flow tasks": [ ("meeting", "with team"), ("meeting", "with boss") ], -} diff --git a/libs/PyYAML-3.10/tests/data/construct-pairs.data b/libs/PyYAML-3.10/tests/data/construct-pairs.data deleted file mode 100644 index 05f55b9..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-pairs.data +++ /dev/null @@ -1,7 +0,0 @@ -# Explicitly typed pairs. -Block tasks: !!pairs - - meeting: with team. - - meeting: with boss. - - break: lunch. - - meeting: with client. -Flow tasks: !!pairs [ meeting: with team, meeting: with boss ] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-bool.code b/libs/PyYAML-3.10/tests/data/construct-python-bool.code deleted file mode 100644 index 170da01..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-bool.code +++ /dev/null @@ -1 +0,0 @@ -[ True, False ] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-bool.data b/libs/PyYAML-3.10/tests/data/construct-python-bool.data deleted file mode 100644 index 0068869..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-bool.data +++ /dev/null @@ -1 +0,0 @@ -[ !!python/bool True, !!python/bool False ] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.code b/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.code deleted file mode 100644 index b9051d8..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.code +++ /dev/null @@ -1 +0,0 @@ -b'some binary data' diff --git a/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.data b/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.data deleted 
file mode 100644 index 9528725..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-bytes-py3.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/bytes 'c29tZSBiaW5hcnkgZGF0YQ==' diff --git a/libs/PyYAML-3.10/tests/data/construct-python-complex.code b/libs/PyYAML-3.10/tests/data/construct-python-complex.code deleted file mode 100644 index e582dff..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-complex.code +++ /dev/null @@ -1 +0,0 @@ -[0.5+0j, 0.5+0.5j, 0.5j, -0.5+0.5j, -0.5+0j, -0.5-0.5j, -0.5j, 0.5-0.5j] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-complex.data b/libs/PyYAML-3.10/tests/data/construct-python-complex.data deleted file mode 100644 index 17ebad4..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-complex.data +++ /dev/null @@ -1,8 +0,0 @@ -- !!python/complex 0.5+0j -- !!python/complex 0.5+0.5j -- !!python/complex 0.5j -- !!python/complex -0.5+0.5j -- !!python/complex -0.5+0j -- !!python/complex -0.5-0.5j -- !!python/complex -0.5j -- !!python/complex 0.5-0.5j diff --git a/libs/PyYAML-3.10/tests/data/construct-python-float.code b/libs/PyYAML-3.10/tests/data/construct-python-float.code deleted file mode 100644 index d5910a0..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-float.code +++ /dev/null @@ -1 +0,0 @@ -123.456 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-float.data b/libs/PyYAML-3.10/tests/data/construct-python-float.data deleted file mode 100644 index b460eb8..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-float.data +++ /dev/null @@ -1 +0,0 @@ -!!python/float 123.456 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-int.code b/libs/PyYAML-3.10/tests/data/construct-python-int.code deleted file mode 100644 index 190a180..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-int.code +++ /dev/null @@ -1 +0,0 @@ -123 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-int.data b/libs/PyYAML-3.10/tests/data/construct-python-int.data deleted file mode 100644 
index 741d669..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-int.data +++ /dev/null @@ -1 +0,0 @@ -!!python/int 123 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.code b/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.code deleted file mode 100644 index fafc3f1..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.code +++ /dev/null @@ -1 +0,0 @@ -123L diff --git a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.data b/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.data deleted file mode 100644 index 4bd5dc2..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py2.data +++ /dev/null @@ -1 +0,0 @@ -!!python/long 123 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.code b/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.code deleted file mode 100644 index 190a180..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.code +++ /dev/null @@ -1 +0,0 @@ -123 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.data b/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.data deleted file mode 100644 index 4bd5dc2..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-long-short-py3.data +++ /dev/null @@ -1 +0,0 @@ -!!python/long 123 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-name-module.code b/libs/PyYAML-3.10/tests/data/construct-python-name-module.code deleted file mode 100644 index 6f39148..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-name-module.code +++ /dev/null @@ -1 +0,0 @@ -[str, yaml.Loader, yaml.dump, abs, yaml.tokens] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-name-module.data b/libs/PyYAML-3.10/tests/data/construct-python-name-module.data deleted file mode 100644 index f0c9712..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-name-module.data +++ /dev/null @@ -1,5 +0,0 @@ -- !!python/name:str -- 
!!python/name:yaml.Loader -- !!python/name:yaml.dump -- !!python/name:abs -- !!python/module:yaml.tokens diff --git a/libs/PyYAML-3.10/tests/data/construct-python-none.code b/libs/PyYAML-3.10/tests/data/construct-python-none.code deleted file mode 100644 index b0047fa..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-none.code +++ /dev/null @@ -1 +0,0 @@ -None diff --git a/libs/PyYAML-3.10/tests/data/construct-python-none.data b/libs/PyYAML-3.10/tests/data/construct-python-none.data deleted file mode 100644 index 7907ec3..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-none.data +++ /dev/null @@ -1 +0,0 @@ -!!python/none diff --git a/libs/PyYAML-3.10/tests/data/construct-python-object.code b/libs/PyYAML-3.10/tests/data/construct-python-object.code deleted file mode 100644 index 7f1edf1..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-object.code +++ /dev/null @@ -1,23 +0,0 @@ -[ -AnObject(1, 'two', [3,3,3]), -AnInstance(1, 'two', [3,3,3]), - -AnObject(1, 'two', [3,3,3]), -AnInstance(1, 'two', [3,3,3]), - -AState(1, 'two', [3,3,3]), -ACustomState(1, 'two', [3,3,3]), - -InitArgs(1, 'two', [3,3,3]), -InitArgsWithState(1, 'two', [3,3,3]), - -NewArgs(1, 'two', [3,3,3]), -NewArgsWithState(1, 'two', [3,3,3]), - -Reduce(1, 'two', [3,3,3]), -ReduceWithState(1, 'two', [3,3,3]), - -MyInt(3), -MyList(3), -MyDict(3), -] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-object.data b/libs/PyYAML-3.10/tests/data/construct-python-object.data deleted file mode 100644 index bce8b2e..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-object.data +++ /dev/null @@ -1,21 +0,0 @@ -- !!python/object:test_constructor.AnObject { foo: 1, bar: two, baz: [3,3,3] } -- !!python/object:test_constructor.AnInstance { foo: 1, bar: two, baz: [3,3,3] } - -- !!python/object/new:test_constructor.AnObject { args: [1, two], kwds: {baz: [3,3,3]} } -- !!python/object/apply:test_constructor.AnInstance { args: [1, two], kwds: {baz: [3,3,3]} } - -- 
!!python/object:test_constructor.AState { _foo: 1, _bar: two, _baz: [3,3,3] } -- !!python/object/new:test_constructor.ACustomState { state: !!python/tuple [1, two, [3,3,3]] } - -- !!python/object/new:test_constructor.InitArgs [1, two, [3,3,3]] -- !!python/object/new:test_constructor.InitArgsWithState { args: [1, two], state: [3,3,3] } - -- !!python/object/new:test_constructor.NewArgs [1, two, [3,3,3]] -- !!python/object/new:test_constructor.NewArgsWithState { args: [1, two], state: [3,3,3] } - -- !!python/object/apply:test_constructor.Reduce [1, two, [3,3,3]] -- !!python/object/apply:test_constructor.ReduceWithState { args: [1, two], state: [3,3,3] } - -- !!python/object/new:test_constructor.MyInt [3] -- !!python/object/new:test_constructor.MyList { listitems: [~, ~, ~] } -- !!python/object/new:test_constructor.MyDict { dictitems: {0, 1, 2} } diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.code b/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.code deleted file mode 100644 index d9d62f6..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.code +++ /dev/null @@ -1 +0,0 @@ -"ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.data b/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.data deleted file mode 100644 index a83349e..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-ascii.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/str "ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.code b/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.code deleted file mode 100644 index 47b28ab..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.code +++ /dev/null @@ -1 +0,0 @@ -u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430'.encode('utf-8') diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.data 
b/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.data deleted file mode 100644 index 9ef2c72..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py2.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/str "Это уникодная строка" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.code b/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.code deleted file mode 100644 index 9f66032..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.code +++ /dev/null @@ -1 +0,0 @@ -'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430' diff --git a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.data b/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.data deleted file mode 100644 index 9ef2c72..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-str-utf8-py3.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/str "Это уникодная строка" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.code b/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.code deleted file mode 100644 index 20ced98..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.code +++ /dev/null @@ -1,6 +0,0 @@ -[ - [1, 2, 3, 4], - (1, 2, 3, 4), - {1: 2, 3: 4}, - {(0,0): 0, (0,1): 1, (1,0): 1, (1,1): 0}, -] diff --git a/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.data b/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.data deleted file mode 100644 index c56159b..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-tuple-list-dict.data +++ /dev/null @@ -1,8 +0,0 @@ -- !!python/list [1, 2, 3, 4] -- !!python/tuple [1, 2, 3, 4] -- !!python/dict {1: 2, 3: 4} -- !!python/dict - !!python/tuple [0,0]: 0 - !!python/tuple [0,1]: 1 - !!python/tuple [1,0]: 1 - !!python/tuple [1,1]: 0 diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.code 
b/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.code deleted file mode 100644 index d4cd82c..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.code +++ /dev/null @@ -1 +0,0 @@ -u"ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.data b/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.data deleted file mode 100644 index 3a0647b..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py2.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/unicode "ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.code b/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.code deleted file mode 100644 index d9d62f6..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.code +++ /dev/null @@ -1 +0,0 @@ -"ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.data b/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.data deleted file mode 100644 index 3a0647b..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-ascii-py3.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/unicode "ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.code b/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.code deleted file mode 100644 index 2793ac7..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.code +++ /dev/null @@ -1 +0,0 @@ -u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430' diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.data b/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.data deleted file mode 100644 index 5a980ea..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py2.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/unicode "Это уникодная строка" diff --git 
a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.code b/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.code deleted file mode 100644 index 9f66032..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.code +++ /dev/null @@ -1 +0,0 @@ -'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430' diff --git a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.data b/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.data deleted file mode 100644 index 5a980ea..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-python-unicode-utf8-py3.data +++ /dev/null @@ -1 +0,0 @@ ---- !!python/unicode "Это уникодная строка" diff --git a/libs/PyYAML-3.10/tests/data/construct-seq.code b/libs/PyYAML-3.10/tests/data/construct-seq.code deleted file mode 100644 index 0c90c05..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-seq.code +++ /dev/null @@ -1,4 +0,0 @@ -{ - "Block style": ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune", "Pluto"], - "Flow style": ["Mercury", "Venus", "Earth", "Mars", "Jupiter", "Saturn", "Uranus", "Neptune", "Pluto"], -} diff --git a/libs/PyYAML-3.10/tests/data/construct-seq.data b/libs/PyYAML-3.10/tests/data/construct-seq.data deleted file mode 100644 index bb92fd1..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-seq.data +++ /dev/null @@ -1,15 +0,0 @@ -# Ordered sequence of nodes -Block style: !!seq -- Mercury # Rotates - no light/dark sides. -- Venus # Deadliest. Aptly named. -- Earth # Mostly dirt. -- Mars # Seems empty. -- Jupiter # The king. -- Saturn # Pretty. -- Uranus # Where the sun hardly shines. -- Neptune # Boring. No rings. -- Pluto # You call this a planet? 
-Flow style: !!seq [ Mercury, Venus, Earth, Mars, # Rocks - Jupiter, Saturn, Uranus, Neptune, # Gas - Pluto ] # Overrated - diff --git a/libs/PyYAML-3.10/tests/data/construct-set.code b/libs/PyYAML-3.10/tests/data/construct-set.code deleted file mode 100644 index aa090e8..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-set.code +++ /dev/null @@ -1,4 +0,0 @@ -{ - "baseball players": set(["Mark McGwire", "Sammy Sosa", "Ken Griffey"]), - "baseball teams": set(["Boston Red Sox", "Detroit Tigers", "New York Yankees"]), -} diff --git a/libs/PyYAML-3.10/tests/data/construct-set.data b/libs/PyYAML-3.10/tests/data/construct-set.data deleted file mode 100644 index e05dc88..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-set.data +++ /dev/null @@ -1,7 +0,0 @@ -# Explicitly typed set. -baseball players: !!set - ? Mark McGwire - ? Sammy Sosa - ? Ken Griffey -# Flow style -baseball teams: !!set { Boston Red Sox, Detroit Tigers, New York Yankees } diff --git a/libs/PyYAML-3.10/tests/data/construct-str-ascii.code b/libs/PyYAML-3.10/tests/data/construct-str-ascii.code deleted file mode 100644 index d9d62f6..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-ascii.code +++ /dev/null @@ -1 +0,0 @@ -"ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-str-ascii.data b/libs/PyYAML-3.10/tests/data/construct-str-ascii.data deleted file mode 100644 index 0d93013..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-ascii.data +++ /dev/null @@ -1 +0,0 @@ ---- !!str "ascii string" diff --git a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.code b/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.code deleted file mode 100644 index 2793ac7..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.code +++ /dev/null @@ -1 +0,0 @@ -u'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430' diff --git a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.data 
b/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.data deleted file mode 100644 index e355f18..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py2.data +++ /dev/null @@ -1 +0,0 @@ ---- !!str "Это уникодная строка" diff --git a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.code b/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.code deleted file mode 100644 index 9f66032..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.code +++ /dev/null @@ -1 +0,0 @@ -'\u042d\u0442\u043e \u0443\u043d\u0438\u043a\u043e\u0434\u043d\u0430\u044f \u0441\u0442\u0440\u043e\u043a\u0430' diff --git a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.data b/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.data deleted file mode 100644 index e355f18..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str-utf8-py3.data +++ /dev/null @@ -1 +0,0 @@ ---- !!str "Это уникодная строка" diff --git a/libs/PyYAML-3.10/tests/data/construct-str.code b/libs/PyYAML-3.10/tests/data/construct-str.code deleted file mode 100644 index 8d57214..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str.code +++ /dev/null @@ -1 +0,0 @@ -{ "string": "abcd" } diff --git a/libs/PyYAML-3.10/tests/data/construct-str.data b/libs/PyYAML-3.10/tests/data/construct-str.data deleted file mode 100644 index 606ac6b..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-str.data +++ /dev/null @@ -1 +0,0 @@ -string: abcd diff --git a/libs/PyYAML-3.10/tests/data/construct-timestamp.code b/libs/PyYAML-3.10/tests/data/construct-timestamp.code deleted file mode 100644 index ffc3b2f..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-timestamp.code +++ /dev/null @@ -1,7 +0,0 @@ -{ - "canonical": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000), - "valid iso8601": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000), - "space separated": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000), - "no time zone (Z)": datetime.datetime(2001, 12, 15, 2, 59, 43, 100000), - "date (00:00:00Z)": 
datetime.date(2002, 12, 14), -} diff --git a/libs/PyYAML-3.10/tests/data/construct-timestamp.data b/libs/PyYAML-3.10/tests/data/construct-timestamp.data deleted file mode 100644 index c5f3840..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-timestamp.data +++ /dev/null @@ -1,5 +0,0 @@ -canonical: 2001-12-15T02:59:43.1Z -valid iso8601: 2001-12-14t21:59:43.10-05:00 -space separated: 2001-12-14 21:59:43.10 -5 -no time zone (Z): 2001-12-15 2:59:43.10 -date (00:00:00Z): 2002-12-14 diff --git a/libs/PyYAML-3.10/tests/data/construct-value.code b/libs/PyYAML-3.10/tests/data/construct-value.code deleted file mode 100644 index f1f015e..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-value.code +++ /dev/null @@ -1,9 +0,0 @@ -[ - { "link with": [ "library1.dll", "library2.dll" ] }, - { - "link with": [ - { "=": "library1.dll", "version": 1.2 }, - { "=": "library2.dll", "version": 2.3 }, - ], - }, -] diff --git a/libs/PyYAML-3.10/tests/data/construct-value.data b/libs/PyYAML-3.10/tests/data/construct-value.data deleted file mode 100644 index 3eb7919..0000000 --- a/libs/PyYAML-3.10/tests/data/construct-value.data +++ /dev/null @@ -1,10 +0,0 @@ ---- # Old schema -link with: - - library1.dll - - library2.dll ---- # New schema -link with: - - = : library1.dll - version: 1.2 - - = : library2.dll - version: 2.3 diff --git a/libs/PyYAML-3.10/tests/data/document-separator-in-quoted-scalar.loader-error b/libs/PyYAML-3.10/tests/data/document-separator-in-quoted-scalar.loader-error deleted file mode 100644 index 9eeb0d6..0000000 --- a/libs/PyYAML-3.10/tests/data/document-separator-in-quoted-scalar.loader-error +++ /dev/null @@ -1,11 +0,0 @@ ---- -"this --- is correct" ---- -"this -...is also -correct" ---- -"a quoted scalar -cannot contain ---- -document separators" diff --git a/libs/PyYAML-3.10/tests/data/documents.events b/libs/PyYAML-3.10/tests/data/documents.events deleted file mode 100644 index 775a51a..0000000 --- a/libs/PyYAML-3.10/tests/data/documents.events +++ /dev/null 
@@ -1,11 +0,0 @@ -- !StreamStart -- !DocumentStart { explicit: false } -- !Scalar { implicit: [true,false], value: 'data' } -- !DocumentEnd -- !DocumentStart -- !Scalar { implicit: [true,false] } -- !DocumentEnd -- !DocumentStart { version: [1,1], tags: { '!': '!foo', '!yaml!': 'tag:yaml.org,2002:', '!ugly!': '!!!!!!!' } } -- !Scalar { implicit: [true,false] } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/duplicate-anchor-1.loader-error b/libs/PyYAML-3.10/tests/data/duplicate-anchor-1.loader-error deleted file mode 100644 index 906cf29..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-anchor-1.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -- &foo bar -- &bar bar -- &foo bar diff --git a/libs/PyYAML-3.10/tests/data/duplicate-anchor-2.loader-error b/libs/PyYAML-3.10/tests/data/duplicate-anchor-2.loader-error deleted file mode 100644 index 62b4389..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-anchor-2.loader-error +++ /dev/null @@ -1 +0,0 @@ -&foo [1, 2, 3, &foo 4] diff --git a/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.code b/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.code deleted file mode 100644 index cb73906..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.code +++ /dev/null @@ -1 +0,0 @@ -{ 'foo': 'baz' } diff --git a/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.data b/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.data deleted file mode 100644 index 84deb8f..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-key.former-loader-error.data +++ /dev/null @@ -1,3 +0,0 @@ ---- -foo: bar -foo: baz diff --git a/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.code b/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.code deleted file mode 100644 index 17a6285..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.code +++ /dev/null @@ -1 +0,0 @@ -{ 'foo': { 'baz': 'bat', 
'foo': 'duplicate key' } } diff --git a/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.data b/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.data deleted file mode 100644 index 7e7b4d1..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-mapping-key.former-loader-error.data +++ /dev/null @@ -1,6 +0,0 @@ ---- -&anchor foo: - foo: bar - *anchor: duplicate key - baz: bat - *anchor: duplicate key diff --git a/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.code b/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.code deleted file mode 100644 index 6a757f3..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.code +++ /dev/null @@ -1 +0,0 @@ -{ 'x': 1, 'y': 2, 'foo': 'bar', 'z': 3, 't': 4 } diff --git a/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.data b/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.data deleted file mode 100644 index cebc3a1..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-merge-key.former-loader-error.data +++ /dev/null @@ -1,4 +0,0 @@ ---- -<<: {x: 1, y: 2} -foo: bar -<<: {z: 3, t: 4} diff --git a/libs/PyYAML-3.10/tests/data/duplicate-tag-directive.loader-error b/libs/PyYAML-3.10/tests/data/duplicate-tag-directive.loader-error deleted file mode 100644 index 50c81a0..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-tag-directive.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -%TAG !foo! bar -%TAG !foo! 
baz ---- foo diff --git a/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.code b/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.code deleted file mode 100644 index 12f48c1..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.code +++ /dev/null @@ -1 +0,0 @@ -{ 'foo': 'bar', '=': 2 } diff --git a/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.data b/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.data deleted file mode 100644 index b34a1d6..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-value-key.former-loader-error.data +++ /dev/null @@ -1,4 +0,0 @@ ---- -=: 1 -foo: bar -=: 2 diff --git a/libs/PyYAML-3.10/tests/data/duplicate-yaml-directive.loader-error b/libs/PyYAML-3.10/tests/data/duplicate-yaml-directive.loader-error deleted file mode 100644 index 9b72390..0000000 --- a/libs/PyYAML-3.10/tests/data/duplicate-yaml-directive.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 -%YAML 1.1 ---- foo diff --git a/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.canonical b/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.canonical deleted file mode 100644 index 473bed5..0000000 --- a/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- !!map -{ - ? !!str "foo" - : !!str "bar" -} diff --git a/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.data b/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.data deleted file mode 100644 index b6b42ba..0000000 --- a/libs/PyYAML-3.10/tests/data/emit-block-scalar-in-simple-key-context-bug.data +++ /dev/null @@ -1,4 +0,0 @@ -? 
|- - foo -: |- - bar diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.code b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.code deleted file mode 100644 index 4b92854..0000000 --- a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.code +++ /dev/null @@ -1 +0,0 @@ -u"\udd00" diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.data b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.data deleted file mode 100644 index 2a5df00..0000000 --- a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.data +++ /dev/null @@ -1 +0,0 @@ -"\udd00" diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py2.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.code b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.code deleted file mode 100644 index 2a5df00..0000000 --- a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.code +++ /dev/null @@ -1 +0,0 @@ -"\udd00" diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.data b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.data deleted file mode 100644 index 2a5df00..0000000 --- a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.data +++ /dev/null @@ -1 +0,0 @@ -"\udd00" diff --git a/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext b/libs/PyYAML-3.10/tests/data/emitting-unacceptable-unicode-character-bug-py3.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/empty-anchor.emitter-error 
b/libs/PyYAML-3.10/tests/data/empty-anchor.emitter-error deleted file mode 100644 index ce663b6..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-anchor.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart -- !Scalar { anchor: '', value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/empty-document-bug.canonical b/libs/PyYAML-3.10/tests/data/empty-document-bug.canonical deleted file mode 100644 index 28a6cf1..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-document-bug.canonical +++ /dev/null @@ -1 +0,0 @@ -# This YAML stream contains no YAML documents. diff --git a/libs/PyYAML-3.10/tests/data/empty-document-bug.data b/libs/PyYAML-3.10/tests/data/empty-document-bug.data deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/empty-document-bug.empty b/libs/PyYAML-3.10/tests/data/empty-document-bug.empty deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/empty-documents.single-loader-error b/libs/PyYAML-3.10/tests/data/empty-documents.single-loader-error deleted file mode 100644 index f8dba8d..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-documents.single-loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- # first document ---- # second document diff --git a/libs/PyYAML-3.10/tests/data/empty-python-module.loader-error b/libs/PyYAML-3.10/tests/data/empty-python-module.loader-error deleted file mode 100644 index 83d3232..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-python-module.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python:module: diff --git a/libs/PyYAML-3.10/tests/data/empty-python-name.loader-error b/libs/PyYAML-3.10/tests/data/empty-python-name.loader-error deleted file mode 100644 index 6162957..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-python-name.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name: empty diff --git a/libs/PyYAML-3.10/tests/data/empty-tag-handle.emitter-error 
b/libs/PyYAML-3.10/tests/data/empty-tag-handle.emitter-error deleted file mode 100644 index 235c899..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-tag-handle.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart { tags: { '': 'bar' } } -- !Scalar { value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/empty-tag-prefix.emitter-error b/libs/PyYAML-3.10/tests/data/empty-tag-prefix.emitter-error deleted file mode 100644 index c6c0e95..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-tag-prefix.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart { tags: { '!': '' } } -- !Scalar { value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/empty-tag.emitter-error b/libs/PyYAML-3.10/tests/data/empty-tag.emitter-error deleted file mode 100644 index b7ca593..0000000 --- a/libs/PyYAML-3.10/tests/data/empty-tag.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart -- !Scalar { tag: '', value: 'key', implicit: [false,false] } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-document-end.emitter-error b/libs/PyYAML-3.10/tests/data/expected-document-end.emitter-error deleted file mode 100644 index 0cbab89..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-document-end.emitter-error +++ /dev/null @@ -1,6 +0,0 @@ -- !StreamStart -- !DocumentStart -- !Scalar { value: 'data 1' } -- !Scalar { value: 'data 2' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-document-start.emitter-error b/libs/PyYAML-3.10/tests/data/expected-document-start.emitter-error deleted file mode 100644 index 8ce575e..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-document-start.emitter-error +++ /dev/null @@ -1,4 +0,0 @@ -- !StreamStart -- !MappingStart -- !MappingEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-mapping.loader-error b/libs/PyYAML-3.10/tests/data/expected-mapping.loader-error 
deleted file mode 100644 index 82aed98..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-mapping.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!map [not, a, map] diff --git a/libs/PyYAML-3.10/tests/data/expected-node-1.emitter-error b/libs/PyYAML-3.10/tests/data/expected-node-1.emitter-error deleted file mode 100644 index 36ceca3..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-node-1.emitter-error +++ /dev/null @@ -1,4 +0,0 @@ -- !StreamStart -- !DocumentStart -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-node-2.emitter-error b/libs/PyYAML-3.10/tests/data/expected-node-2.emitter-error deleted file mode 100644 index 891ee37..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-node-2.emitter-error +++ /dev/null @@ -1,7 +0,0 @@ -- !StreamStart -- !DocumentStart -- !MappingStart -- !Scalar { value: 'key' } -- !MappingEnd -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-nothing.emitter-error b/libs/PyYAML-3.10/tests/data/expected-nothing.emitter-error deleted file mode 100644 index 62c54d3..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-nothing.emitter-error +++ /dev/null @@ -1,4 +0,0 @@ -- !StreamStart -- !StreamEnd -- !StreamStart -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/expected-scalar.loader-error b/libs/PyYAML-3.10/tests/data/expected-scalar.loader-error deleted file mode 100644 index 7b3171e..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-scalar.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!str [not a scalar] diff --git a/libs/PyYAML-3.10/tests/data/expected-sequence.loader-error b/libs/PyYAML-3.10/tests/data/expected-sequence.loader-error deleted file mode 100644 index 08074ea..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-sequence.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!seq {foo, bar, baz} diff --git a/libs/PyYAML-3.10/tests/data/expected-stream-start.emitter-error b/libs/PyYAML-3.10/tests/data/expected-stream-start.emitter-error deleted file mode 100644 
index 480dc2e..0000000 --- a/libs/PyYAML-3.10/tests/data/expected-stream-start.emitter-error +++ /dev/null @@ -1,2 +0,0 @@ -- !DocumentStart -- !DocumentEnd diff --git a/libs/PyYAML-3.10/tests/data/explicit-document.single-loader-error b/libs/PyYAML-3.10/tests/data/explicit-document.single-loader-error deleted file mode 100644 index 46c6f8b..0000000 --- a/libs/PyYAML-3.10/tests/data/explicit-document.single-loader-error +++ /dev/null @@ -1,4 +0,0 @@ ---- -foo: bar ---- -foo: bar diff --git a/libs/PyYAML-3.10/tests/data/fetch-complex-value-bug.loader-error b/libs/PyYAML-3.10/tests/data/fetch-complex-value-bug.loader-error deleted file mode 100644 index 25fac24..0000000 --- a/libs/PyYAML-3.10/tests/data/fetch-complex-value-bug.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -? "foo" - : "bar" diff --git a/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.code b/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.code deleted file mode 100644 index d8db834..0000000 --- a/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.code +++ /dev/null @@ -1,7 +0,0 @@ -{ -# 0.0: 0, - 1.0: 1, - 1e300000: +10, - -1e300000: -10, - 1e300000/1e300000: 100, -} diff --git a/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.data b/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.data deleted file mode 100644 index efd1716..0000000 --- a/libs/PyYAML-3.10/tests/data/float-representer-2.3-bug.data +++ /dev/null @@ -1,5 +0,0 @@ -#0.0: # hash(0) == hash(nan) and 0 == nan in Python 2.3 -1.0: 1 -+.inf: 10 --.inf: -10 -.nan: 100 diff --git a/libs/PyYAML-3.10/tests/data/float.data b/libs/PyYAML-3.10/tests/data/float.data deleted file mode 100644 index 524d5db..0000000 --- a/libs/PyYAML-3.10/tests/data/float.data +++ /dev/null @@ -1,6 +0,0 @@ -- 6.8523015e+5 -- 685.230_15e+03 -- 685_230.15 -- 190:20:30.15 -- -.inf -- .NaN diff --git a/libs/PyYAML-3.10/tests/data/float.detect b/libs/PyYAML-3.10/tests/data/float.detect deleted file mode 100644 index 1e12343..0000000 --- 
a/libs/PyYAML-3.10/tests/data/float.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:float diff --git a/libs/PyYAML-3.10/tests/data/forbidden-entry.loader-error b/libs/PyYAML-3.10/tests/data/forbidden-entry.loader-error deleted file mode 100644 index f2e3079..0000000 --- a/libs/PyYAML-3.10/tests/data/forbidden-entry.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -test: - foo - - bar diff --git a/libs/PyYAML-3.10/tests/data/forbidden-key.loader-error b/libs/PyYAML-3.10/tests/data/forbidden-key.loader-error deleted file mode 100644 index da9b471..0000000 --- a/libs/PyYAML-3.10/tests/data/forbidden-key.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -test: ? foo - : bar diff --git a/libs/PyYAML-3.10/tests/data/forbidden-value.loader-error b/libs/PyYAML-3.10/tests/data/forbidden-value.loader-error deleted file mode 100644 index efd7ce5..0000000 --- a/libs/PyYAML-3.10/tests/data/forbidden-value.loader-error +++ /dev/null @@ -1 +0,0 @@ -test: key: value diff --git a/libs/PyYAML-3.10/tests/data/implicit-document.single-loader-error b/libs/PyYAML-3.10/tests/data/implicit-document.single-loader-error deleted file mode 100644 index f8c9a5c..0000000 --- a/libs/PyYAML-3.10/tests/data/implicit-document.single-loader-error +++ /dev/null @@ -1,3 +0,0 @@ -foo: bar ---- -foo: bar diff --git a/libs/PyYAML-3.10/tests/data/int.data b/libs/PyYAML-3.10/tests/data/int.data deleted file mode 100644 index d44d376..0000000 --- a/libs/PyYAML-3.10/tests/data/int.data +++ /dev/null @@ -1,6 +0,0 @@ -- 685230 -- +685_230 -- 02472256 -- 0x_0A_74_AE -- 0b1010_0111_0100_1010_1110 -- 190:20:30 diff --git a/libs/PyYAML-3.10/tests/data/int.detect b/libs/PyYAML-3.10/tests/data/int.detect deleted file mode 100644 index 575c9eb..0000000 --- a/libs/PyYAML-3.10/tests/data/int.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:int diff --git a/libs/PyYAML-3.10/tests/data/invalid-anchor-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-anchor-1.loader-error deleted file mode 100644 index fcf7d0f..0000000 
--- a/libs/PyYAML-3.10/tests/data/invalid-anchor-1.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- &? foo # we allow only ascii and numeric characters in anchor names. diff --git a/libs/PyYAML-3.10/tests/data/invalid-anchor-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-anchor-2.loader-error deleted file mode 100644 index bfc4ff0..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-anchor-2.loader-error +++ /dev/null @@ -1,8 +0,0 @@ ---- -- [ - &correct foo, - *correct, - *correct] # still correct -- *correct: still correct -- &correct-or-not[foo, bar] - diff --git a/libs/PyYAML-3.10/tests/data/invalid-anchor.emitter-error b/libs/PyYAML-3.10/tests/data/invalid-anchor.emitter-error deleted file mode 100644 index 3d2a814..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-anchor.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart -- !Scalar { anchor: '5*5=25', value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/invalid-base64-data-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-base64-data-2.loader-error deleted file mode 100644 index 2553a4f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-base64-data-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- !!binary - двоичные данные в base64 diff --git a/libs/PyYAML-3.10/tests/data/invalid-base64-data.loader-error b/libs/PyYAML-3.10/tests/data/invalid-base64-data.loader-error deleted file mode 100644 index 798abba..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-base64-data.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- !!binary - binary data encoded in base64 should be here. diff --git a/libs/PyYAML-3.10/tests/data/invalid-block-scalar-indicator.loader-error b/libs/PyYAML-3.10/tests/data/invalid-block-scalar-indicator.loader-error deleted file mode 100644 index 16a6db1..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-block-scalar-indicator.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- > what is this? 
# a comment -data diff --git a/libs/PyYAML-3.10/tests/data/invalid-character.loader-error b/libs/PyYAML-3.10/tests/data/invalid-character.loader-error deleted file mode 100644 index 03687b0..0000000 Binary files a/libs/PyYAML-3.10/tests/data/invalid-character.loader-error and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/invalid-character.stream-error b/libs/PyYAML-3.10/tests/data/invalid-character.stream-error deleted file mode 100644 index 171face..0000000 Binary files a/libs/PyYAML-3.10/tests/data/invalid-character.stream-error and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/invalid-directive-line.loader-error b/libs/PyYAML-3.10/tests/data/invalid-directive-line.loader-error deleted file mode 100644 index 0892eb6..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-directive-line.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 1.1 ? # extra symbol ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-directive-name-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-directive-name-1.loader-error deleted file mode 100644 index 153fd88..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-directive-name-1.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -% # no name at all ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-directive-name-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-directive-name-2.loader-error deleted file mode 100644 index 3732a06..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-directive-name-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%invalid-characters:in-directive name ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-escape-character.loader-error b/libs/PyYAML-3.10/tests/data/invalid-escape-character.loader-error deleted file mode 100644 index a95ab76..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-escape-character.loader-error +++ /dev/null @@ -1 +0,0 @@ -"some escape characters are \ncorrect, but this one \?\nis not\n" diff --git a/libs/PyYAML-3.10/tests/data/invalid-escape-numbers.loader-error 
b/libs/PyYAML-3.10/tests/data/invalid-escape-numbers.loader-error deleted file mode 100644 index 614ec9f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-escape-numbers.loader-error +++ /dev/null @@ -1 +0,0 @@ -"hm.... \u123?" diff --git a/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-1.loader-error deleted file mode 100644 index a3cd12f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-1.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- >0 # not valid -data diff --git a/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-2.loader-error deleted file mode 100644 index eefb6ec..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-indentation-indicator-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- >-0 -data diff --git a/libs/PyYAML-3.10/tests/data/invalid-item-without-trailing-break.loader-error b/libs/PyYAML-3.10/tests/data/invalid-item-without-trailing-break.loader-error deleted file mode 100644 index fdcf6c6..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-item-without-trailing-break.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -- --0 \ No newline at end of file diff --git a/libs/PyYAML-3.10/tests/data/invalid-merge-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-merge-1.loader-error deleted file mode 100644 index fc3c284..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-merge-1.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -foo: bar -<<: baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-merge-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-merge-2.loader-error deleted file mode 100644 index 8e88615..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-merge-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -foo: bar -<<: [x: 1, y: 2, z, t: 4] diff --git a/libs/PyYAML-3.10/tests/data/invalid-omap-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-omap-1.loader-error 
deleted file mode 100644 index 2863392..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-omap-1.loader-error +++ /dev/null @@ -1,3 +0,0 @@ ---- !!omap -foo: bar -baz: bat diff --git a/libs/PyYAML-3.10/tests/data/invalid-omap-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-omap-2.loader-error deleted file mode 100644 index c377dfb..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-omap-2.loader-error +++ /dev/null @@ -1,3 +0,0 @@ ---- !!omap -- foo: bar -- baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-omap-3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-omap-3.loader-error deleted file mode 100644 index 2a4f50d..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-omap-3.loader-error +++ /dev/null @@ -1,4 +0,0 @@ ---- !!omap -- foo: bar -- baz: bar - bar: bar diff --git a/libs/PyYAML-3.10/tests/data/invalid-pairs-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-pairs-1.loader-error deleted file mode 100644 index 42d19ae..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-pairs-1.loader-error +++ /dev/null @@ -1,3 +0,0 @@ ---- !!pairs -foo: bar -baz: bat diff --git a/libs/PyYAML-3.10/tests/data/invalid-pairs-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-pairs-2.loader-error deleted file mode 100644 index 31389ea..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-pairs-2.loader-error +++ /dev/null @@ -1,3 +0,0 @@ ---- !!pairs -- foo: bar -- baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-pairs-3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-pairs-3.loader-error deleted file mode 100644 index f8d7704..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-pairs-3.loader-error +++ /dev/null @@ -1,4 +0,0 @@ ---- !!pairs -- foo: bar -- baz: bar - bar: bar diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-bytes-2-py3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-bytes-2-py3.loader-error deleted file mode 100644 index f43af59..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-bytes-2-py3.loader-error +++ /dev/null @@ 
-1,2 +0,0 @@ ---- !!python/bytes - двоичные данные в base64 diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-bytes-py3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-bytes-py3.loader-error deleted file mode 100644 index a19dfd0..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-bytes-py3.loader-error +++ /dev/null @@ -1,2 +0,0 @@ ---- !!python/bytes - binary data encoded in base64 should be here. diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-module-kind.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-module-kind.loader-error deleted file mode 100644 index 4f71cb5..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-module-kind.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/module:sys { must, be, scalar } diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-module-value.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-module-value.loader-error deleted file mode 100644 index f6797fc..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-module-value.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/module:sys "non-empty value" diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-module.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-module.loader-error deleted file mode 100644 index 4e24072..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-module.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/module:no.such.module diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-name-kind.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-name-kind.loader-error deleted file mode 100644 index 6ff8eb6..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-name-kind.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name:sys.modules {} diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-name-module-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-name-module-2.loader-error deleted file mode 100644 index debc313..0000000 --- 
a/libs/PyYAML-3.10/tests/data/invalid-python-name-module-2.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name:xml.parsers diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-name-module.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-name-module.loader-error deleted file mode 100644 index 1966f6a..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-name-module.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name:sys.modules.keys diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-name-object.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-name-object.loader-error deleted file mode 100644 index 50f386f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-name-object.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name:os.path.rm_rf diff --git a/libs/PyYAML-3.10/tests/data/invalid-python-name-value.loader-error b/libs/PyYAML-3.10/tests/data/invalid-python-name-value.loader-error deleted file mode 100644 index 7be1401..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-python-name-value.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !!python/name:sys.modules 5 diff --git a/libs/PyYAML-3.10/tests/data/invalid-simple-key.loader-error b/libs/PyYAML-3.10/tests/data/invalid-simple-key.loader-error deleted file mode 100644 index a58deec..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-simple-key.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -key: value -invalid simple key -next key: next value diff --git a/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.code b/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.code deleted file mode 100644 index 5558945..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.code +++ /dev/null @@ -1 +0,0 @@ -["foo 'bar'", "foo\n'bar'"] diff --git a/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.data b/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.data deleted file mode 100644 index 76ef7ae..0000000 --- 
a/libs/PyYAML-3.10/tests/data/invalid-single-quote-bug.data +++ /dev/null @@ -1,2 +0,0 @@ -- "foo 'bar'" -- "foo\n'bar'" diff --git a/libs/PyYAML-3.10/tests/data/invalid-starting-character.loader-error b/libs/PyYAML-3.10/tests/data/invalid-starting-character.loader-error deleted file mode 100644 index bb81c60..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-starting-character.loader-error +++ /dev/null @@ -1 +0,0 @@ -@@@@@@@@@@@@@@@@@@@ diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-1.loader-error deleted file mode 100644 index a68cd38..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-1.loader-error +++ /dev/null @@ -1 +0,0 @@ -- ! baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-2.loader-error deleted file mode 100644 index 3a36700..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-2.loader-error +++ /dev/null @@ -1 +0,0 @@ -- !prefix!foo#bar baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-directive-handle.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-directive-handle.loader-error deleted file mode 100644 index 42b5d7e..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-directive-handle.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%TAG !!! !!! ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-directive-prefix.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-directive-prefix.loader-error deleted file mode 100644 index 0cb482c..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-directive-prefix.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%TAG ! 
tag:zz.com/foo#bar # '#' is not allowed in URLs ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.emitter-error b/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.emitter-error deleted file mode 100644 index d5df9a2..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart { tags: { '!foo': 'bar' } } -- !Scalar { value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.loader-error deleted file mode 100644 index ef0d143..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-1.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%TAG foo bar ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.emitter-error b/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.emitter-error deleted file mode 100644 index d1831d5..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart { tags: { '!!!': 'bar' } } -- !Scalar { value: 'foo' } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.loader-error deleted file mode 100644 index 06c7f0e..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-tag-handle-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%TAG !foo bar ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-1.loader-error deleted file mode 100644 index a6ecb36..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-1.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- ! 
foo diff --git a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-2.loader-error deleted file mode 100644 index b89e8f6..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-2.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !<%FF> foo diff --git a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-3.loader-error deleted file mode 100644 index f2e4cb8..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-uri-escapes-3.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- ! baz diff --git a/libs/PyYAML-3.10/tests/data/invalid-uri.loader-error b/libs/PyYAML-3.10/tests/data/invalid-uri.loader-error deleted file mode 100644 index 06307e0..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-uri.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !foo! bar diff --git a/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.loader-error b/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.loader-error deleted file mode 100644 index 0a58c70..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.loader-error +++ /dev/null @@ -1,66 +0,0 @@ -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### 
-############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### 
-############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -Invalid byte ('\xFF'): <-- -############################################################### diff --git a/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.stream-error b/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.stream-error deleted file mode 100644 index 0a58c70..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-utf8-byte.stream-error +++ /dev/null @@ -1,66 +0,0 @@ -############################################################### -############################################################### -############################################################### -############################################################### 
-############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### 
-############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -############################################################### -Invalid byte ('\xFF'): <-- 
-############################################################### diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-1.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-1.loader-error deleted file mode 100644 index e9b4e3a..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-1.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -# No version at all. -%YAML ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-2.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-2.loader-error deleted file mode 100644 index 6aa7740..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-2.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 1e-5 ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-3.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-3.loader-error deleted file mode 100644 index 345e784..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-3.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 1. 
---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-4.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-4.loader-error deleted file mode 100644 index b35ca82..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-4.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 1.132.435 ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-5.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-5.loader-error deleted file mode 100644 index 7c2b49f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-5.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML A.0 ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-6.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-6.loader-error deleted file mode 100644 index bae714f..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-directive-version-6.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 123.C ---- diff --git a/libs/PyYAML-3.10/tests/data/invalid-yaml-version.loader-error b/libs/PyYAML-3.10/tests/data/invalid-yaml-version.loader-error deleted file mode 100644 index dd01948..0000000 --- a/libs/PyYAML-3.10/tests/data/invalid-yaml-version.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 2.0 ---- foo diff --git a/libs/PyYAML-3.10/tests/data/latin.unicode b/libs/PyYAML-3.10/tests/data/latin.unicode deleted file mode 100644 index 4fb799c..0000000 --- a/libs/PyYAML-3.10/tests/data/latin.unicode +++ /dev/null @@ -1,384 +0,0 @@ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ 
-ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ 
-єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ 
-ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ 
-ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ 
-ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ 
-ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ 
-ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ 
-ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ 
-ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ 
-őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ 
-ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ 
-ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ 
-ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ -ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ -ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyzªµºÀÁÂÃÄÅÆÇÈÉÊ -ËÌÍÎÏÐÑÒÓÔÕÖØÙÚÛÜÝÞßàáâãäåæçèéêëìíîïðñòóôõöøùúûüýþÿĀāĂ㥹ĆćĈĉĊċČčĎ -ďĐđĒēĔĕĖėĘęĚěĜĝĞğĠġĢģĤĥĦħĨĩĪīĬĭĮįİıIJijĴĵĶķĸĹĺĻļĽľĿŀŁłŃńŅņŇňʼnŊŋŌōŎŏŐ -őŒœŔŕŖŗŘřŚśŜŝŞşŠšŢţŤťŦŧŨũŪūŬŭŮůŰűŲųŴŵŶŷŸŹźŻżŽžſƀƁƂƃƄƅƆƇƈƉƊƋƌƍƎƏƐƑƒ -ƓƔƕƖƗƘƙƚƛƜƝƞƟƠơƢƣƤƥƦƧƨƩƪƫƬƭƮƯưƱƲƳƴƵƶƷƸƹƺƼƽƾƿDŽdžLJljNJnjǍǎǏǐǑǒǓǔǕǖǗǘǙǚǛǜ -ǝǞǟǠǡǢǣǤǥǦǧǨǩǪǫǬǭǮǯǰDZdzǴǵǶǷǸǹǺǻǼǽǾǿȀȁȂȃȄȅȆȇȈȉȊȋȌȍȎȏȐȑȒȓȔȕȖȗȘșȚțȜȝȞȟ -ȠȡȢȣȤȥȦȧȨȩȪȫȬȭȮȯȰȱȲȳȴȵȶȷȸȹȺȻȼȽȾȿɀɁɐɑɒɓɔɕɖɗɘəɚɛɜɝɞɟɠɡɢɣɤɥɦɧɨɩɪɫɬɭɮɯ -ɰɱɲɳɴɵɶɷɸɹɺɻɼɽɾɿʀʁʂʃʄʅʆʇʈʉʊʋʌʍʎʏʐʑʒʓʔʕʖʗʘʙʚʛʜʝʞʟʠʡʢʣʤʥʦʧʨʩʪʫʬʭʮʯΆΈ -ΉΊΌΎΏΐΑΒΓΔΕΖΗΘΙΚΛΜΝΞΟΠΡΣΤΥΦΧΨΩΪΫάέήίΰαβγδεζηθικλμνξοπρςστυφχψωϊϋόύ -ώϐϑϒϓϔϕϖϗϘϙϚϛϜϝϞϟϠϡϢϣϤϥϦϧϨϩϪϫϬϭϮϯϰϱϲϳϴϵϷϸϹϺϻϼϽϾϿЀЁЂЃЄЅІЇЈЉЊЋЌЍЎЏАБ -ВГДЕЖЗИЙКЛМНОПРСТУФХЦЧШЩЪЫЬЭЮЯабвгдежзийклмнопрстуфхцчшщъыьэюяѐёђѓ -єѕіїјљњћќѝўџѠѡѢѣѤѥѦѧѨѩѪѫѬѭѮѯѰѱѲѳѴѵѶѷѸѹѺѻѼѽѾѿҀҁҊҋҌҍҎҏҐґҒғҔҕҖҗҘҙҚқҜҝ -ҞҟҠҡҢңҤҥҦҧҨҩҪҫҬҭҮүҰұҲҳҴҵҶҷҸҹҺһҼҽҾҿӀӁӂӃӄӅӆӇӈӉӊӋӌӍӎӐӑӒӓӔӕӖӗӘәӚӛӜӝӞӟӠ -ӡӢӣӤӥӦӧӨөӪӫӬӭӮӯӰӱӲӳӴӵӶӷӸӹԀԁԂԃԄԅԆԇԈԉԊԋԌԍԎԏԱԲԳԴԵԶԷԸԹԺԻԼԽԾԿՀՁՂՃՄՅՆՇՈՉ -ՊՋՌՍՎՏՐՑՒՓՔՕՖաբգդեզէըթժիլխծկհձղճմյնշոչպջռսվտրցւփքօֆևႠႡႢႣႤႥႦႧႨႩႪႫႬႭ -ႮႯႰႱႲႳႴႵႶႷႸႹႺႻႼႽႾႿჀჁჂჃჄჅᴀᴁᴂᴃᴄᴅᴆᴇᴈᴉᴊᴋᴌᴍᴎᴏᴐᴑᴒᴓᴔᴕᴖᴗᴘᴙᴚᴛᴜᴝᴞᴟᴠᴡᴢᴣᴤᴥᴦᴧᴨᴩ -ᴪᴫᵢᵣᵤᵥᵦᵧᵨᵩᵪᵫᵬᵭᵮᵯᵰᵱᵲᵳᵴᵵᵶᵷᵹᵺᵻᵼᵽᵾᵿᶀᶁᶂᶃᶄᶅᶆᶇᶈᶉᶊᶋᶌᶍᶎᶏᶐᶑᶒᶓᶔᶕᶖᶗᶘᶙᶚḀḁḂḃḄḅḆḇ -ḈḉḊḋḌḍḎḏḐḑḒḓḔḕḖḗḘḙḚḛḜḝḞḟḠḡḢḣḤḥḦḧḨḩḪḫḬḭḮḯḰḱḲḳḴḵḶḷḸḹḺḻḼḽḾḿṀṁṂṃṄṅṆṇṈṉ -ṊṋṌṍṎṏṐṑṒṓṔṕṖṗṘṙṚṛṜṝṞṟṠṡṢṣṤṥṦṧṨṩṪṫṬṭṮṯṰṱṲṳṴṵṶṷṸṹṺṻṼṽṾṿẀẁẂẃẄẅẆẇẈẉẊẋ -ẌẍẎẏẐẑẒẓẔẕẖẗẘẙẚẛẠạẢảẤấẦầẨẩẪẫẬậẮắẰằẲẳẴẵẶặẸẹẺẻẼẽẾếỀềỂểỄễỆệỈỉỊịỌọỎỏỐố -ỒồỔổỖỗỘộỚớỜờỞởỠỡỢợỤụỦủỨứỪừỬửỮữỰựỲỳỴỵỶỷỸỹἀἁἂἃἄἅἆἇἈἉἊἋἌἍἎἏἐἑἒἓἔἕἘἙἚἛ -ἜἝἠἡἢἣἤἥἦἧἨἩἪἫἬἭἮἯἰἱἲἳἴἵἶἷἸἹἺἻἼἽἾἿὀὁὂὃὄὅὈὉὊὋὌὍὐὑὒὓὔὕὖὗὙὛὝὟὠὡὢὣὤὥὦὧ -ὨὩὪὫὬὭὮὯὰάὲέὴήὶίὸόὺύὼώᾀᾁᾂᾃᾄᾅᾆᾇᾐᾑᾒᾓᾔᾕᾖᾗᾠᾡᾢᾣᾤᾥᾦᾧᾰᾱᾲᾳᾴᾶᾷᾸᾹᾺΆιῂῃῄῆῇῈΈῊ 
-ΉῐῑῒΐῖῗῘῙῚΊῠῡῢΰῤῥῦῧῨῩῪΎῬῲῳῴῶῷῸΌῺΏⁱⁿℂℇℊℋℌℍℎℏℐℑℒℓℕℙℚℛℜℝℤΩℨKÅℬℭℯℰℱℳℴℹ diff --git a/libs/PyYAML-3.10/tests/data/mappings.events b/libs/PyYAML-3.10/tests/data/mappings.events deleted file mode 100644 index 3cb5579..0000000 --- a/libs/PyYAML-3.10/tests/data/mappings.events +++ /dev/null @@ -1,44 +0,0 @@ -- !StreamStart - -- !DocumentStart -- !MappingStart -- !Scalar { implicit: [true,true], value: 'key' } -- !Scalar { implicit: [true,true], value: 'value' } -- !Scalar { implicit: [true,true], value: 'empty mapping' } -- !MappingStart -- !MappingEnd -- !Scalar { implicit: [true,true], value: 'empty mapping with tag' } -- !MappingStart { tag: '!mytag', implicit: false } -- !MappingEnd -- !Scalar { implicit: [true,true], value: 'block mapping' } -- !MappingStart -- !MappingStart -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !MappingEnd -- !MappingStart -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !MappingEnd -- !MappingEnd -- !Scalar { implicit: [true,true], value: 'flow mapping' } -- !MappingStart { flow_style: true } -- !Scalar { implicit: [true,true], value: 'key' } -- !Scalar { implicit: [true,true], value: 'value' } -- !MappingStart -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !MappingEnd -- !MappingStart -- !Scalar { implicit: [true,true], value: 'complex' } -- !Scalar { implicit: [true,true], value: 'key' } -- !MappingEnd -- !MappingEnd -- !MappingEnd -- !DocumentEnd - -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/merge.data b/libs/PyYAML-3.10/tests/data/merge.data deleted file mode 100644 index e455bbc..0000000 --- a/libs/PyYAML-3.10/tests/data/merge.data +++ 
/dev/null @@ -1 +0,0 @@ -- << diff --git a/libs/PyYAML-3.10/tests/data/merge.detect b/libs/PyYAML-3.10/tests/data/merge.detect deleted file mode 100644 index 1672d0d..0000000 --- a/libs/PyYAML-3.10/tests/data/merge.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:merge diff --git a/libs/PyYAML-3.10/tests/data/more-floats.code b/libs/PyYAML-3.10/tests/data/more-floats.code deleted file mode 100644 index e3e444e..0000000 --- a/libs/PyYAML-3.10/tests/data/more-floats.code +++ /dev/null @@ -1 +0,0 @@ -[0.0, +1.0, -1.0, +1e300000, -1e300000, 1e300000/1e300000, -(1e300000/1e300000)] # last two items are ind and qnan respectively. diff --git a/libs/PyYAML-3.10/tests/data/more-floats.data b/libs/PyYAML-3.10/tests/data/more-floats.data deleted file mode 100644 index 399eb17..0000000 --- a/libs/PyYAML-3.10/tests/data/more-floats.data +++ /dev/null @@ -1 +0,0 @@ -[0.0, +1.0, -1.0, +.inf, -.inf, .nan, .nan] diff --git a/libs/PyYAML-3.10/tests/data/negative-float-bug.code b/libs/PyYAML-3.10/tests/data/negative-float-bug.code deleted file mode 100644 index 18e16e3..0000000 --- a/libs/PyYAML-3.10/tests/data/negative-float-bug.code +++ /dev/null @@ -1 +0,0 @@ --1.0 diff --git a/libs/PyYAML-3.10/tests/data/negative-float-bug.data b/libs/PyYAML-3.10/tests/data/negative-float-bug.data deleted file mode 100644 index 18e16e3..0000000 --- a/libs/PyYAML-3.10/tests/data/negative-float-bug.data +++ /dev/null @@ -1 +0,0 @@ --1.0 diff --git a/libs/PyYAML-3.10/tests/data/no-alias-anchor.emitter-error b/libs/PyYAML-3.10/tests/data/no-alias-anchor.emitter-error deleted file mode 100644 index 5ff065c..0000000 --- a/libs/PyYAML-3.10/tests/data/no-alias-anchor.emitter-error +++ /dev/null @@ -1,8 +0,0 @@ -- !StreamStart -- !DocumentStart -- !SequenceStart -- !Scalar { anchor: A, value: data } -- !Alias { } -- !SequenceEnd -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/no-alias-anchor.skip-ext b/libs/PyYAML-3.10/tests/data/no-alias-anchor.skip-ext deleted file mode 
100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/no-block-collection-end.loader-error b/libs/PyYAML-3.10/tests/data/no-block-collection-end.loader-error deleted file mode 100644 index 02d4d37..0000000 --- a/libs/PyYAML-3.10/tests/data/no-block-collection-end.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -- foo -- bar -baz: bar diff --git a/libs/PyYAML-3.10/tests/data/no-block-mapping-end-2.loader-error b/libs/PyYAML-3.10/tests/data/no-block-mapping-end-2.loader-error deleted file mode 100644 index be63571..0000000 --- a/libs/PyYAML-3.10/tests/data/no-block-mapping-end-2.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -? foo -: bar -: baz diff --git a/libs/PyYAML-3.10/tests/data/no-block-mapping-end.loader-error b/libs/PyYAML-3.10/tests/data/no-block-mapping-end.loader-error deleted file mode 100644 index 1ea921c..0000000 --- a/libs/PyYAML-3.10/tests/data/no-block-mapping-end.loader-error +++ /dev/null @@ -1 +0,0 @@ -foo: "bar" "baz" diff --git a/libs/PyYAML-3.10/tests/data/no-document-start.loader-error b/libs/PyYAML-3.10/tests/data/no-document-start.loader-error deleted file mode 100644 index c725ec8..0000000 --- a/libs/PyYAML-3.10/tests/data/no-document-start.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 -# no --- -foo: bar diff --git a/libs/PyYAML-3.10/tests/data/no-flow-mapping-end.loader-error b/libs/PyYAML-3.10/tests/data/no-flow-mapping-end.loader-error deleted file mode 100644 index 8bd1403..0000000 --- a/libs/PyYAML-3.10/tests/data/no-flow-mapping-end.loader-error +++ /dev/null @@ -1 +0,0 @@ -{ foo: bar ] diff --git a/libs/PyYAML-3.10/tests/data/no-flow-sequence-end.loader-error b/libs/PyYAML-3.10/tests/data/no-flow-sequence-end.loader-error deleted file mode 100644 index 750d973..0000000 --- a/libs/PyYAML-3.10/tests/data/no-flow-sequence-end.loader-error +++ /dev/null @@ -1 +0,0 @@ -[foo, bar} diff --git a/libs/PyYAML-3.10/tests/data/no-node-1.loader-error b/libs/PyYAML-3.10/tests/data/no-node-1.loader-error deleted file mode 100644 
index 07b1500..0000000 --- a/libs/PyYAML-3.10/tests/data/no-node-1.loader-error +++ /dev/null @@ -1 +0,0 @@ -- !foo ] diff --git a/libs/PyYAML-3.10/tests/data/no-node-2.loader-error b/libs/PyYAML-3.10/tests/data/no-node-2.loader-error deleted file mode 100644 index 563e3b3..0000000 --- a/libs/PyYAML-3.10/tests/data/no-node-2.loader-error +++ /dev/null @@ -1 +0,0 @@ -- [ !foo } ] diff --git a/libs/PyYAML-3.10/tests/data/no-tag.emitter-error b/libs/PyYAML-3.10/tests/data/no-tag.emitter-error deleted file mode 100644 index 384c62f..0000000 --- a/libs/PyYAML-3.10/tests/data/no-tag.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart -- !Scalar { value: 'foo', implicit: [false,false] } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/null.data b/libs/PyYAML-3.10/tests/data/null.data deleted file mode 100644 index ad12528..0000000 --- a/libs/PyYAML-3.10/tests/data/null.data +++ /dev/null @@ -1,3 +0,0 @@ -- -- ~ -- null diff --git a/libs/PyYAML-3.10/tests/data/null.detect b/libs/PyYAML-3.10/tests/data/null.detect deleted file mode 100644 index 19110c7..0000000 --- a/libs/PyYAML-3.10/tests/data/null.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:null diff --git a/libs/PyYAML-3.10/tests/data/odd-utf16.stream-error b/libs/PyYAML-3.10/tests/data/odd-utf16.stream-error deleted file mode 100644 index b59e434..0000000 Binary files a/libs/PyYAML-3.10/tests/data/odd-utf16.stream-error and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/recursive-anchor.former-loader-error b/libs/PyYAML-3.10/tests/data/recursive-anchor.former-loader-error deleted file mode 100644 index 661166c..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive-anchor.former-loader-error +++ /dev/null @@ -1,4 +0,0 @@ -- &foo [1 - 2, - 3, - *foo] diff --git a/libs/PyYAML-3.10/tests/data/recursive-dict.recursive b/libs/PyYAML-3.10/tests/data/recursive-dict.recursive deleted file mode 100644 index 8f326f5..0000000 --- 
a/libs/PyYAML-3.10/tests/data/recursive-dict.recursive +++ /dev/null @@ -1,3 +0,0 @@ -value = {} -instance = AnInstance(value, value) -value[instance] = instance diff --git a/libs/PyYAML-3.10/tests/data/recursive-list.recursive b/libs/PyYAML-3.10/tests/data/recursive-list.recursive deleted file mode 100644 index 27a4ae5..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive-list.recursive +++ /dev/null @@ -1,2 +0,0 @@ -value = [] -value.append(value) diff --git a/libs/PyYAML-3.10/tests/data/recursive-set.recursive b/libs/PyYAML-3.10/tests/data/recursive-set.recursive deleted file mode 100644 index 457c50d..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive-set.recursive +++ /dev/null @@ -1,7 +0,0 @@ -try: - set -except NameError: - from sets import Set as set -value = set() -value.add(AnInstance(foo=value, bar=value)) -value.add(AnInstance(foo=value, bar=value)) diff --git a/libs/PyYAML-3.10/tests/data/recursive-state.recursive b/libs/PyYAML-3.10/tests/data/recursive-state.recursive deleted file mode 100644 index bffe61e..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive-state.recursive +++ /dev/null @@ -1,2 +0,0 @@ -value = [] -value.append(AnInstanceWithState(value, value)) diff --git a/libs/PyYAML-3.10/tests/data/recursive-tuple.recursive b/libs/PyYAML-3.10/tests/data/recursive-tuple.recursive deleted file mode 100644 index dc08d02..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive-tuple.recursive +++ /dev/null @@ -1,3 +0,0 @@ -value = ([], []) -value[0].append(value) -value[1].append(value[0]) diff --git a/libs/PyYAML-3.10/tests/data/recursive.former-dumper-error b/libs/PyYAML-3.10/tests/data/recursive.former-dumper-error deleted file mode 100644 index 3c7cc2f..0000000 --- a/libs/PyYAML-3.10/tests/data/recursive.former-dumper-error +++ /dev/null @@ -1,3 +0,0 @@ -data = [] -data.append(data) -dump(data) diff --git a/libs/PyYAML-3.10/tests/data/remove-possible-simple-key-bug.loader-error b/libs/PyYAML-3.10/tests/data/remove-possible-simple-key-bug.loader-error 
deleted file mode 100644 index fe1bc6c..0000000 --- a/libs/PyYAML-3.10/tests/data/remove-possible-simple-key-bug.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -foo: &A bar -*A ] # The ']' indicator triggers remove_possible_simple_key, - # which should raise an error. diff --git a/libs/PyYAML-3.10/tests/data/resolver.data b/libs/PyYAML-3.10/tests/data/resolver.data deleted file mode 100644 index a296404..0000000 --- a/libs/PyYAML-3.10/tests/data/resolver.data +++ /dev/null @@ -1,30 +0,0 @@ ---- -"this scalar should be selected" ---- -key11: !foo - key12: - is: [selected] - key22: - key13: [not, selected] - key23: [not, selected] - key32: - key31: [not, selected] - key32: [not, selected] - key33: {not: selected} -key21: !bar - - not selected - - selected - - not selected -key31: !baz - key12: - key13: - key14: {selected} - key23: - key14: [not, selected] - key33: - key14: {selected} - key24: {not: selected} - key22: - - key14: {selected} - key24: {not: selected} - - key14: {selected} diff --git a/libs/PyYAML-3.10/tests/data/resolver.path b/libs/PyYAML-3.10/tests/data/resolver.path deleted file mode 100644 index ec677d2..0000000 --- a/libs/PyYAML-3.10/tests/data/resolver.path +++ /dev/null @@ -1,30 +0,0 @@ ---- !root/scalar -"this scalar should be selected" ---- !root -key11: !foo - key12: !root/key11/key12/* - is: [selected] - key22: - key13: [not, selected] - key23: [not, selected] - key32: - key31: [not, selected] - key32: [not, selected] - key33: {not: selected} -key21: !bar - - not selected - - !root/key21/1/* selected - - not selected -key31: !baz - key12: - key13: - key14: !root/key31/*/*/key14/map {selected} - key23: - key14: [not, selected] - key33: - key14: !root/key31/*/*/key14/map {selected} - key24: {not: selected} - key22: - - key14: !root/key31/*/*/key14/map {selected} - key24: {not: selected} - - key14: !root/key31/*/*/key14/map {selected} diff --git a/libs/PyYAML-3.10/tests/data/run-parser-crash-bug.data 
b/libs/PyYAML-3.10/tests/data/run-parser-crash-bug.data deleted file mode 100644 index fe01734..0000000 --- a/libs/PyYAML-3.10/tests/data/run-parser-crash-bug.data +++ /dev/null @@ -1,8 +0,0 @@ ---- -- Harry Potter and the Prisoner of Azkaban -- Harry Potter and the Goblet of Fire -- Harry Potter and the Order of the Phoenix ---- -- Memoirs Found in a Bathtub -- Snow Crash -- Ghost World diff --git a/libs/PyYAML-3.10/tests/data/scalars.events b/libs/PyYAML-3.10/tests/data/scalars.events deleted file mode 100644 index 32c40f4..0000000 --- a/libs/PyYAML-3.10/tests/data/scalars.events +++ /dev/null @@ -1,28 +0,0 @@ -- !StreamStart - -- !DocumentStart -- !MappingStart -- !Scalar { implicit: [true,true], value: 'empty scalar' } -- !Scalar { implicit: [true,false], value: '' } -- !Scalar { implicit: [true,true], value: 'implicit scalar' } -- !Scalar { implicit: [true,true], value: 'data' } -- !Scalar { implicit: [true,true], value: 'quoted scalar' } -- !Scalar { value: 'data', style: '"' } -- !Scalar { implicit: [true,true], value: 'block scalar' } -- !Scalar { value: 'data', style: '|' } -- !Scalar { implicit: [true,true], value: 'empty scalar with tag' } -- !Scalar { implicit: [false,false], tag: '!mytag', value: '' } -- !Scalar { implicit: [true,true], value: 'implicit scalar with tag' } -- !Scalar { implicit: [false,false], tag: '!mytag', value: 'data' } -- !Scalar { implicit: [true,true], value: 'quoted scalar with tag' } -- !Scalar { value: 'data', style: '"', tag: '!mytag', implicit: [false,false] } -- !Scalar { implicit: [true,true], value: 'block scalar with tag' } -- !Scalar { value: 'data', style: '|', tag: '!mytag', implicit: [false,false] } -- !Scalar { implicit: [true,true], value: 'single character' } -- !Scalar { value: 'a', implicit: [true,true] } -- !Scalar { implicit: [true,true], value: 'single digit' } -- !Scalar { value: '1', implicit: [true,false] } -- !MappingEnd -- !DocumentEnd - -- !StreamEnd diff --git 
a/libs/PyYAML-3.10/tests/data/scan-document-end-bug.canonical b/libs/PyYAML-3.10/tests/data/scan-document-end-bug.canonical deleted file mode 100644 index 4a0e8a8..0000000 --- a/libs/PyYAML-3.10/tests/data/scan-document-end-bug.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!null "" diff --git a/libs/PyYAML-3.10/tests/data/scan-document-end-bug.data b/libs/PyYAML-3.10/tests/data/scan-document-end-bug.data deleted file mode 100644 index 3c70543..0000000 --- a/libs/PyYAML-3.10/tests/data/scan-document-end-bug.data +++ /dev/null @@ -1,3 +0,0 @@ -# Ticket #4 ---- -... \ No newline at end of file diff --git a/libs/PyYAML-3.10/tests/data/scan-line-break-bug.canonical b/libs/PyYAML-3.10/tests/data/scan-line-break-bug.canonical deleted file mode 100644 index 79f08b7..0000000 --- a/libs/PyYAML-3.10/tests/data/scan-line-break-bug.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!map { ? !!str "foo" : !!str "bar baz" } diff --git a/libs/PyYAML-3.10/tests/data/scan-line-break-bug.data b/libs/PyYAML-3.10/tests/data/scan-line-break-bug.data deleted file mode 100644 index 5856454..0000000 --- a/libs/PyYAML-3.10/tests/data/scan-line-break-bug.data +++ /dev/null @@ -1,3 +0,0 @@ -foo: - bar - baz diff --git a/libs/PyYAML-3.10/tests/data/sequences.events b/libs/PyYAML-3.10/tests/data/sequences.events deleted file mode 100644 index 692a329..0000000 --- a/libs/PyYAML-3.10/tests/data/sequences.events +++ /dev/null @@ -1,81 +0,0 @@ -- !StreamStart - -- !DocumentStart -- !SequenceStart -- !SequenceEnd -- !DocumentEnd - -- !DocumentStart -- !SequenceStart { tag: '!mytag', implicit: false } -- !SequenceEnd -- !DocumentEnd - -- !DocumentStart -- !SequenceStart -- !SequenceStart -- !SequenceEnd -- !SequenceStart { tag: '!mytag', implicit: false } -- !SequenceEnd -- !SequenceStart -- !Scalar -- !Scalar { value: 'data' } -- !Scalar { tag: '!mytag', implicit: [false,false], value: 'data' } -- !SequenceEnd -- !SequenceStart -- !SequenceStart -- !SequenceStart -- !Scalar -- 
!SequenceEnd -- !SequenceEnd -- !SequenceEnd -- !SequenceStart -- !SequenceStart { tag: '!mytag', implicit: false } -- !SequenceStart -- !Scalar { value: 'data' } -- !SequenceEnd -- !SequenceEnd -- !SequenceEnd -- !SequenceEnd -- !DocumentEnd - -- !DocumentStart -- !SequenceStart -- !MappingStart -- !Scalar { value: 'key1' } -- !SequenceStart -- !Scalar { value: 'data1' } -- !Scalar { value: 'data2' } -- !SequenceEnd -- !Scalar { value: 'key2' } -- !SequenceStart { tag: '!mytag1', implicit: false } -- !Scalar { value: 'data3' } -- !SequenceStart -- !Scalar { value: 'data4' } -- !Scalar { value: 'data5' } -- !SequenceEnd -- !SequenceStart { tag: '!mytag2', implicit: false } -- !Scalar { value: 'data6' } -- !Scalar { value: 'data7' } -- !SequenceEnd -- !SequenceEnd -- !MappingEnd -- !SequenceEnd -- !DocumentEnd - -- !DocumentStart -- !SequenceStart -- !SequenceStart { flow_style: true } -- !SequenceStart -- !SequenceEnd -- !Scalar -- !Scalar { value: 'data' } -- !Scalar { tag: '!mytag', implicit: [false,false], value: 'data' } -- !SequenceStart { tag: '!mytag', implicit: false } -- !Scalar { value: 'data' } -- !Scalar { value: 'data' } -- !SequenceEnd -- !SequenceEnd -- !SequenceEnd -- !DocumentEnd - -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/serializer-is-already-opened.dumper-error b/libs/PyYAML-3.10/tests/data/serializer-is-already-opened.dumper-error deleted file mode 100644 index 9a23525..0000000 --- a/libs/PyYAML-3.10/tests/data/serializer-is-already-opened.dumper-error +++ /dev/null @@ -1,3 +0,0 @@ -dumper = yaml.Dumper(StringIO()) -dumper.open() -dumper.open() diff --git a/libs/PyYAML-3.10/tests/data/serializer-is-closed-1.dumper-error b/libs/PyYAML-3.10/tests/data/serializer-is-closed-1.dumper-error deleted file mode 100644 index 8e7e600..0000000 --- a/libs/PyYAML-3.10/tests/data/serializer-is-closed-1.dumper-error +++ /dev/null @@ -1,4 +0,0 @@ -dumper = yaml.Dumper(StringIO()) -dumper.open() -dumper.close() -dumper.open() diff --git 
a/libs/PyYAML-3.10/tests/data/serializer-is-closed-2.dumper-error b/libs/PyYAML-3.10/tests/data/serializer-is-closed-2.dumper-error deleted file mode 100644 index 89aef7e..0000000 --- a/libs/PyYAML-3.10/tests/data/serializer-is-closed-2.dumper-error +++ /dev/null @@ -1,4 +0,0 @@ -dumper = yaml.Dumper(StringIO()) -dumper.open() -dumper.close() -dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar')) diff --git a/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-1.dumper-error b/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-1.dumper-error deleted file mode 100644 index 8f22e73..0000000 --- a/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-1.dumper-error +++ /dev/null @@ -1,2 +0,0 @@ -dumper = yaml.Dumper(StringIO()) -dumper.close() diff --git a/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-2.dumper-error b/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-2.dumper-error deleted file mode 100644 index ebd9df1..0000000 --- a/libs/PyYAML-3.10/tests/data/serializer-is-not-opened-2.dumper-error +++ /dev/null @@ -1,2 +0,0 @@ -dumper = yaml.Dumper(StringIO()) -dumper.serialize(yaml.ScalarNode(tag='!foo', value='bar')) diff --git a/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.code b/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.code deleted file mode 100644 index dcd0c2f..0000000 --- a/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.code +++ /dev/null @@ -1 +0,0 @@ -'.' diff --git a/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.data b/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.data deleted file mode 100644 index 9c558e3..0000000 --- a/libs/PyYAML-3.10/tests/data/single-dot-is-not-float-bug.data +++ /dev/null @@ -1 +0,0 @@ -. 
diff --git a/libs/PyYAML-3.10/tests/data/sloppy-indentation.canonical b/libs/PyYAML-3.10/tests/data/sloppy-indentation.canonical deleted file mode 100644 index 438bc04..0000000 --- a/libs/PyYAML-3.10/tests/data/sloppy-indentation.canonical +++ /dev/null @@ -1,18 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "in the block context" - : !!map { - ? !!str "indentation should be kept" - : !!map { - ? !!str "but in the flow context" - : !!seq [ !!str "it may be violated" ] - } - } -} ---- !!str -"the parser does not require scalars to be indented with at least one space" ---- !!str -"the parser does not require scalars to be indented with at least one space" ---- !!map -{ ? !!str "foo": { ? !!str "bar" : !!str "quoted scalars may not adhere indentation" } } diff --git a/libs/PyYAML-3.10/tests/data/sloppy-indentation.data b/libs/PyYAML-3.10/tests/data/sloppy-indentation.data deleted file mode 100644 index 2eb4f5a..0000000 --- a/libs/PyYAML-3.10/tests/data/sloppy-indentation.data +++ /dev/null @@ -1,17 +0,0 @@ ---- -in the block context: - indentation should be kept: { - but in the flow context: [ -it may be violated] -} ---- -the parser does not require scalars -to be indented with at least one space -... 
---- -"the parser does not require scalars -to be indented with at least one space" ---- -foo: - bar: 'quoted scalars -may not adhere indentation' diff --git a/libs/PyYAML-3.10/tests/data/spec-02-01.data b/libs/PyYAML-3.10/tests/data/spec-02-01.data deleted file mode 100644 index d12e671..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-01.data +++ /dev/null @@ -1,3 +0,0 @@ -- Mark McGwire -- Sammy Sosa -- Ken Griffey diff --git a/libs/PyYAML-3.10/tests/data/spec-02-01.structure b/libs/PyYAML-3.10/tests/data/spec-02-01.structure deleted file mode 100644 index f532f4a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-01.structure +++ /dev/null @@ -1 +0,0 @@ -[True, True, True] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-01.tokens b/libs/PyYAML-3.10/tests/data/spec-02-01.tokens deleted file mode 100644 index ce44cac..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-01.tokens +++ /dev/null @@ -1 +0,0 @@ -[[ , _ , _ , _ ]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-02.data b/libs/PyYAML-3.10/tests/data/spec-02-02.data deleted file mode 100644 index 7b7ec94..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-02.data +++ /dev/null @@ -1,3 +0,0 @@ -hr: 65 # Home runs -avg: 0.278 # Batting average -rbi: 147 # Runs Batted In diff --git a/libs/PyYAML-3.10/tests/data/spec-02-02.structure b/libs/PyYAML-3.10/tests/data/spec-02-02.structure deleted file mode 100644 index aba1ced..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-02.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-02.tokens b/libs/PyYAML-3.10/tests/data/spec-02-02.tokens deleted file mode 100644 index e4e381b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-02.tokens +++ /dev/null @@ -1,5 +0,0 @@ -{{ -? _ : _ -? _ : _ -? 
_ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-03.data b/libs/PyYAML-3.10/tests/data/spec-02-03.data deleted file mode 100644 index 656d628..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-03.data +++ /dev/null @@ -1,8 +0,0 @@ -american: - - Boston Red Sox - - Detroit Tigers - - New York Yankees -national: - - New York Mets - - Chicago Cubs - - Atlanta Braves diff --git a/libs/PyYAML-3.10/tests/data/spec-02-03.structure b/libs/PyYAML-3.10/tests/data/spec-02-03.structure deleted file mode 100644 index 25de5d2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-03.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, [True, True, True]), (True, [True, True, True])] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-03.tokens b/libs/PyYAML-3.10/tests/data/spec-02-03.tokens deleted file mode 100644 index 89815f2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-03.tokens +++ /dev/null @@ -1,4 +0,0 @@ -{{ -? _ : [[ , _ , _ , _ ]} -? _ : [[ , _ , _ , _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-04.data b/libs/PyYAML-3.10/tests/data/spec-02-04.data deleted file mode 100644 index 430f6b3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-04.data +++ /dev/null @@ -1,8 +0,0 @@ -- - name: Mark McGwire - hr: 65 - avg: 0.278 -- - name: Sammy Sosa - hr: 63 - avg: 0.288 diff --git a/libs/PyYAML-3.10/tests/data/spec-02-04.structure b/libs/PyYAML-3.10/tests/data/spec-02-04.structure deleted file mode 100644 index e7b526c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-04.structure +++ /dev/null @@ -1,4 +0,0 @@ -[ - [(True, True), (True, True), (True, True)], - [(True, True), (True, True), (True, True)], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-04.tokens b/libs/PyYAML-3.10/tests/data/spec-02-04.tokens deleted file mode 100644 index 9cb9815..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-04.tokens +++ /dev/null @@ -1,4 +0,0 @@ -[[ -, {{ ? _ : _ ? _ : _ ? _ : _ ]} -, {{ ? _ : _ ? _ : _ ? 
_ : _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-05.data b/libs/PyYAML-3.10/tests/data/spec-02-05.data deleted file mode 100644 index cdd7770..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-05.data +++ /dev/null @@ -1,3 +0,0 @@ -- [name , hr, avg ] -- [Mark McGwire, 65, 0.278] -- [Sammy Sosa , 63, 0.288] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-05.structure b/libs/PyYAML-3.10/tests/data/spec-02-05.structure deleted file mode 100644 index e06b75a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-05.structure +++ /dev/null @@ -1,5 +0,0 @@ -[ - [True, True, True], - [True, True, True], - [True, True, True], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-05.tokens b/libs/PyYAML-3.10/tests/data/spec-02-05.tokens deleted file mode 100644 index 3f6f1ab..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-05.tokens +++ /dev/null @@ -1,5 +0,0 @@ -[[ -, [ _ , _ , _ ] -, [ _ , _ , _ ] -, [ _ , _ , _ ] -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-06.data b/libs/PyYAML-3.10/tests/data/spec-02-06.data deleted file mode 100644 index 7a957b2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-06.data +++ /dev/null @@ -1,5 +0,0 @@ -Mark McGwire: {hr: 65, avg: 0.278} -Sammy Sosa: { - hr: 63, - avg: 0.288 - } diff --git a/libs/PyYAML-3.10/tests/data/spec-02-06.structure b/libs/PyYAML-3.10/tests/data/spec-02-06.structure deleted file mode 100644 index 3ef0f4b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-06.structure +++ /dev/null @@ -1,4 +0,0 @@ -[ - (True, [(True, True), (True, True)]), - (True, [(True, True), (True, True)]), -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-06.tokens b/libs/PyYAML-3.10/tests/data/spec-02-06.tokens deleted file mode 100644 index a1a5eef..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-06.tokens +++ /dev/null @@ -1,4 +0,0 @@ -{{ -? _ : { ? _ : _ , ? _ : _ } -? _ : { ? _ : _ , ? 
_ : _ } -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-07.data b/libs/PyYAML-3.10/tests/data/spec-02-07.data deleted file mode 100644 index bc711d5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-07.data +++ /dev/null @@ -1,10 +0,0 @@ -# Ranking of 1998 home runs ---- -- Mark McGwire -- Sammy Sosa -- Ken Griffey - -# Team ranking ---- -- Chicago Cubs -- St Louis Cardinals diff --git a/libs/PyYAML-3.10/tests/data/spec-02-07.structure b/libs/PyYAML-3.10/tests/data/spec-02-07.structure deleted file mode 100644 index c5d72a3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-07.structure +++ /dev/null @@ -1,4 +0,0 @@ -[ -[True, True, True], -[True, True], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-07.tokens b/libs/PyYAML-3.10/tests/data/spec-02-07.tokens deleted file mode 100644 index ed48883..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-07.tokens +++ /dev/null @@ -1,12 +0,0 @@ ---- -[[ -, _ -, _ -, _ -]} - ---- -[[ -, _ -, _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-08.data b/libs/PyYAML-3.10/tests/data/spec-02-08.data deleted file mode 100644 index 05e102d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-08.data +++ /dev/null @@ -1,10 +0,0 @@ ---- -time: 20:03:20 -player: Sammy Sosa -action: strike (miss) -... ---- -time: 20:03:47 -player: Sammy Sosa -action: grand slam -... diff --git a/libs/PyYAML-3.10/tests/data/spec-02-08.structure b/libs/PyYAML-3.10/tests/data/spec-02-08.structure deleted file mode 100644 index 24cff73..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-08.structure +++ /dev/null @@ -1,4 +0,0 @@ -[ -[(True, True), (True, True), (True, True)], -[(True, True), (True, True), (True, True)], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-08.tokens b/libs/PyYAML-3.10/tests/data/spec-02-08.tokens deleted file mode 100644 index 7d2c03d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-08.tokens +++ /dev/null @@ -1,15 +0,0 @@ ---- -{{ -? _ : _ -? _ : _ -? _ : _ -]} -... - ---- -{{ -? _ : _ -? _ : _ -? 
_ : _ -]} -... diff --git a/libs/PyYAML-3.10/tests/data/spec-02-09.data b/libs/PyYAML-3.10/tests/data/spec-02-09.data deleted file mode 100644 index e264180..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-09.data +++ /dev/null @@ -1,8 +0,0 @@ ---- -hr: # 1998 hr ranking - - Mark McGwire - - Sammy Sosa -rbi: - # 1998 rbi ranking - - Sammy Sosa - - Ken Griffey diff --git a/libs/PyYAML-3.10/tests/data/spec-02-09.structure b/libs/PyYAML-3.10/tests/data/spec-02-09.structure deleted file mode 100644 index b4c9914..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-09.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, [True, True]), (True, [True, True])] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-09.tokens b/libs/PyYAML-3.10/tests/data/spec-02-09.tokens deleted file mode 100644 index b2ec10e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-09.tokens +++ /dev/null @@ -1,5 +0,0 @@ ---- -{{ -? _ : [[ , _ , _ ]} -? _ : [[ , _ , _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-10.data b/libs/PyYAML-3.10/tests/data/spec-02-10.data deleted file mode 100644 index 61808f6..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-10.data +++ /dev/null @@ -1,8 +0,0 @@ ---- -hr: - - Mark McGwire - # Following node labeled SS - - &SS Sammy Sosa -rbi: - - *SS # Subsequent occurrence - - Ken Griffey diff --git a/libs/PyYAML-3.10/tests/data/spec-02-10.structure b/libs/PyYAML-3.10/tests/data/spec-02-10.structure deleted file mode 100644 index ff8f4c3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-10.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, [True, True]), (True, ['*', True])] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-10.tokens b/libs/PyYAML-3.10/tests/data/spec-02-10.tokens deleted file mode 100644 index 26caa2b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-10.tokens +++ /dev/null @@ -1,5 +0,0 @@ ---- -{{ -? _ : [[ , _ , & _ ]} -? 
_ : [[ , * , _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-11.data b/libs/PyYAML-3.10/tests/data/spec-02-11.data deleted file mode 100644 index 9123ce2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-11.data +++ /dev/null @@ -1,9 +0,0 @@ -? - Detroit Tigers - - Chicago cubs -: - - 2001-07-23 - -? [ New York Yankees, - Atlanta Braves ] -: [ 2001-07-02, 2001-08-12, - 2001-08-14 ] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-11.structure b/libs/PyYAML-3.10/tests/data/spec-02-11.structure deleted file mode 100644 index 3d8f1ff..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-11.structure +++ /dev/null @@ -1,4 +0,0 @@ -[ -([True, True], [True]), -([True, True], [True, True, True]), -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-11.tokens b/libs/PyYAML-3.10/tests/data/spec-02-11.tokens deleted file mode 100644 index fe24203..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-11.tokens +++ /dev/null @@ -1,6 +0,0 @@ -{{ -? [[ , _ , _ ]} -: [[ , _ ]} -? [ _ , _ ] -: [ _ , _ , _ ] -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-12.data b/libs/PyYAML-3.10/tests/data/spec-02-12.data deleted file mode 100644 index 1fc33f9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-12.data +++ /dev/null @@ -1,8 +0,0 @@ ---- -# products purchased -- item : Super Hoop - quantity: 1 -- item : Basketball - quantity: 4 -- item : Big Shoes - quantity: 1 diff --git a/libs/PyYAML-3.10/tests/data/spec-02-12.structure b/libs/PyYAML-3.10/tests/data/spec-02-12.structure deleted file mode 100644 index e9c5359..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-12.structure +++ /dev/null @@ -1,5 +0,0 @@ -[ -[(True, True), (True, True)], -[(True, True), (True, True)], -[(True, True), (True, True)], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-12.tokens b/libs/PyYAML-3.10/tests/data/spec-02-12.tokens deleted file mode 100644 index ea21e50..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-12.tokens +++ /dev/null @@ -1,6 +0,0 @@ ---- -[[ -, {{ ? _ : _ ? 
_ : _ ]} -, {{ ? _ : _ ? _ : _ ]} -, {{ ? _ : _ ? _ : _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-13.data b/libs/PyYAML-3.10/tests/data/spec-02-13.data deleted file mode 100644 index 13fb656..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-13.data +++ /dev/null @@ -1,4 +0,0 @@ -# ASCII Art ---- | - \//||\/|| - // || ||__ diff --git a/libs/PyYAML-3.10/tests/data/spec-02-13.structure b/libs/PyYAML-3.10/tests/data/spec-02-13.structure deleted file mode 100644 index 0ca9514..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-13.structure +++ /dev/null @@ -1 +0,0 @@ -True diff --git a/libs/PyYAML-3.10/tests/data/spec-02-13.tokens b/libs/PyYAML-3.10/tests/data/spec-02-13.tokens deleted file mode 100644 index 7456c05..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-13.tokens +++ /dev/null @@ -1 +0,0 @@ ---- _ diff --git a/libs/PyYAML-3.10/tests/data/spec-02-14.data b/libs/PyYAML-3.10/tests/data/spec-02-14.data deleted file mode 100644 index 59943de..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-14.data +++ /dev/null @@ -1,4 +0,0 @@ ---- - Mark McGwire's - year was crippled - by a knee injury. diff --git a/libs/PyYAML-3.10/tests/data/spec-02-14.structure b/libs/PyYAML-3.10/tests/data/spec-02-14.structure deleted file mode 100644 index 0ca9514..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-14.structure +++ /dev/null @@ -1 +0,0 @@ -True diff --git a/libs/PyYAML-3.10/tests/data/spec-02-14.tokens b/libs/PyYAML-3.10/tests/data/spec-02-14.tokens deleted file mode 100644 index 7456c05..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-14.tokens +++ /dev/null @@ -1 +0,0 @@ ---- _ diff --git a/libs/PyYAML-3.10/tests/data/spec-02-15.data b/libs/PyYAML-3.10/tests/data/spec-02-15.data deleted file mode 100644 index 80b89a6..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-15.data +++ /dev/null @@ -1,8 +0,0 @@ -> - Sammy Sosa completed another - fine season with great stats. - - 63 Home Runs - 0.288 Batting Average - - What a year! 
diff --git a/libs/PyYAML-3.10/tests/data/spec-02-15.structure b/libs/PyYAML-3.10/tests/data/spec-02-15.structure deleted file mode 100644 index 0ca9514..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-15.structure +++ /dev/null @@ -1 +0,0 @@ -True diff --git a/libs/PyYAML-3.10/tests/data/spec-02-15.tokens b/libs/PyYAML-3.10/tests/data/spec-02-15.tokens deleted file mode 100644 index 31354ec..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-15.tokens +++ /dev/null @@ -1 +0,0 @@ -_ diff --git a/libs/PyYAML-3.10/tests/data/spec-02-16.data b/libs/PyYAML-3.10/tests/data/spec-02-16.data deleted file mode 100644 index 9f66d88..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-16.data +++ /dev/null @@ -1,7 +0,0 @@ -name: Mark McGwire -accomplishment: > - Mark set a major league - home run record in 1998. -stats: | - 65 Home Runs - 0.278 Batting Average diff --git a/libs/PyYAML-3.10/tests/data/spec-02-16.structure b/libs/PyYAML-3.10/tests/data/spec-02-16.structure deleted file mode 100644 index aba1ced..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-16.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-16.tokens b/libs/PyYAML-3.10/tests/data/spec-02-16.tokens deleted file mode 100644 index e4e381b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-16.tokens +++ /dev/null @@ -1,5 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-17.data b/libs/PyYAML-3.10/tests/data/spec-02-17.data deleted file mode 100644 index b2870c5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-17.data +++ /dev/null @@ -1,7 +0,0 @@ -unicode: "Sosa did fine.\u263A" -control: "\b1998\t1999\t2000\n" -hexesc: "\x13\x10 is \r\n" - -single: '"Howdy!" he cried.' -quoted: ' # not a ''comment''.' 
-tie-fighter: '|\-*-/|' diff --git a/libs/PyYAML-3.10/tests/data/spec-02-17.structure b/libs/PyYAML-3.10/tests/data/spec-02-17.structure deleted file mode 100644 index 933646d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-17.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True), (True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-17.tokens b/libs/PyYAML-3.10/tests/data/spec-02-17.tokens deleted file mode 100644 index db65540..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-17.tokens +++ /dev/null @@ -1,8 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-18.data b/libs/PyYAML-3.10/tests/data/spec-02-18.data deleted file mode 100644 index e0a8bfa..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-18.data +++ /dev/null @@ -1,6 +0,0 @@ -plain: - This unquoted scalar - spans many lines. - -quoted: "So does this - quoted scalar.\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-02-18.structure b/libs/PyYAML-3.10/tests/data/spec-02-18.structure deleted file mode 100644 index 0ca4991..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-18.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-18.tokens b/libs/PyYAML-3.10/tests/data/spec-02-18.tokens deleted file mode 100644 index 83b31dc..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-18.tokens +++ /dev/null @@ -1,4 +0,0 @@ -{{ -? _ : _ -? 
_ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-19.data b/libs/PyYAML-3.10/tests/data/spec-02-19.data deleted file mode 100644 index bf69de6..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-19.data +++ /dev/null @@ -1,5 +0,0 @@ -canonical: 12345 -decimal: +12,345 -sexagesimal: 3:25:45 -octal: 014 -hexadecimal: 0xC diff --git a/libs/PyYAML-3.10/tests/data/spec-02-19.structure b/libs/PyYAML-3.10/tests/data/spec-02-19.structure deleted file mode 100644 index 48ca99d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-19.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-19.tokens b/libs/PyYAML-3.10/tests/data/spec-02-19.tokens deleted file mode 100644 index 5bda68f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-19.tokens +++ /dev/null @@ -1,7 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-20.data b/libs/PyYAML-3.10/tests/data/spec-02-20.data deleted file mode 100644 index 1d4897f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-20.data +++ /dev/null @@ -1,6 +0,0 @@ -canonical: 1.23015e+3 -exponential: 12.3015e+02 -sexagesimal: 20:30.15 -fixed: 1,230.15 -negative infinity: -.inf -not a number: .NaN diff --git a/libs/PyYAML-3.10/tests/data/spec-02-20.structure b/libs/PyYAML-3.10/tests/data/spec-02-20.structure deleted file mode 100644 index 933646d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-20.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True), (True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-20.tokens b/libs/PyYAML-3.10/tests/data/spec-02-20.tokens deleted file mode 100644 index db65540..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-20.tokens +++ /dev/null @@ -1,8 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -? 
_ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-21.data b/libs/PyYAML-3.10/tests/data/spec-02-21.data deleted file mode 100644 index dec6a56..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-21.data +++ /dev/null @@ -1,4 +0,0 @@ -null: ~ -true: y -false: n -string: '12345' diff --git a/libs/PyYAML-3.10/tests/data/spec-02-21.structure b/libs/PyYAML-3.10/tests/data/spec-02-21.structure deleted file mode 100644 index 021635f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-21.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-21.tokens b/libs/PyYAML-3.10/tests/data/spec-02-21.tokens deleted file mode 100644 index aeccbaf..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-21.tokens +++ /dev/null @@ -1,6 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -? _ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-22.data b/libs/PyYAML-3.10/tests/data/spec-02-22.data deleted file mode 100644 index aaac185..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-22.data +++ /dev/null @@ -1,4 +0,0 @@ -canonical: 2001-12-15T02:59:43.1Z -iso8601: 2001-12-14t21:59:43.10-05:00 -spaced: 2001-12-14 21:59:43.10 -5 -date: 2002-12-14 diff --git a/libs/PyYAML-3.10/tests/data/spec-02-22.structure b/libs/PyYAML-3.10/tests/data/spec-02-22.structure deleted file mode 100644 index 021635f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-22.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-22.tokens b/libs/PyYAML-3.10/tests/data/spec-02-22.tokens deleted file mode 100644 index aeccbaf..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-22.tokens +++ /dev/null @@ -1,6 +0,0 @@ -{{ -? _ : _ -? _ : _ -? _ : _ -? 
_ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-23.data b/libs/PyYAML-3.10/tests/data/spec-02-23.data deleted file mode 100644 index 5dbd992..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-23.data +++ /dev/null @@ -1,13 +0,0 @@ ---- -not-date: !!str 2002-04-28 - -picture: !!binary | - R0lGODlhDAAMAIQAAP//9/X - 17unp5WZmZgAAAOfn515eXv - Pz7Y6OjuDg4J+fn5OTk6enp - 56enmleECcgggoBADs= - -application specific tag: !something | - The semantics of the tag - above may be different for - different documents. diff --git a/libs/PyYAML-3.10/tests/data/spec-02-23.structure b/libs/PyYAML-3.10/tests/data/spec-02-23.structure deleted file mode 100644 index aba1ced..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-23.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, True), (True, True), (True, True)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-23.tokens b/libs/PyYAML-3.10/tests/data/spec-02-23.tokens deleted file mode 100644 index 9ac54aa..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-23.tokens +++ /dev/null @@ -1,6 +0,0 @@ ---- -{{ -? _ : ! _ -? _ : ! _ -? _ : ! _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-24.data b/libs/PyYAML-3.10/tests/data/spec-02-24.data deleted file mode 100644 index 1180757..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-24.data +++ /dev/null @@ -1,14 +0,0 @@ -%TAG ! tag:clarkevans.com,2002: ---- !shape - # Use the ! handle for presenting - # tag:clarkevans.com,2002:circle -- !circle - center: &ORIGIN {x: 73, y: 129} - radius: 7 -- !line - start: *ORIGIN - finish: { x: 89, y: 102 } -- !label - start: *ORIGIN - color: 0xFFEEBB - text: Pretty vector drawing. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-02-24.structure b/libs/PyYAML-3.10/tests/data/spec-02-24.structure deleted file mode 100644 index a800729..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-24.structure +++ /dev/null @@ -1,5 +0,0 @@ -[ -[(True, [(True, True), (True, True)]), (True, True)], -[(True, '*'), (True, [(True, True), (True, True)])], -[(True, '*'), (True, True), (True, True)], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-24.tokens b/libs/PyYAML-3.10/tests/data/spec-02-24.tokens deleted file mode 100644 index 039c385..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-24.tokens +++ /dev/null @@ -1,20 +0,0 @@ -% ---- ! -[[ -, ! - {{ - ? _ : & { ? _ : _ , ? _ : _ } - ? _ : _ - ]} -, ! - {{ - ? _ : * - ? _ : { ? _ : _ , ? _ : _ } - ]} -, ! - {{ - ? _ : * - ? _ : _ - ? _ : _ - ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-25.data b/libs/PyYAML-3.10/tests/data/spec-02-25.data deleted file mode 100644 index 769ac31..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-25.data +++ /dev/null @@ -1,7 +0,0 @@ -# sets are represented as a -# mapping where each key is -# associated with the empty string ---- !!set -? Mark McGwire -? Sammy Sosa -? Ken Griff diff --git a/libs/PyYAML-3.10/tests/data/spec-02-25.structure b/libs/PyYAML-3.10/tests/data/spec-02-25.structure deleted file mode 100644 index 0b40e61..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-25.structure +++ /dev/null @@ -1 +0,0 @@ -[(True, None), (True, None), (True, None)] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-25.tokens b/libs/PyYAML-3.10/tests/data/spec-02-25.tokens deleted file mode 100644 index b700236..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-25.tokens +++ /dev/null @@ -1,6 +0,0 @@ ---- ! -{{ -? _ -? _ -? 
_ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-26.data b/libs/PyYAML-3.10/tests/data/spec-02-26.data deleted file mode 100644 index 3143763..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-26.data +++ /dev/null @@ -1,7 +0,0 @@ -# ordered maps are represented as -# a sequence of mappings, with -# each mapping having one key ---- !!omap -- Mark McGwire: 65 -- Sammy Sosa: 63 -- Ken Griffy: 58 diff --git a/libs/PyYAML-3.10/tests/data/spec-02-26.structure b/libs/PyYAML-3.10/tests/data/spec-02-26.structure deleted file mode 100644 index cf429b9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-26.structure +++ /dev/null @@ -1,5 +0,0 @@ -[ -[(True, True)], -[(True, True)], -[(True, True)], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-26.tokens b/libs/PyYAML-3.10/tests/data/spec-02-26.tokens deleted file mode 100644 index 7bee492..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-26.tokens +++ /dev/null @@ -1,6 +0,0 @@ ---- ! -[[ -, {{ ? _ : _ ]} -, {{ ? _ : _ ]} -, {{ ? _ : _ ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-27.data b/libs/PyYAML-3.10/tests/data/spec-02-27.data deleted file mode 100644 index 4625739..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-27.data +++ /dev/null @@ -1,29 +0,0 @@ ---- ! -invoice: 34843 -date : 2001-01-23 -bill-to: &id001 - given : Chris - family : Dumars - address: - lines: | - 458 Walkman Dr. - Suite #292 - city : Royal Oak - state : MI - postal : 48046 -ship-to: *id001 -product: - - sku : BL394D - quantity : 4 - description : Basketball - price : 450.00 - - sku : BL4438H - quantity : 1 - description : Super Hoop - price : 2392.00 -tax : 251.42 -total: 4443.52 -comments: - Late afternoon is best. - Backup contact is Nancy - Billsmer @ 338-4338. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-02-27.structure b/libs/PyYAML-3.10/tests/data/spec-02-27.structure deleted file mode 100644 index a2113b9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-27.structure +++ /dev/null @@ -1,17 +0,0 @@ -[ -(True, True), -(True, True), -(True, [ - (True, True), - (True, True), - (True, [(True, True), (True, True), (True, True), (True, True)]), - ]), -(True, '*'), -(True, [ - [(True, True), (True, True), (True, True), (True, True)], - [(True, True), (True, True), (True, True), (True, True)], - ]), -(True, True), -(True, True), -(True, True), -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-27.tokens b/libs/PyYAML-3.10/tests/data/spec-02-27.tokens deleted file mode 100644 index 2dc1c25..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-27.tokens +++ /dev/null @@ -1,20 +0,0 @@ ---- ! -{{ -? _ : _ -? _ : _ -? _ : & - {{ - ? _ : _ - ? _ : _ - ? _ : {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]} - ]} -? _ : * -? _ : - [[ - , {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]} - , {{ ? _ : _ ? _ : _ ? _ : _ ? _ : _ ]} - ]} -? _ : _ -? _ : _ -? _ : _ -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-02-28.data b/libs/PyYAML-3.10/tests/data/spec-02-28.data deleted file mode 100644 index a5c8dc8..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-28.data +++ /dev/null @@ -1,26 +0,0 @@ ---- -Time: 2001-11-23 15:01:42 -5 -User: ed -Warning: - This is an error message - for the log file ---- -Time: 2001-11-23 15:02:31 -5 -User: ed -Warning: - A slightly different error - message. 
---- -Date: 2001-11-23 15:03:17 -5 -User: ed -Fatal: - Unknown variable "bar" -Stack: - - file: TopClass.py - line: 23 - code: | - x = MoreObject("345\n") - - file: MoreClass.py - line: 58 - code: |- - foo = bar diff --git a/libs/PyYAML-3.10/tests/data/spec-02-28.structure b/libs/PyYAML-3.10/tests/data/spec-02-28.structure deleted file mode 100644 index 8ec0b56..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-28.structure +++ /dev/null @@ -1,10 +0,0 @@ -[ -[(True, True), (True, True), (True, True)], -[(True, True), (True, True), (True, True)], -[(True, True), (True, True), (True, True), -(True, [ - [(True, True), (True, True), (True, True)], - [(True, True), (True, True), (True, True)], - ]), -] -] diff --git a/libs/PyYAML-3.10/tests/data/spec-02-28.tokens b/libs/PyYAML-3.10/tests/data/spec-02-28.tokens deleted file mode 100644 index 8d5e1bc..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-02-28.tokens +++ /dev/null @@ -1,23 +0,0 @@ ---- -{{ -? _ : _ -? _ : _ -? _ : _ -]} ---- -{{ -? _ : _ -? _ : _ -? _ : _ -]} ---- -{{ -? _ : _ -? _ : _ -? _ : _ -? _ : - [[ - , {{ ? _ : _ ? _ : _ ? _ : _ ]} - , {{ ? _ : _ ? _ : _ ? _ : _ ]} - ]} -]} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.data b/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.data deleted file mode 100644 index 3525062..0000000 Binary files a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.empty b/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16be.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.data b/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.data deleted file mode 100644 index 0823f74..0000000 Binary files a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.empty b/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-01-utf16le.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.data b/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.data deleted file mode 100644 index 780d25b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.data +++ /dev/null @@ -1 +0,0 @@ -# Comment only. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.empty b/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-01-utf8.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.data b/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.data deleted file mode 100644 index 5ebbb04..0000000 Binary files a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.error b/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.error deleted file mode 100644 index 1df3616..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16be.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: - A BOM must not appear - inside a document. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.data b/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.data deleted file mode 100644 index 0cd90a2..0000000 Binary files a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.error b/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.error deleted file mode 100644 index 1df3616..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-02-utf16le.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: - A BOM must not appear - inside a document. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.data b/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.data deleted file mode 100644 index fb74866..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.data +++ /dev/null @@ -1,3 +0,0 @@ -# Invalid use of BOM -# inside a -# document. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.error b/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.error deleted file mode 100644 index 1df3616..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-02-utf8.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: - A BOM must not appear - inside a document. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-03.canonical b/libs/PyYAML-3.10/tests/data/spec-05-03.canonical deleted file mode 100644 index a143a73..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-03.canonical +++ /dev/null @@ -1,14 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "sequence" - : !!seq [ - !!str "one", !!str "two" - ], - ? !!str "mapping" - : !!map { - ? !!str "sky" : !!str "blue", -# ? !!str "sea" : !!str "green", - ? !!map { ? !!str "sea" : !!str "green" } : !!null "", - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-03.data b/libs/PyYAML-3.10/tests/data/spec-05-03.data deleted file mode 100644 index 4661f33..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-03.data +++ /dev/null @@ -1,7 +0,0 @@ -sequence: -- one -- two -mapping: - ? sky - : blue - ? 
sea : green diff --git a/libs/PyYAML-3.10/tests/data/spec-05-04.canonical b/libs/PyYAML-3.10/tests/data/spec-05-04.canonical deleted file mode 100644 index 00c9723..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-04.canonical +++ /dev/null @@ -1,13 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "sequence" - : !!seq [ - !!str "one", !!str "two" - ], - ? !!str "mapping" - : !!map { - ? !!str "sky" : !!str "blue", - ? !!str "sea" : !!str "green", - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-04.data b/libs/PyYAML-3.10/tests/data/spec-05-04.data deleted file mode 100644 index df33847..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-04.data +++ /dev/null @@ -1,2 +0,0 @@ -sequence: [ one, two, ] -mapping: { sky: blue, sea: green } diff --git a/libs/PyYAML-3.10/tests/data/spec-05-05.data b/libs/PyYAML-3.10/tests/data/spec-05-05.data deleted file mode 100644 index 62524c0..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-05.data +++ /dev/null @@ -1 +0,0 @@ -# Comment only. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-05.empty b/libs/PyYAML-3.10/tests/data/spec-05-05.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-05.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-06.canonical b/libs/PyYAML-3.10/tests/data/spec-05-06.canonical deleted file mode 100644 index 4f30c11..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-06.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "anchored" - : &A1 !local "value", - ? 
!!str "alias" - : *A1, -} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-06.data b/libs/PyYAML-3.10/tests/data/spec-05-06.data deleted file mode 100644 index 7a1f9b3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-06.data +++ /dev/null @@ -1,2 +0,0 @@ -anchored: !local &anchor value -alias: *anchor diff --git a/libs/PyYAML-3.10/tests/data/spec-05-07.canonical b/libs/PyYAML-3.10/tests/data/spec-05-07.canonical deleted file mode 100644 index dc3732a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-07.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "literal" - : !!str "text\n", - ? !!str "folded" - : !!str "text\n", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-07.data b/libs/PyYAML-3.10/tests/data/spec-05-07.data deleted file mode 100644 index 97eb3a3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-07.data +++ /dev/null @@ -1,4 +0,0 @@ -literal: | - text -folded: > - text diff --git a/libs/PyYAML-3.10/tests/data/spec-05-08.canonical b/libs/PyYAML-3.10/tests/data/spec-05-08.canonical deleted file mode 100644 index 610bd68..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-08.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "single" - : !!str "text", - ? 
!!str "double" - : !!str "text", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-05-08.data b/libs/PyYAML-3.10/tests/data/spec-05-08.data deleted file mode 100644 index 04ebf69..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-08.data +++ /dev/null @@ -1,2 +0,0 @@ -single: 'text' -double: "text" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-09.canonical b/libs/PyYAML-3.10/tests/data/spec-05-09.canonical deleted file mode 100644 index 597e3de..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-09.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "text" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-09.data b/libs/PyYAML-3.10/tests/data/spec-05-09.data deleted file mode 100644 index a43431b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-09.data +++ /dev/null @@ -1,2 +0,0 @@ -%YAML 1.1 ---- text diff --git a/libs/PyYAML-3.10/tests/data/spec-05-10.data b/libs/PyYAML-3.10/tests/data/spec-05-10.data deleted file mode 100644 index a4caf91..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-10.data +++ /dev/null @@ -1,2 +0,0 @@ -commercial-at: @text -grave-accent: `text diff --git a/libs/PyYAML-3.10/tests/data/spec-05-10.error b/libs/PyYAML-3.10/tests/data/spec-05-10.error deleted file mode 100644 index 46f776e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-10.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: - Reserved indicators can't - start a plain scalar. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-05-11.canonical b/libs/PyYAML-3.10/tests/data/spec-05-11.canonical deleted file mode 100644 index fc25bef..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-11.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- !!str -"Generic line break (no glyph)\n\ - Generic line break (glyphed)\n\ - Line separator\u2028\ - Paragraph separator\u2029" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-11.data b/libs/PyYAML-3.10/tests/data/spec-05-11.data deleted file mode 100644 index b448b75..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-11.data +++ /dev/null @@ -1,3 +0,0 @@ -| - Generic line break (no glyph) - Generic line break (glyphed)… Line separator
 Paragraph separator
 \ No newline at end of file diff --git a/libs/PyYAML-3.10/tests/data/spec-05-12.data b/libs/PyYAML-3.10/tests/data/spec-05-12.data deleted file mode 100644 index 7c3ad7f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-12.data +++ /dev/null @@ -1,9 +0,0 @@ -# Tabs do's and don'ts: -# comment: -quoted: "Quoted " -block: | - void main() { - printf("Hello, world!\n"); - } -elsewhere: # separation - indentation, in plain scalar diff --git a/libs/PyYAML-3.10/tests/data/spec-05-12.error b/libs/PyYAML-3.10/tests/data/spec-05-12.error deleted file mode 100644 index 8aad4c8..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-12.error +++ /dev/null @@ -1,8 +0,0 @@ -ERROR: - Tabs may appear inside - comments and quoted or - block scalar content. - Tabs must not appear - elsewhere, such as - in indentation and - separation spaces. diff --git a/libs/PyYAML-3.10/tests/data/spec-05-13.canonical b/libs/PyYAML-3.10/tests/data/spec-05-13.canonical deleted file mode 100644 index 90c1c5c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-13.canonical +++ /dev/null @@ -1,5 +0,0 @@ -%YAML 1.1 ---- !!str -"Text containing \ - both space and \ - tab characters" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-13.data b/libs/PyYAML-3.10/tests/data/spec-05-13.data deleted file mode 100644 index fce7951..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-13.data +++ /dev/null @@ -1,3 +0,0 @@ - "Text containing - both space and - tab characters" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-14.canonical b/libs/PyYAML-3.10/tests/data/spec-05-14.canonical deleted file mode 100644 index 4bff01c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-14.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -"Fun with \x5C - \x22 \x07 \x08 \x1B \x0C - \x0A \x0D \x09 \x0B \x00 - \x20 \xA0 \x85 \u2028 \u2029 - A A A" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-14.data b/libs/PyYAML-3.10/tests/data/spec-05-14.data deleted file mode 100644 index d6e8ce4..0000000 --- 
a/libs/PyYAML-3.10/tests/data/spec-05-14.data +++ /dev/null @@ -1,2 +0,0 @@ -"Fun with \\ - \" \a \b \e \f \… \n \r \t \v \0 \
 \ \_ \N \L \P \
 \x41 \u0041 \U00000041" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-15.data b/libs/PyYAML-3.10/tests/data/spec-05-15.data deleted file mode 100644 index 7bf12b6..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-15.data +++ /dev/null @@ -1,3 +0,0 @@ -Bad escapes: - "\c - \xq-" diff --git a/libs/PyYAML-3.10/tests/data/spec-05-15.error b/libs/PyYAML-3.10/tests/data/spec-05-15.error deleted file mode 100644 index 71ffbd9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-05-15.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: -- c is an invalid escaped character. -- q and - are invalid hex digits. diff --git a/libs/PyYAML-3.10/tests/data/spec-06-01.canonical b/libs/PyYAML-3.10/tests/data/spec-06-01.canonical deleted file mode 100644 index f17ec92..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-01.canonical +++ /dev/null @@ -1,15 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "Not indented" - : !!map { - ? !!str "By one space" - : !!str "By four\n spaces\n", - ? !!str "Flow style" - : !!seq [ - !!str "By two", - !!str "Also by two", - !!str "Still by two", - ] - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-06-01.data b/libs/PyYAML-3.10/tests/data/spec-06-01.data deleted file mode 100644 index 6134ba1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-01.data +++ /dev/null @@ -1,14 +0,0 @@ - # Leading comment line spaces are - # neither content nor indentation. - -Not indented: - By one space: | - By four - spaces - Flow style: [ # Leading spaces - By two, # in flow style - Also by two, # are neither -# Tabs are not allowed: -# Still by two # content nor - Still by two # content nor - ] # indentation. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-06-02.data b/libs/PyYAML-3.10/tests/data/spec-06-02.data deleted file mode 100644 index ff741e5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-02.data +++ /dev/null @@ -1,3 +0,0 @@ - # Comment - - diff --git a/libs/PyYAML-3.10/tests/data/spec-06-02.empty b/libs/PyYAML-3.10/tests/data/spec-06-02.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-02.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-06-03.canonical b/libs/PyYAML-3.10/tests/data/spec-06-03.canonical deleted file mode 100644 index ec26902..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-03.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "key" - : !!str "value" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-06-03.data b/libs/PyYAML-3.10/tests/data/spec-06-03.data deleted file mode 100644 index 9db0912..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-03.data +++ /dev/null @@ -1,2 +0,0 @@ -key: # Comment - value diff --git a/libs/PyYAML-3.10/tests/data/spec-06-04.canonical b/libs/PyYAML-3.10/tests/data/spec-06-04.canonical deleted file mode 100644 index ec26902..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-04.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "key" - : !!str "value" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-06-04.data b/libs/PyYAML-3.10/tests/data/spec-06-04.data deleted file mode 100644 index 86308dd..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-04.data +++ /dev/null @@ -1,4 +0,0 @@ -key: # Comment - # lines - value - diff --git a/libs/PyYAML-3.10/tests/data/spec-06-05.canonical b/libs/PyYAML-3.10/tests/data/spec-06-05.canonical deleted file mode 100644 index 8da431d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-05.canonical +++ /dev/null @@ -1,16 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!map { - ? !!str "first" - : !!str "Sammy", - ? 
!!str "last" - : !!str "Sosa" - } - : !!map { - ? !!str "hr" - : !!int "65", - ? !!str "avg" - : !!float "0.278" - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-06-05.data b/libs/PyYAML-3.10/tests/data/spec-06-05.data deleted file mode 100644 index 37613f5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-05.data +++ /dev/null @@ -1,6 +0,0 @@ -{ first: Sammy, last: Sosa }: -# Statistics: - hr: # Home runs - 65 - avg: # Average - 0.278 diff --git a/libs/PyYAML-3.10/tests/data/spec-06-06.canonical b/libs/PyYAML-3.10/tests/data/spec-06-06.canonical deleted file mode 100644 index 513d07a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-06.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "plain" - : !!str "text lines", - ? !!str "quoted" - : !!str "text lines", - ? !!str "block" - : !!str "text\n lines\n" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-06-06.data b/libs/PyYAML-3.10/tests/data/spec-06-06.data deleted file mode 100644 index 2f62d08..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-06.data +++ /dev/null @@ -1,7 +0,0 @@ -plain: text - lines -quoted: "text - lines" -block: | - text - lines diff --git a/libs/PyYAML-3.10/tests/data/spec-06-07.canonical b/libs/PyYAML-3.10/tests/data/spec-06-07.canonical deleted file mode 100644 index 11357e4..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-07.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "foo\nbar", - !!str "foo\n\nbar" -] diff --git a/libs/PyYAML-3.10/tests/data/spec-06-07.data b/libs/PyYAML-3.10/tests/data/spec-06-07.data deleted file mode 100644 index 130cfa7..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-07.data +++ /dev/null @@ -1,8 +0,0 @@ -- foo - - bar -- |- - foo - - bar - diff --git a/libs/PyYAML-3.10/tests/data/spec-06-08.canonical b/libs/PyYAML-3.10/tests/data/spec-06-08.canonical deleted file mode 100644 index cc72bc8..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-08.canonical +++ /dev/null @@ -1,5 +0,0 @@ -%YAML 1.1 ---- 
!!str -"specific\L\ - trimmed\n\n\n\ - as space" diff --git a/libs/PyYAML-3.10/tests/data/spec-06-08.data b/libs/PyYAML-3.10/tests/data/spec-06-08.data deleted file mode 100644 index f2896ed..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-06-08.data +++ /dev/null @@ -1,2 +0,0 @@ ->- - specific
 trimmed… … …… as… space diff --git a/libs/PyYAML-3.10/tests/data/spec-07-01.canonical b/libs/PyYAML-3.10/tests/data/spec-07-01.canonical deleted file mode 100644 index 8c8c48d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-01.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- !!str -"foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-01.data b/libs/PyYAML-3.10/tests/data/spec-07-01.data deleted file mode 100644 index 2113eb6..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-01.data +++ /dev/null @@ -1,3 +0,0 @@ -%FOO bar baz # Should be ignored - # with a warning. ---- "foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-01.skip-ext b/libs/PyYAML-3.10/tests/data/spec-07-01.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/spec-07-02.canonical b/libs/PyYAML-3.10/tests/data/spec-07-02.canonical deleted file mode 100644 index cb7dd1c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-02.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-02.data b/libs/PyYAML-3.10/tests/data/spec-07-02.data deleted file mode 100644 index c8b7322..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-02.data +++ /dev/null @@ -1,4 +0,0 @@ -%YAML 1.2 # Attempt parsing - # with a warning ---- -"foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-02.skip-ext b/libs/PyYAML-3.10/tests/data/spec-07-02.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/spec-07-03.data b/libs/PyYAML-3.10/tests/data/spec-07-03.data deleted file mode 100644 index 4bfa07a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-03.data +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 -%YAML 1.1 -foo diff --git a/libs/PyYAML-3.10/tests/data/spec-07-03.error b/libs/PyYAML-3.10/tests/data/spec-07-03.error deleted file mode 100644 index b0ac446..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-03.error +++ /dev/null @@ -1,3 +0,0 @@ -ERROR: -The YAML directive 
must only be -given at most once per document. diff --git a/libs/PyYAML-3.10/tests/data/spec-07-04.canonical b/libs/PyYAML-3.10/tests/data/spec-07-04.canonical deleted file mode 100644 index cb7dd1c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-04.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-04.data b/libs/PyYAML-3.10/tests/data/spec-07-04.data deleted file mode 100644 index 50f5ab9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-04.data +++ /dev/null @@ -1,3 +0,0 @@ -%TAG !yaml! tag:yaml.org,2002: ---- -!yaml!str "foo" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-05.data b/libs/PyYAML-3.10/tests/data/spec-07-05.data deleted file mode 100644 index 7276eae..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-05.data +++ /dev/null @@ -1,3 +0,0 @@ -%TAG ! !foo -%TAG ! !foo -bar diff --git a/libs/PyYAML-3.10/tests/data/spec-07-05.error b/libs/PyYAML-3.10/tests/data/spec-07-05.error deleted file mode 100644 index 5601b19..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-05.error +++ /dev/null @@ -1,4 +0,0 @@ -ERROR: -The TAG directive must only -be given at most once per -handle in the same document. diff --git a/libs/PyYAML-3.10/tests/data/spec-07-06.canonical b/libs/PyYAML-3.10/tests/data/spec-07-06.canonical deleted file mode 100644 index bddf616..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-06.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - ! "baz", - ! "string" -] diff --git a/libs/PyYAML-3.10/tests/data/spec-07-06.data b/libs/PyYAML-3.10/tests/data/spec-07-06.data deleted file mode 100644 index d9854cb..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-06.data +++ /dev/null @@ -1,5 +0,0 @@ -%TAG ! !foo -%TAG !yaml! 
tag:yaml.org,2002: ---- -- !bar "baz" -- !yaml!str "string" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-07a.canonical b/libs/PyYAML-3.10/tests/data/spec-07-07a.canonical deleted file mode 100644 index fa086df..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-07a.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -! "bar" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-07a.data b/libs/PyYAML-3.10/tests/data/spec-07-07a.data deleted file mode 100644 index 9d42ec3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-07a.data +++ /dev/null @@ -1,2 +0,0 @@ -# Private application: -!foo "bar" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-07b.canonical b/libs/PyYAML-3.10/tests/data/spec-07-07b.canonical deleted file mode 100644 index fe917d8..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-07b.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -! "bar" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-07b.data b/libs/PyYAML-3.10/tests/data/spec-07-07b.data deleted file mode 100644 index 2d36d0e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-07b.data +++ /dev/null @@ -1,4 +0,0 @@ -# Migrated to global: -%TAG ! tag:ben-kiki.org,2000:app/ ---- -!foo "bar" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-08.canonical b/libs/PyYAML-3.10/tests/data/spec-07-08.canonical deleted file mode 100644 index 703aa7b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-08.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - ! "bar", - ! "string", - ! "baz" -] diff --git a/libs/PyYAML-3.10/tests/data/spec-07-08.data b/libs/PyYAML-3.10/tests/data/spec-07-08.data deleted file mode 100644 index e2c6d9e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-08.data +++ /dev/null @@ -1,9 +0,0 @@ -# Explicitly specify default settings: -%TAG ! ! -%TAG !! tag:yaml.org,2002: -# Named handles have no default: -%TAG !o! 
tag:ben-kiki.org,2000: ---- -- !foo "bar" -- !!str "string" -- !o!type "baz" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-09.canonical b/libs/PyYAML-3.10/tests/data/spec-07-09.canonical deleted file mode 100644 index 32d9e94..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-09.canonical +++ /dev/null @@ -1,9 +0,0 @@ -%YAML 1.1 ---- -!!str "foo" -%YAML 1.1 ---- -!!str "bar" -%YAML 1.1 ---- -!!str "baz" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-09.data b/libs/PyYAML-3.10/tests/data/spec-07-09.data deleted file mode 100644 index 1209d47..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-09.data +++ /dev/null @@ -1,11 +0,0 @@ ---- -foo -... -# Repeated end marker. -... ---- -bar -# No end marker. ---- -baz -... diff --git a/libs/PyYAML-3.10/tests/data/spec-07-10.canonical b/libs/PyYAML-3.10/tests/data/spec-07-10.canonical deleted file mode 100644 index 1db650a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-10.canonical +++ /dev/null @@ -1,15 +0,0 @@ -%YAML 1.1 ---- -!!str "Root flow scalar" -%YAML 1.1 ---- -!!str "Root block scalar\n" -%YAML 1.1 ---- -!!map { - ? !!str "foo" - : !!str "bar" -} ---- -#!!str "" -!!null "" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-10.data b/libs/PyYAML-3.10/tests/data/spec-07-10.data deleted file mode 100644 index 6939b39..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-10.data +++ /dev/null @@ -1,11 +0,0 @@ -"Root flow - scalar" ---- !!str > - Root block - scalar ---- -# Root collection: -foo : bar -... # Is optional. ---- -# Explicit document may be empty. diff --git a/libs/PyYAML-3.10/tests/data/spec-07-11.data b/libs/PyYAML-3.10/tests/data/spec-07-11.data deleted file mode 100644 index d11302d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-11.data +++ /dev/null @@ -1,2 +0,0 @@ -# A stream may contain -# no documents. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-07-11.empty b/libs/PyYAML-3.10/tests/data/spec-07-11.empty deleted file mode 100644 index bfffa8b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-11.empty +++ /dev/null @@ -1,2 +0,0 @@ -# This stream contains no -# documents, only comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-07-12a.canonical b/libs/PyYAML-3.10/tests/data/spec-07-12a.canonical deleted file mode 100644 index efc116f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-12a.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "foo" - : !!str "bar" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-07-12a.data b/libs/PyYAML-3.10/tests/data/spec-07-12a.data deleted file mode 100644 index 3807d57..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-12a.data +++ /dev/null @@ -1,3 +0,0 @@ -# Implicit document. Root -# collection (mapping) node. -foo : bar diff --git a/libs/PyYAML-3.10/tests/data/spec-07-12b.canonical b/libs/PyYAML-3.10/tests/data/spec-07-12b.canonical deleted file mode 100644 index 04bcffc..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-12b.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "Text content\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-12b.data b/libs/PyYAML-3.10/tests/data/spec-07-12b.data deleted file mode 100644 index 43250db..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-12b.data +++ /dev/null @@ -1,4 +0,0 @@ -# Explicit document. Root -# scalar (literal) node. ---- | - Text content diff --git a/libs/PyYAML-3.10/tests/data/spec-07-13.canonical b/libs/PyYAML-3.10/tests/data/spec-07-13.canonical deleted file mode 100644 index 5af71e9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-13.canonical +++ /dev/null @@ -1,9 +0,0 @@ -%YAML 1.1 ---- -!!str "First document" ---- -! "No directives" ---- -! "With directives" ---- -! 
"Reset settings" diff --git a/libs/PyYAML-3.10/tests/data/spec-07-13.data b/libs/PyYAML-3.10/tests/data/spec-07-13.data deleted file mode 100644 index ba7ec63..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-07-13.data +++ /dev/null @@ -1,9 +0,0 @@ -! "First document" ---- -!foo "No directives" -%TAG ! !foo ---- -!bar "With directives" -%YAML 1.1 ---- -!baz "Reset settings" diff --git a/libs/PyYAML-3.10/tests/data/spec-08-01.canonical b/libs/PyYAML-3.10/tests/data/spec-08-01.canonical deleted file mode 100644 index 69e4161..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-01.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? &A1 !!str "foo" - : !!str "bar", - ? &A2 !!str "baz" - : *A1 -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-01.data b/libs/PyYAML-3.10/tests/data/spec-08-01.data deleted file mode 100644 index 48986ec..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-01.data +++ /dev/null @@ -1,2 +0,0 @@ -!!str &a1 "foo" : !!str bar -&a2 baz : *a1 diff --git a/libs/PyYAML-3.10/tests/data/spec-08-02.canonical b/libs/PyYAML-3.10/tests/data/spec-08-02.canonical deleted file mode 100644 index dd6f76e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-02.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "First occurrence" - : &A !!str "Value", - ? !!str "Second occurrence" - : *A -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-02.data b/libs/PyYAML-3.10/tests/data/spec-08-02.data deleted file mode 100644 index 600d179..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-02.data +++ /dev/null @@ -1,2 +0,0 @@ -First occurrence: &anchor Value -Second occurrence: *anchor diff --git a/libs/PyYAML-3.10/tests/data/spec-08-03.canonical b/libs/PyYAML-3.10/tests/data/spec-08-03.canonical deleted file mode 100644 index be7ea8f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-03.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? ! "foo" - : ! 
"baz" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-03.data b/libs/PyYAML-3.10/tests/data/spec-08-03.data deleted file mode 100644 index 8e51f52..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-03.data +++ /dev/null @@ -1,2 +0,0 @@ -! foo : - ! baz diff --git a/libs/PyYAML-3.10/tests/data/spec-08-04.data b/libs/PyYAML-3.10/tests/data/spec-08-04.data deleted file mode 100644 index f7d1b01..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-04.data +++ /dev/null @@ -1,2 +0,0 @@ -- ! foo -- !<$:?> bar diff --git a/libs/PyYAML-3.10/tests/data/spec-08-04.error b/libs/PyYAML-3.10/tests/data/spec-08-04.error deleted file mode 100644 index 6066375..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-04.error +++ /dev/null @@ -1,6 +0,0 @@ -ERROR: -- Verbatim tags aren't resolved, - so ! is invalid. -- The $:? tag is neither a global - URI tag nor a local tag starting - with “!”. diff --git a/libs/PyYAML-3.10/tests/data/spec-08-05.canonical b/libs/PyYAML-3.10/tests/data/spec-08-05.canonical deleted file mode 100644 index a5c710a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-05.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - ! "foo", - ! "bar", - ! "baz", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-05.data b/libs/PyYAML-3.10/tests/data/spec-08-05.data deleted file mode 100644 index 93576ed..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-05.data +++ /dev/null @@ -1,5 +0,0 @@ -%TAG !o! tag:ben-kiki.org,2000: ---- -- !local foo -- !!str bar -- !o!type baz diff --git a/libs/PyYAML-3.10/tests/data/spec-08-06.data b/libs/PyYAML-3.10/tests/data/spec-08-06.data deleted file mode 100644 index 8580010..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-06.data +++ /dev/null @@ -1,5 +0,0 @@ -%TAG !o! tag:ben-kiki.org,2000: ---- -- !$a!b foo -- !o! 
bar -- !h!type baz diff --git a/libs/PyYAML-3.10/tests/data/spec-08-06.error b/libs/PyYAML-3.10/tests/data/spec-08-06.error deleted file mode 100644 index fb76f42..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-06.error +++ /dev/null @@ -1,4 +0,0 @@ -ERROR: -- The !$a! looks like a handle. -- The !o! handle has no suffix. -- The !h! handle wasn't declared. diff --git a/libs/PyYAML-3.10/tests/data/spec-08-07.canonical b/libs/PyYAML-3.10/tests/data/spec-08-07.canonical deleted file mode 100644 index e2f43d9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-07.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - ! "12", - ! "12", -# ! "12", - ! "12", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-07.data b/libs/PyYAML-3.10/tests/data/spec-08-07.data deleted file mode 100644 index 98aa565..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-07.data +++ /dev/null @@ -1,4 +0,0 @@ -# Assuming conventional resolution: -- "12" -- 12 -- ! 12 diff --git a/libs/PyYAML-3.10/tests/data/spec-08-08.canonical b/libs/PyYAML-3.10/tests/data/spec-08-08.canonical deleted file mode 100644 index d3f8b1a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-08.canonical +++ /dev/null @@ -1,15 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "foo" - : !!str "bar baz" -} -%YAML 1.1 ---- -!!str "foo bar" -%YAML 1.1 ---- -!!str "foo bar" -%YAML 1.1 ---- -!!str "foo\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-08-08.data b/libs/PyYAML-3.10/tests/data/spec-08-08.data deleted file mode 100644 index 757a93d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-08.data +++ /dev/null @@ -1,13 +0,0 @@ ---- -foo: - "bar - baz" ---- -"foo - bar" ---- -foo - bar ---- | - foo -... diff --git a/libs/PyYAML-3.10/tests/data/spec-08-09.canonical b/libs/PyYAML-3.10/tests/data/spec-08-09.canonical deleted file mode 100644 index 3805daf..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-09.canonical +++ /dev/null @@ -1,21 +0,0 @@ -%YAML 1.1 ---- !!map { - ? !!str "scalars" : !!map { - ? 
!!str "plain" - : !!str "some text", - ? !!str "quoted" - : !!map { - ? !!str "single" - : !!str "some text", - ? !!str "double" - : !!str "some text" - } }, - ? !!str "collections" : !!map { - ? !!str "sequence" : !!seq [ - !!str "entry", - !!map { - ? !!str "key" : !!str "value" - } ], - ? !!str "mapping" : !!map { - ? !!str "key" : !!str "value" -} } } diff --git a/libs/PyYAML-3.10/tests/data/spec-08-09.data b/libs/PyYAML-3.10/tests/data/spec-08-09.data deleted file mode 100644 index 69da042..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-09.data +++ /dev/null @@ -1,11 +0,0 @@ ---- -scalars: - plain: !!str some text - quoted: - single: 'some text' - double: "some text" -collections: - sequence: !!seq [ !!str entry, - # Mapping entry: - key: value ] - mapping: { key: value } diff --git a/libs/PyYAML-3.10/tests/data/spec-08-10.canonical b/libs/PyYAML-3.10/tests/data/spec-08-10.canonical deleted file mode 100644 index 8281c5e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-10.canonical +++ /dev/null @@ -1,23 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "block styles" : !!map { - ? !!str "scalars" : !!map { - ? !!str "literal" - : !!str "#!/usr/bin/perl\n\ - print \"Hello, - world!\\n\";\n", - ? !!str "folded" - : !!str "This sentence - is false.\n" - }, - ? !!str "collections" : !!map { - ? !!str "sequence" : !!seq [ - !!str "entry", - !!map { - ? !!str "key" : !!str "value" - } - ], - ? !!str "mapping" : !!map { - ? !!str "key" : !!str "value" -} } } } diff --git a/libs/PyYAML-3.10/tests/data/spec-08-10.data b/libs/PyYAML-3.10/tests/data/spec-08-10.data deleted file mode 100644 index 72acc56..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-10.data +++ /dev/null @@ -1,15 +0,0 @@ -block styles: - scalars: - literal: !!str | - #!/usr/bin/perl - print "Hello, world!\n"; - folded: > - This sentence - is false. 
- collections: !!map - sequence: !!seq # Entry: - - entry # Plain - # Mapping entry: - - key: value - mapping: - key: value diff --git a/libs/PyYAML-3.10/tests/data/spec-08-11.canonical b/libs/PyYAML-3.10/tests/data/spec-08-11.canonical deleted file mode 100644 index dd6f76e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-11.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "First occurrence" - : &A !!str "Value", - ? !!str "Second occurrence" - : *A -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-11.data b/libs/PyYAML-3.10/tests/data/spec-08-11.data deleted file mode 100644 index 600d179..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-11.data +++ /dev/null @@ -1,2 +0,0 @@ -First occurrence: &anchor Value -Second occurrence: *anchor diff --git a/libs/PyYAML-3.10/tests/data/spec-08-12.canonical b/libs/PyYAML-3.10/tests/data/spec-08-12.canonical deleted file mode 100644 index 93899f4..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-12.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "Without properties", - &A !!str "Anchored", - !!str "Tagged", - *A, - !!str "", - !!str "", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-12.data b/libs/PyYAML-3.10/tests/data/spec-08-12.data deleted file mode 100644 index 3d4c6b7..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-12.data +++ /dev/null @@ -1,8 +0,0 @@ -[ - Without properties, - &anchor "Anchored", - !!str 'Tagged', - *anchor, # Alias node - !!str , # Empty plain scalar - '', # Empty plain scalar -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-13.canonical b/libs/PyYAML-3.10/tests/data/spec-08-13.canonical deleted file mode 100644 index 618bb7b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-13.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "foo" -# : !!str "", -# ? !!str "" - : !!null "", - ? 
!!null "" - : !!str "bar", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-13.data b/libs/PyYAML-3.10/tests/data/spec-08-13.data deleted file mode 100644 index ebe663a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-13.data +++ /dev/null @@ -1,4 +0,0 @@ -{ - ? foo :, - ? : bar, -} diff --git a/libs/PyYAML-3.10/tests/data/spec-08-13.skip-ext b/libs/PyYAML-3.10/tests/data/spec-08-13.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/spec-08-14.canonical b/libs/PyYAML-3.10/tests/data/spec-08-14.canonical deleted file mode 100644 index 11db439..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-14.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "flow in block", - !!str "Block scalar\n", - !!map { - ? !!str "foo" - : !!str "bar" - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-14.data b/libs/PyYAML-3.10/tests/data/spec-08-14.data deleted file mode 100644 index 2fbb1f7..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-14.data +++ /dev/null @@ -1,5 +0,0 @@ -- "flow in block" -- > - Block scalar -- !!map # Block collection - foo : bar diff --git a/libs/PyYAML-3.10/tests/data/spec-08-15.canonical b/libs/PyYAML-3.10/tests/data/spec-08-15.canonical deleted file mode 100644 index 76f028e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-15.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!null "", - !!map { - ? !!str "foo" - : !!null "", - ? !!null "" - : !!str "bar", - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-08-15.data b/libs/PyYAML-3.10/tests/data/spec-08-15.data deleted file mode 100644 index 7c86bcf..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-08-15.data +++ /dev/null @@ -1,5 +0,0 @@ -- # Empty plain scalar -- ? foo - : - ? 
- : bar diff --git a/libs/PyYAML-3.10/tests/data/spec-09-01.canonical b/libs/PyYAML-3.10/tests/data/spec-09-01.canonical deleted file mode 100644 index e71a548..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-01.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "simple key" - : !!map { - ? !!str "also simple" - : !!str "value", - ? !!str "not a simple key" - : !!str "any value" - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-01.data b/libs/PyYAML-3.10/tests/data/spec-09-01.data deleted file mode 100644 index 9e83eaf..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-01.data +++ /dev/null @@ -1,6 +0,0 @@ -"simple key" : { - "also simple" : value, - ? "not a - simple key" : "any - value" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-02.canonical b/libs/PyYAML-3.10/tests/data/spec-09-02.canonical deleted file mode 100644 index 6f8f41a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-02.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!str "as space \ - trimmed\n\ - specific\L\n\ - escaped\t\n\ - none" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-02.data b/libs/PyYAML-3.10/tests/data/spec-09-02.data deleted file mode 100644 index d84883d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-02.data +++ /dev/null @@ -1,6 +0,0 @@ - "as space - trimmed - - specific
 - escaped \
 - none" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-03.canonical b/libs/PyYAML-3.10/tests/data/spec-09-03.canonical deleted file mode 100644 index 658c6df..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-03.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str " last", - !!str " last", - !!str " \tfirst last", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-03.data b/libs/PyYAML-3.10/tests/data/spec-09-03.data deleted file mode 100644 index e0b914d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-03.data +++ /dev/null @@ -1,6 +0,0 @@ -- " - last" -- " - last" -- " first - last" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-04.canonical b/libs/PyYAML-3.10/tests/data/spec-09-04.canonical deleted file mode 100644 index fa46632..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-04.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!str "first \ - inner 1 \ - inner 2 \ - last" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-04.data b/libs/PyYAML-3.10/tests/data/spec-09-04.data deleted file mode 100644 index 313a91b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-04.data +++ /dev/null @@ -1,4 +0,0 @@ - "first - inner 1 - \ inner 2 \ - last" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-05.canonical b/libs/PyYAML-3.10/tests/data/spec-09-05.canonical deleted file mode 100644 index 24d1052..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-05.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "first ", - !!str "first\nlast", - !!str "first inner \tlast", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-05.data b/libs/PyYAML-3.10/tests/data/spec-09-05.data deleted file mode 100644 index 624c30e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-05.data +++ /dev/null @@ -1,8 +0,0 @@ -- "first - " -- "first - - last" -- "first - inner - \ last" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-06.canonical b/libs/PyYAML-3.10/tests/data/spec-09-06.canonical deleted file mode 100644 index 5028772..0000000 
--- a/libs/PyYAML-3.10/tests/data/spec-09-06.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "here's to \"quotes\"" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-06.data b/libs/PyYAML-3.10/tests/data/spec-09-06.data deleted file mode 100644 index b038078..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-06.data +++ /dev/null @@ -1 +0,0 @@ - 'here''s to "quotes"' diff --git a/libs/PyYAML-3.10/tests/data/spec-09-07.canonical b/libs/PyYAML-3.10/tests/data/spec-09-07.canonical deleted file mode 100644 index e71a548..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-07.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "simple key" - : !!map { - ? !!str "also simple" - : !!str "value", - ? !!str "not a simple key" - : !!str "any value" - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-07.data b/libs/PyYAML-3.10/tests/data/spec-09-07.data deleted file mode 100644 index 755b54a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-07.data +++ /dev/null @@ -1,6 +0,0 @@ -'simple key' : { - 'also simple' : value, - ? 'not a - simple key' : 'any - value' -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-08.canonical b/libs/PyYAML-3.10/tests/data/spec-09-08.canonical deleted file mode 100644 index 06abdb5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-08.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!str "as space \ - trimmed\n\ - specific\L\n\ - none" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-08.data b/libs/PyYAML-3.10/tests/data/spec-09-08.data deleted file mode 100644 index aa4d458..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-08.data +++ /dev/null @@ -1 +0,0 @@ - 'as space … trimmed …… specific
… none' diff --git a/libs/PyYAML-3.10/tests/data/spec-09-09.canonical b/libs/PyYAML-3.10/tests/data/spec-09-09.canonical deleted file mode 100644 index 658c6df..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-09.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str " last", - !!str " last", - !!str " \tfirst last", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-09.data b/libs/PyYAML-3.10/tests/data/spec-09-09.data deleted file mode 100644 index 52171df..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-09.data +++ /dev/null @@ -1,6 +0,0 @@ -- ' - last' -- ' - last' -- ' first - last' diff --git a/libs/PyYAML-3.10/tests/data/spec-09-10.canonical b/libs/PyYAML-3.10/tests/data/spec-09-10.canonical deleted file mode 100644 index 2028d04..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-10.canonical +++ /dev/null @@ -1,5 +0,0 @@ -%YAML 1.1 ---- -!!str "first \ - inner \ - last" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-10.data b/libs/PyYAML-3.10/tests/data/spec-09-10.data deleted file mode 100644 index 0e41449..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-10.data +++ /dev/null @@ -1,3 +0,0 @@ - 'first - inner - last' diff --git a/libs/PyYAML-3.10/tests/data/spec-09-11.canonical b/libs/PyYAML-3.10/tests/data/spec-09-11.canonical deleted file mode 100644 index 4eb222c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-11.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "first ", - !!str "first\nlast", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-11.data b/libs/PyYAML-3.10/tests/data/spec-09-11.data deleted file mode 100644 index 5efa873..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-11.data +++ /dev/null @@ -1,5 +0,0 @@ -- 'first - ' -- 'first - - last' diff --git a/libs/PyYAML-3.10/tests/data/spec-09-12.canonical b/libs/PyYAML-3.10/tests/data/spec-09-12.canonical deleted file mode 100644 index d8e6dce..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-12.canonical +++ /dev/null @@ -1,12 +0,0 @@ 
-%YAML 1.1 ---- -!!seq [ - !!str "::std::vector", - !!str "Up, up, and away!", - !!int "-123", - !!seq [ - !!str "::std::vector", - !!str "Up, up, and away!", - !!int "-123", - ] -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-12.data b/libs/PyYAML-3.10/tests/data/spec-09-12.data deleted file mode 100644 index b9a3ac5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-12.data +++ /dev/null @@ -1,8 +0,0 @@ -# Outside flow collection: -- ::std::vector -- Up, up, and away! -- -123 -# Inside flow collection: -- [ '::std::vector', - "Up, up, and away!", - -123 ] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-13.canonical b/libs/PyYAML-3.10/tests/data/spec-09-13.canonical deleted file mode 100644 index e71a548..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-13.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "simple key" - : !!map { - ? !!str "also simple" - : !!str "value", - ? !!str "not a simple key" - : !!str "any value" - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-13.data b/libs/PyYAML-3.10/tests/data/spec-09-13.data deleted file mode 100644 index b156386..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-13.data +++ /dev/null @@ -1,6 +0,0 @@ -simple key : { - also simple : value, - ? not a - simple key : any - value -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-14.data b/libs/PyYAML-3.10/tests/data/spec-09-14.data deleted file mode 100644 index 97f2316..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-14.data +++ /dev/null @@ -1,14 +0,0 @@ ---- ---- ||| : foo -... >>>: bar ---- -[ ---- -, -... , -{ ---- : -... # Nested -} -] -... diff --git a/libs/PyYAML-3.10/tests/data/spec-09-14.error b/libs/PyYAML-3.10/tests/data/spec-09-14.error deleted file mode 100644 index 9f3db7b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-14.error +++ /dev/null @@ -1,6 +0,0 @@ -ERROR: - The --- and ... document - start and end markers must - not be specified as the - first content line of a - non-indented plain scalar. 
diff --git a/libs/PyYAML-3.10/tests/data/spec-09-15.canonical b/libs/PyYAML-3.10/tests/data/spec-09-15.canonical deleted file mode 100644 index df02040..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-15.canonical +++ /dev/null @@ -1,18 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "---" - : !!str "foo", - ? !!str "..." - : !!str "bar" -} -%YAML 1.1 ---- -!!seq [ - !!str "---", - !!str "...", - !!map { - ? !!str "---" - : !!str "..." - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-15.data b/libs/PyYAML-3.10/tests/data/spec-09-15.data deleted file mode 100644 index e6863b0..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-15.data +++ /dev/null @@ -1,13 +0,0 @@ ---- -"---" : foo -...: bar ---- -[ ----, -..., -{ -? --- -: ... -} -] -... diff --git a/libs/PyYAML-3.10/tests/data/spec-09-16.canonical b/libs/PyYAML-3.10/tests/data/spec-09-16.canonical deleted file mode 100644 index 06abdb5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-16.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!str "as space \ - trimmed\n\ - specific\L\n\ - none" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-16.data b/libs/PyYAML-3.10/tests/data/spec-09-16.data deleted file mode 100644 index 473beb9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-16.data +++ /dev/null @@ -1,3 +0,0 @@ -# Tabs are confusing: -# as space/trimmed/specific/none - as space … trimmed …… specific
… none diff --git a/libs/PyYAML-3.10/tests/data/spec-09-17.canonical b/libs/PyYAML-3.10/tests/data/spec-09-17.canonical deleted file mode 100644 index 68cb70d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-17.canonical +++ /dev/null @@ -1,4 +0,0 @@ -%YAML 1.1 ---- -!!str "first line\n\ - more line" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-17.data b/libs/PyYAML-3.10/tests/data/spec-09-17.data deleted file mode 100644 index 97bc46c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-17.data +++ /dev/null @@ -1,3 +0,0 @@ - first line - - more line diff --git a/libs/PyYAML-3.10/tests/data/spec-09-18.canonical b/libs/PyYAML-3.10/tests/data/spec-09-18.canonical deleted file mode 100644 index f21428f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-18.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "literal\n", - !!str " folded\n", - !!str "keep\n\n", - !!str " strip", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-18.data b/libs/PyYAML-3.10/tests/data/spec-09-18.data deleted file mode 100644 index 68c5d7c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-18.data +++ /dev/null @@ -1,9 +0,0 @@ -- | # Just the style - literal -- >1 # Indentation indicator - folded -- |+ # Chomping indicator - keep - -- >-1 # Both indicators - strip diff --git a/libs/PyYAML-3.10/tests/data/spec-09-19.canonical b/libs/PyYAML-3.10/tests/data/spec-09-19.canonical deleted file mode 100644 index 3e828d7..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-19.canonical +++ /dev/null @@ -1,6 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "literal\n", - !!str "folded\n", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-19.data b/libs/PyYAML-3.10/tests/data/spec-09-19.data deleted file mode 100644 index f0e589d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-19.data +++ /dev/null @@ -1,4 +0,0 @@ -- | - literal -- > - folded diff --git a/libs/PyYAML-3.10/tests/data/spec-09-20.canonical b/libs/PyYAML-3.10/tests/data/spec-09-20.canonical deleted file mode 100644 
index d03bef5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-20.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "detected\n", - !!str "\n\n# detected\n", - !!str " explicit\n", - !!str "\t\ndetected\n", -] diff --git a/libs/PyYAML-3.10/tests/data/spec-09-20.data b/libs/PyYAML-3.10/tests/data/spec-09-20.data deleted file mode 100644 index 39bee04..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-20.data +++ /dev/null @@ -1,11 +0,0 @@ -- | - detected -- > - - - # detected -- |1 - explicit -- > - - detected diff --git a/libs/PyYAML-3.10/tests/data/spec-09-20.skip-ext b/libs/PyYAML-3.10/tests/data/spec-09-20.skip-ext deleted file mode 100644 index e69de29..0000000 diff --git a/libs/PyYAML-3.10/tests/data/spec-09-21.data b/libs/PyYAML-3.10/tests/data/spec-09-21.data deleted file mode 100644 index 0fdd14f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-21.data +++ /dev/null @@ -1,8 +0,0 @@ -- | - - text -- > - text - text -- |1 - text diff --git a/libs/PyYAML-3.10/tests/data/spec-09-21.error b/libs/PyYAML-3.10/tests/data/spec-09-21.error deleted file mode 100644 index 1379ca5..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-21.error +++ /dev/null @@ -1,7 +0,0 @@ -ERROR: -- A leading all-space line must - not have too many spaces. -- A following text line must - not be less indented. -- The text is less indented - than the indicated level. diff --git a/libs/PyYAML-3.10/tests/data/spec-09-22.canonical b/libs/PyYAML-3.10/tests/data/spec-09-22.canonical deleted file mode 100644 index c1bbcd2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-22.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "strip" - : !!str "text", - ? !!str "clip" - : !!str "text\n", - ? 
!!str "keep" - : !!str "text\L", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-22.data b/libs/PyYAML-3.10/tests/data/spec-09-22.data deleted file mode 100644 index 0dd51eb..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-22.data +++ /dev/null @@ -1,4 +0,0 @@ -strip: |- - text
clip: | - text…keep: |+ - text
 \ No newline at end of file diff --git a/libs/PyYAML-3.10/tests/data/spec-09-23.canonical b/libs/PyYAML-3.10/tests/data/spec-09-23.canonical deleted file mode 100644 index c4444ca..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-23.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "strip" - : !!str "# text", - ? !!str "clip" - : !!str "# text\n", - ? !!str "keep" - : !!str "# text\L\n", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-23.data b/libs/PyYAML-3.10/tests/data/spec-09-23.data deleted file mode 100644 index 8972d2b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-23.data +++ /dev/null @@ -1,11 +0,0 @@ - # Strip - # Comments: -strip: |- - # text
 
 # Clip - # comments: -…clip: | - # text… 
 # Keep - # comments: -…keep: |+ - # text
… # Trail - # comments. diff --git a/libs/PyYAML-3.10/tests/data/spec-09-24.canonical b/libs/PyYAML-3.10/tests/data/spec-09-24.canonical deleted file mode 100644 index 45a99b0..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-24.canonical +++ /dev/null @@ -1,10 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "strip" - : !!str "", - ? !!str "clip" - : !!str "", - ? !!str "keep" - : !!str "\n", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-09-24.data b/libs/PyYAML-3.10/tests/data/spec-09-24.data deleted file mode 100644 index de0b64b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-24.data +++ /dev/null @@ -1,6 +0,0 @@ -strip: >- - -clip: > - -keep: |+ - diff --git a/libs/PyYAML-3.10/tests/data/spec-09-25.canonical b/libs/PyYAML-3.10/tests/data/spec-09-25.canonical deleted file mode 100644 index 9d2327b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-25.canonical +++ /dev/null @@ -1,4 +0,0 @@ -%YAML 1.1 ---- -!!str "literal\n\ - \ttext\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-25.data b/libs/PyYAML-3.10/tests/data/spec-09-25.data deleted file mode 100644 index f6303a1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-25.data +++ /dev/null @@ -1,3 +0,0 @@ -| # Simple block scalar - literal - text diff --git a/libs/PyYAML-3.10/tests/data/spec-09-26.canonical b/libs/PyYAML-3.10/tests/data/spec-09-26.canonical deleted file mode 100644 index 3029a11..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-26.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "\n\nliteral\n\ntext\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-26.data b/libs/PyYAML-3.10/tests/data/spec-09-26.data deleted file mode 100644 index f28555a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-26.data +++ /dev/null @@ -1,8 +0,0 @@ -| - - - literal - - text - - # Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-27.canonical b/libs/PyYAML-3.10/tests/data/spec-09-27.canonical deleted file mode 100644 index 3029a11..0000000 --- 
a/libs/PyYAML-3.10/tests/data/spec-09-27.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "\n\nliteral\n\ntext\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-27.data b/libs/PyYAML-3.10/tests/data/spec-09-27.data deleted file mode 100644 index f28555a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-27.data +++ /dev/null @@ -1,8 +0,0 @@ -| - - - literal - - text - - # Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-28.canonical b/libs/PyYAML-3.10/tests/data/spec-09-28.canonical deleted file mode 100644 index 3029a11..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-28.canonical +++ /dev/null @@ -1,3 +0,0 @@ -%YAML 1.1 ---- -!!str "\n\nliteral\n\ntext\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-28.data b/libs/PyYAML-3.10/tests/data/spec-09-28.data deleted file mode 100644 index f28555a..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-28.data +++ /dev/null @@ -1,8 +0,0 @@ -| - - - literal - - text - - # Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-29.canonical b/libs/PyYAML-3.10/tests/data/spec-09-29.canonical deleted file mode 100644 index 0980789..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-29.canonical +++ /dev/null @@ -1,4 +0,0 @@ -%YAML 1.1 ---- -!!str "folded text\n\ - \tlines\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-29.data b/libs/PyYAML-3.10/tests/data/spec-09-29.data deleted file mode 100644 index 82e611f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-29.data +++ /dev/null @@ -1,4 +0,0 @@ -> # Simple folded scalar - folded - text - lines diff --git a/libs/PyYAML-3.10/tests/data/spec-09-30.canonical b/libs/PyYAML-3.10/tests/data/spec-09-30.canonical deleted file mode 100644 index fc37db1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-30.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!str "folded line\n\ - next line\n\n\ - \ * bullet\n\ - \ * list\n\n\ - last line\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-30.data b/libs/PyYAML-3.10/tests/data/spec-09-30.data 
deleted file mode 100644 index a4d8c36..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-30.data +++ /dev/null @@ -1,14 +0,0 @@ -> - folded - line - - next - line - - * bullet - * list - - last - line - -# Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-31.canonical b/libs/PyYAML-3.10/tests/data/spec-09-31.canonical deleted file mode 100644 index fc37db1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-31.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!str "folded line\n\ - next line\n\n\ - \ * bullet\n\ - \ * list\n\n\ - last line\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-31.data b/libs/PyYAML-3.10/tests/data/spec-09-31.data deleted file mode 100644 index a4d8c36..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-31.data +++ /dev/null @@ -1,14 +0,0 @@ -> - folded - line - - next - line - - * bullet - * list - - last - line - -# Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-32.canonical b/libs/PyYAML-3.10/tests/data/spec-09-32.canonical deleted file mode 100644 index fc37db1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-32.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!str "folded line\n\ - next line\n\n\ - \ * bullet\n\ - \ * list\n\n\ - last line\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-32.data b/libs/PyYAML-3.10/tests/data/spec-09-32.data deleted file mode 100644 index a4d8c36..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-32.data +++ /dev/null @@ -1,14 +0,0 @@ -> - folded - line - - next - line - - * bullet - * list - - last - line - -# Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-09-33.canonical b/libs/PyYAML-3.10/tests/data/spec-09-33.canonical deleted file mode 100644 index fc37db1..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-33.canonical +++ /dev/null @@ -1,7 +0,0 @@ -%YAML 1.1 ---- -!!str "folded line\n\ - next line\n\n\ - \ * bullet\n\ - \ * list\n\n\ - last line\n" diff --git a/libs/PyYAML-3.10/tests/data/spec-09-33.data 
b/libs/PyYAML-3.10/tests/data/spec-09-33.data deleted file mode 100644 index a4d8c36..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-09-33.data +++ /dev/null @@ -1,14 +0,0 @@ -> - folded - line - - next - line - - * bullet - * list - - last - line - -# Comment diff --git a/libs/PyYAML-3.10/tests/data/spec-10-01.canonical b/libs/PyYAML-3.10/tests/data/spec-10-01.canonical deleted file mode 100644 index d08cdd4..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-01.canonical +++ /dev/null @@ -1,12 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!seq [ - !!str "inner", - !!str "inner", - ], - !!seq [ - !!str "inner", - !!str "last", - ], -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-01.data b/libs/PyYAML-3.10/tests/data/spec-10-01.data deleted file mode 100644 index e668d38..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-01.data +++ /dev/null @@ -1,2 +0,0 @@ -- [ inner, inner, ] -- [inner,last] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-02.canonical b/libs/PyYAML-3.10/tests/data/spec-10-02.canonical deleted file mode 100644 index 82fe0d9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-02.canonical +++ /dev/null @@ -1,14 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!str "double quoted", - !!str "single quoted", - !!str "plain text", - !!seq [ - !!str "nested", - ], - !!map { - ? !!str "single" - : !!str "pair" - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-02.data b/libs/PyYAML-3.10/tests/data/spec-10-02.data deleted file mode 100644 index 3b23351..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-02.data +++ /dev/null @@ -1,8 +0,0 @@ -[ -"double - quoted", 'single - quoted', -plain - text, [ nested ], -single: pair , -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-03.canonical b/libs/PyYAML-3.10/tests/data/spec-10-03.canonical deleted file mode 100644 index 1443395..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-03.canonical +++ /dev/null @@ -1,12 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "block" - : !!seq [ - !!str "one", - !!map { - ? 
!!str "two" - : !!str "three" - } - ] -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-03.data b/libs/PyYAML-3.10/tests/data/spec-10-03.data deleted file mode 100644 index 9e15f83..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-03.data +++ /dev/null @@ -1,4 +0,0 @@ -block: # Block - # sequence -- one -- two : three diff --git a/libs/PyYAML-3.10/tests/data/spec-10-04.canonical b/libs/PyYAML-3.10/tests/data/spec-10-04.canonical deleted file mode 100644 index ae486a3..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-04.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "block" - : !!seq [ - !!str "one", - !!seq [ - !!str "two" - ] - ] -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-04.data b/libs/PyYAML-3.10/tests/data/spec-10-04.data deleted file mode 100644 index 2905b0d..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-04.data +++ /dev/null @@ -1,4 +0,0 @@ -block: -- one -- - - two diff --git a/libs/PyYAML-3.10/tests/data/spec-10-05.canonical b/libs/PyYAML-3.10/tests/data/spec-10-05.canonical deleted file mode 100644 index 07cc0c9..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-05.canonical +++ /dev/null @@ -1,14 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!null "", - !!str "block node\n", - !!seq [ - !!str "one", - !!str "two", - ], - !!map { - ? !!str "one" - : !!str "two", - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-05.data b/libs/PyYAML-3.10/tests/data/spec-10-05.data deleted file mode 100644 index f19a99e..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-05.data +++ /dev/null @@ -1,7 +0,0 @@ -- # Empty -- | - block node -- - one # in-line - - two # sequence -- one: two # in-line - # mapping diff --git a/libs/PyYAML-3.10/tests/data/spec-10-06.canonical b/libs/PyYAML-3.10/tests/data/spec-10-06.canonical deleted file mode 100644 index d9986c2..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-06.canonical +++ /dev/null @@ -1,16 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!map { - ? !!str "inner" - : !!str "entry", - ? 
!!str "also" - : !!str "inner" - }, - !!map { - ? !!str "inner" - : !!str "entry", - ? !!str "last" - : !!str "entry" - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-06.data b/libs/PyYAML-3.10/tests/data/spec-10-06.data deleted file mode 100644 index 860ba25..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-06.data +++ /dev/null @@ -1,2 +0,0 @@ -- { inner : entry , also: inner , } -- {inner: entry,last : entry} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-07.canonical b/libs/PyYAML-3.10/tests/data/spec-10-07.canonical deleted file mode 100644 index ec74230..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-07.canonical +++ /dev/null @@ -1,16 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!null "" - : !!str "value", - ? !!str "explicit key" - : !!str "value", - ? !!str "simple key" - : !!str "value", - ? !!seq [ - !!str "collection", - !!str "simple", - !!str "key" - ] - : !!str "value" -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-07.data b/libs/PyYAML-3.10/tests/data/spec-10-07.data deleted file mode 100644 index ff943fb..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-07.data +++ /dev/null @@ -1,7 +0,0 @@ -{ -? : value, # Empty key -? 
explicit - key: value, -simple key : value, -[ collection, simple, key ]: value -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-08.data b/libs/PyYAML-3.10/tests/data/spec-10-08.data deleted file mode 100644 index 55bd788..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-08.data +++ /dev/null @@ -1,5 +0,0 @@ -{ -multi-line - simple key : value, -very long ...................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................(>1KB).............................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................................
...................................................................................................................................................................................................................................................................................................................................................................................................................................... key: value -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-08.error b/libs/PyYAML-3.10/tests/data/spec-10-08.error deleted file mode 100644 index 3979e1f..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-08.error +++ /dev/null @@ -1,5 +0,0 @@ -ERROR: -- A simple key is restricted - to only one line. -- A simple key must not be - longer than 1024 characters. diff --git a/libs/PyYAML-3.10/tests/data/spec-10-09.canonical b/libs/PyYAML-3.10/tests/data/spec-10-09.canonical deleted file mode 100644 index 4d9827b..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-09.canonical +++ /dev/null @@ -1,8 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "key" - : !!str "value", - ? !!str "empty" - : !!null "", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-09.data b/libs/PyYAML-3.10/tests/data/spec-10-09.data deleted file mode 100644 index 4d55e21..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-09.data +++ /dev/null @@ -1,4 +0,0 @@ -{ -key : value, -empty: # empty value↓ -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-10.canonical b/libs/PyYAML-3.10/tests/data/spec-10-10.canonical deleted file mode 100644 index 016fb64..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-10.canonical +++ /dev/null @@ -1,16 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "explicit key1" - : !!str "explicit value", - ? !!str "explicit key2" - : !!null "", - ? !!str "explicit key3" - : !!null "", - ? !!str "simple key1" - : !!str "explicit value", - ? !!str "simple key2" - : !!null "", - ? 
!!str "simple key3" - : !!null "", -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-10.data b/libs/PyYAML-3.10/tests/data/spec-10-10.data deleted file mode 100644 index 0888b05..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-10.data +++ /dev/null @@ -1,8 +0,0 @@ -{ -? explicit key1 : explicit value, -? explicit key2 : , # Explicit empty -? explicit key3, # Empty value -simple key1 : explicit value, -simple key2 : , # Explicit empty -simple key3, # Empty value -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-11.canonical b/libs/PyYAML-3.10/tests/data/spec-10-11.canonical deleted file mode 100644 index 7309544..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-11.canonical +++ /dev/null @@ -1,24 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!map { - ? !!str "explicit key1" - : !!str "explicit value", - }, - !!map { - ? !!str "explicit key2" - : !!null "", - }, - !!map { - ? !!str "explicit key3" - : !!null "", - }, - !!map { - ? !!str "simple key1" - : !!str "explicit value", - }, - !!map { - ? !!str "simple key2" - : !!null "", - }, -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-11.data b/libs/PyYAML-3.10/tests/data/spec-10-11.data deleted file mode 100644 index 9f05568..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-11.data +++ /dev/null @@ -1,7 +0,0 @@ -[ -? explicit key1 : explicit value, -? explicit key2 : , # Explicit empty -? explicit key3, # Implicit empty -simple key1 : explicit value, -simple key2 : , # Explicit empty -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-12.canonical b/libs/PyYAML-3.10/tests/data/spec-10-12.canonical deleted file mode 100644 index a95dd40..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-12.canonical +++ /dev/null @@ -1,9 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "block" - : !!map { - ? 
!!str "key" - : !!str "value" - } -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-12.data b/libs/PyYAML-3.10/tests/data/spec-10-12.data deleted file mode 100644 index 5521443..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-12.data +++ /dev/null @@ -1,3 +0,0 @@ -block: # Block - # mapping - key: value diff --git a/libs/PyYAML-3.10/tests/data/spec-10-13.canonical b/libs/PyYAML-3.10/tests/data/spec-10-13.canonical deleted file mode 100644 index e183c50..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-13.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "explicit key" - : !!null "", - ? !!str "block key\n" - : !!seq [ - !!str "one", - !!str "two", - ] -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-13.data b/libs/PyYAML-3.10/tests/data/spec-10-13.data deleted file mode 100644 index b5b97db..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-13.data +++ /dev/null @@ -1,5 +0,0 @@ -? explicit key # implicit value -? | - block key -: - one # explicit in-line - - two # block value diff --git a/libs/PyYAML-3.10/tests/data/spec-10-14.canonical b/libs/PyYAML-3.10/tests/data/spec-10-14.canonical deleted file mode 100644 index e87c880..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-14.canonical +++ /dev/null @@ -1,11 +0,0 @@ -%YAML 1.1 ---- -!!map { - ? !!str "plain key" - : !!null "", - ? 
!!str "quoted key" - : !!seq [ - !!str "one", - !!str "two", - ] -} diff --git a/libs/PyYAML-3.10/tests/data/spec-10-14.data b/libs/PyYAML-3.10/tests/data/spec-10-14.data deleted file mode 100644 index 7f5995c..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-14.data +++ /dev/null @@ -1,4 +0,0 @@ -plain key: # empty value -"quoted key": -- one # explicit next-line -- two # block value diff --git a/libs/PyYAML-3.10/tests/data/spec-10-15.canonical b/libs/PyYAML-3.10/tests/data/spec-10-15.canonical deleted file mode 100644 index 85fbbd0..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-15.canonical +++ /dev/null @@ -1,18 +0,0 @@ -%YAML 1.1 ---- -!!seq [ - !!map { - ? !!str "sun" - : !!str "yellow" - }, - !!map { - ? !!map { - ? !!str "earth" - : !!str "blue" - } - : !!map { - ? !!str "moon" - : !!str "white" - } - } -] diff --git a/libs/PyYAML-3.10/tests/data/spec-10-15.data b/libs/PyYAML-3.10/tests/data/spec-10-15.data deleted file mode 100644 index d675cfd..0000000 --- a/libs/PyYAML-3.10/tests/data/spec-10-15.data +++ /dev/null @@ -1,3 +0,0 @@ -- sun: yellow -- ? 
earth: blue - : moon: white diff --git a/libs/PyYAML-3.10/tests/data/str.data b/libs/PyYAML-3.10/tests/data/str.data deleted file mode 100644 index 7cbdb7c..0000000 --- a/libs/PyYAML-3.10/tests/data/str.data +++ /dev/null @@ -1 +0,0 @@ -- abcd diff --git a/libs/PyYAML-3.10/tests/data/str.detect b/libs/PyYAML-3.10/tests/data/str.detect deleted file mode 100644 index 7d5026f..0000000 --- a/libs/PyYAML-3.10/tests/data/str.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:str diff --git a/libs/PyYAML-3.10/tests/data/tags.events b/libs/PyYAML-3.10/tests/data/tags.events deleted file mode 100644 index bb93dce..0000000 --- a/libs/PyYAML-3.10/tests/data/tags.events +++ /dev/null @@ -1,12 +0,0 @@ -- !StreamStart -- !DocumentStart -- !SequenceStart -- !Scalar { value: 'data' } -#- !Scalar { tag: '!', value: 'data' } -- !Scalar { tag: 'tag:yaml.org,2002:str', value: 'data' } -- !Scalar { tag: '!myfunnytag', value: 'data' } -- !Scalar { tag: '!my!ugly!tag', value: 'data' } -- !Scalar { tag: 'tag:my.domain.org,2002:data!? #', value: 'data' } -- !SequenceEnd -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/test_mark.marks b/libs/PyYAML-3.10/tests/data/test_mark.marks deleted file mode 100644 index 7b08ee4..0000000 --- a/libs/PyYAML-3.10/tests/data/test_mark.marks +++ /dev/null @@ -1,38 +0,0 @@ ---- -*The first line. -The last line. ---- -The first*line. -The last line. ---- -The first line.* -The last line. ---- -The first line. -*The last line. ---- -The first line. -The last*line. ---- -The first line. -The last line.* ---- -The first line. -*The selected line. -The last line. ---- -The first line. -The selected*line. -The last line. ---- -The first line. -The selected line.* -The last line. ---- -*The only line. ---- -The only*line. 
---- -The only line.* ---- -Loooooooooooooooooooooooooooooooooooooooooooooong*Liiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiiine diff --git a/libs/PyYAML-3.10/tests/data/timestamp-bugs.code b/libs/PyYAML-3.10/tests/data/timestamp-bugs.code deleted file mode 100644 index b1d6e9c..0000000 --- a/libs/PyYAML-3.10/tests/data/timestamp-bugs.code +++ /dev/null @@ -1,8 +0,0 @@ -[ - datetime.datetime(2001, 12, 15, 3, 29, 43, 100000), - datetime.datetime(2001, 12, 14, 16, 29, 43, 100000), - datetime.datetime(2001, 12, 14, 21, 59, 43, 1010), - datetime.datetime(2001, 12, 14, 21, 59, 43, 0, FixedOffset(60, "+1")), - datetime.datetime(2001, 12, 14, 21, 59, 43, 0, FixedOffset(-90, "-1:30")), - datetime.datetime(2005, 7, 8, 17, 35, 4, 517600), -] diff --git a/libs/PyYAML-3.10/tests/data/timestamp-bugs.data b/libs/PyYAML-3.10/tests/data/timestamp-bugs.data deleted file mode 100644 index 721d290..0000000 --- a/libs/PyYAML-3.10/tests/data/timestamp-bugs.data +++ /dev/null @@ -1,6 +0,0 @@ -- 2001-12-14 21:59:43.10 -5:30 -- 2001-12-14 21:59:43.10 +5:30 -- 2001-12-14 21:59:43.00101 -- 2001-12-14 21:59:43+1 -- 2001-12-14 21:59:43-1:30 -- 2005-07-08 17:35:04.517600 diff --git a/libs/PyYAML-3.10/tests/data/timestamp.data b/libs/PyYAML-3.10/tests/data/timestamp.data deleted file mode 100644 index 7d214ce..0000000 --- a/libs/PyYAML-3.10/tests/data/timestamp.data +++ /dev/null @@ -1,5 +0,0 @@ -- 2001-12-15T02:59:43.1Z -- 2001-12-14t21:59:43.10-05:00 -- 2001-12-14 21:59:43.10 -5 -- 2001-12-15 2:59:43.10 -- 2002-12-14 diff --git a/libs/PyYAML-3.10/tests/data/timestamp.detect b/libs/PyYAML-3.10/tests/data/timestamp.detect deleted file mode 100644 index 2013936..0000000 --- a/libs/PyYAML-3.10/tests/data/timestamp.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:timestamp diff --git a/libs/PyYAML-3.10/tests/data/unacceptable-key.loader-error b/libs/PyYAML-3.10/tests/data/unacceptable-key.loader-error deleted file mode 100644 index d748e37..0000000 --- 
a/libs/PyYAML-3.10/tests/data/unacceptable-key.loader-error +++ /dev/null @@ -1,4 +0,0 @@ ---- -? - foo - - bar -: baz diff --git a/libs/PyYAML-3.10/tests/data/unclosed-bracket.loader-error b/libs/PyYAML-3.10/tests/data/unclosed-bracket.loader-error deleted file mode 100644 index 8c82077..0000000 --- a/libs/PyYAML-3.10/tests/data/unclosed-bracket.loader-error +++ /dev/null @@ -1,6 +0,0 @@ -test: - - [ foo: bar -# comment the rest of the stream to let the scanner detect the problem. -# - baz -#"we could have detected the unclosed bracket on the above line, but this would forbid such syntax as": { -#} diff --git a/libs/PyYAML-3.10/tests/data/unclosed-quoted-scalar.loader-error b/libs/PyYAML-3.10/tests/data/unclosed-quoted-scalar.loader-error deleted file mode 100644 index 8537429..0000000 --- a/libs/PyYAML-3.10/tests/data/unclosed-quoted-scalar.loader-error +++ /dev/null @@ -1,2 +0,0 @@ -'foo - bar diff --git a/libs/PyYAML-3.10/tests/data/undefined-anchor.loader-error b/libs/PyYAML-3.10/tests/data/undefined-anchor.loader-error deleted file mode 100644 index 9469103..0000000 --- a/libs/PyYAML-3.10/tests/data/undefined-anchor.loader-error +++ /dev/null @@ -1,3 +0,0 @@ -- foo -- &bar baz -- *bat diff --git a/libs/PyYAML-3.10/tests/data/undefined-constructor.loader-error b/libs/PyYAML-3.10/tests/data/undefined-constructor.loader-error deleted file mode 100644 index 9a37ccc..0000000 --- a/libs/PyYAML-3.10/tests/data/undefined-constructor.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !foo bar diff --git a/libs/PyYAML-3.10/tests/data/undefined-tag-handle.loader-error b/libs/PyYAML-3.10/tests/data/undefined-tag-handle.loader-error deleted file mode 100644 index 82ba335..0000000 --- a/libs/PyYAML-3.10/tests/data/undefined-tag-handle.loader-error +++ /dev/null @@ -1 +0,0 @@ ---- !foo!bar baz diff --git a/libs/PyYAML-3.10/tests/data/unknown.dumper-error b/libs/PyYAML-3.10/tests/data/unknown.dumper-error deleted file mode 100644 index 83204d2..0000000 --- 
a/libs/PyYAML-3.10/tests/data/unknown.dumper-error +++ /dev/null @@ -1 +0,0 @@ -yaml.safe_dump(object) diff --git a/libs/PyYAML-3.10/tests/data/unsupported-version.emitter-error b/libs/PyYAML-3.10/tests/data/unsupported-version.emitter-error deleted file mode 100644 index f9c6197..0000000 --- a/libs/PyYAML-3.10/tests/data/unsupported-version.emitter-error +++ /dev/null @@ -1,5 +0,0 @@ -- !StreamStart -- !DocumentStart { version: [5,6] } -- !Scalar { value: foo } -- !DocumentEnd -- !StreamEnd diff --git a/libs/PyYAML-3.10/tests/data/utf16be.code b/libs/PyYAML-3.10/tests/data/utf16be.code deleted file mode 100644 index c45b371..0000000 --- a/libs/PyYAML-3.10/tests/data/utf16be.code +++ /dev/null @@ -1 +0,0 @@ -"UTF-16-BE" diff --git a/libs/PyYAML-3.10/tests/data/utf16be.data b/libs/PyYAML-3.10/tests/data/utf16be.data deleted file mode 100644 index 50dcfae..0000000 Binary files a/libs/PyYAML-3.10/tests/data/utf16be.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/utf16le.code b/libs/PyYAML-3.10/tests/data/utf16le.code deleted file mode 100644 index 400530a..0000000 --- a/libs/PyYAML-3.10/tests/data/utf16le.code +++ /dev/null @@ -1 +0,0 @@ -"UTF-16-LE" diff --git a/libs/PyYAML-3.10/tests/data/utf16le.data b/libs/PyYAML-3.10/tests/data/utf16le.data deleted file mode 100644 index 76f5e73..0000000 Binary files a/libs/PyYAML-3.10/tests/data/utf16le.data and /dev/null differ diff --git a/libs/PyYAML-3.10/tests/data/utf8-implicit.code b/libs/PyYAML-3.10/tests/data/utf8-implicit.code deleted file mode 100644 index 29326db..0000000 --- a/libs/PyYAML-3.10/tests/data/utf8-implicit.code +++ /dev/null @@ -1 +0,0 @@ -"implicit UTF-8" diff --git a/libs/PyYAML-3.10/tests/data/utf8-implicit.data b/libs/PyYAML-3.10/tests/data/utf8-implicit.data deleted file mode 100644 index 9d8081e..0000000 --- a/libs/PyYAML-3.10/tests/data/utf8-implicit.data +++ /dev/null @@ -1 +0,0 @@ ---- implicit UTF-8 diff --git a/libs/PyYAML-3.10/tests/data/utf8.code 
b/libs/PyYAML-3.10/tests/data/utf8.code deleted file mode 100644 index dcf11cc..0000000 --- a/libs/PyYAML-3.10/tests/data/utf8.code +++ /dev/null @@ -1 +0,0 @@ -"UTF-8" diff --git a/libs/PyYAML-3.10/tests/data/utf8.data b/libs/PyYAML-3.10/tests/data/utf8.data deleted file mode 100644 index 686f48a..0000000 --- a/libs/PyYAML-3.10/tests/data/utf8.data +++ /dev/null @@ -1 +0,0 @@ ---- UTF-8 diff --git a/libs/PyYAML-3.10/tests/data/value.data b/libs/PyYAML-3.10/tests/data/value.data deleted file mode 100644 index c5b7680..0000000 --- a/libs/PyYAML-3.10/tests/data/value.data +++ /dev/null @@ -1 +0,0 @@ -- = diff --git a/libs/PyYAML-3.10/tests/data/value.detect b/libs/PyYAML-3.10/tests/data/value.detect deleted file mode 100644 index 7c37d02..0000000 --- a/libs/PyYAML-3.10/tests/data/value.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:value diff --git a/libs/PyYAML-3.10/tests/data/yaml.data b/libs/PyYAML-3.10/tests/data/yaml.data deleted file mode 100644 index a4bb3f8..0000000 --- a/libs/PyYAML-3.10/tests/data/yaml.data +++ /dev/null @@ -1,3 +0,0 @@ -- !!yaml '!' 
-- !!yaml '&' -- !!yaml '*' diff --git a/libs/PyYAML-3.10/tests/data/yaml.detect b/libs/PyYAML-3.10/tests/data/yaml.detect deleted file mode 100644 index e2cf189..0000000 --- a/libs/PyYAML-3.10/tests/data/yaml.detect +++ /dev/null @@ -1 +0,0 @@ -tag:yaml.org,2002:yaml diff --git a/libs/PyYAML-3.10/tests/lib/canonical.py b/libs/PyYAML-3.10/tests/lib/canonical.py deleted file mode 100644 index 020e6db..0000000 --- a/libs/PyYAML-3.10/tests/lib/canonical.py +++ /dev/null @@ -1,360 +0,0 @@ - -import yaml, yaml.composer, yaml.constructor, yaml.resolver - -class CanonicalError(yaml.YAMLError): - pass - -class CanonicalScanner: - - def __init__(self, data): - try: - self.data = unicode(data, 'utf-8')+u'\0' - except UnicodeDecodeError: - raise CanonicalError("utf-8 stream is expected") - self.index = 0 - self.tokens = [] - self.scanned = False - - def check_token(self, *choices): - if not self.scanned: - self.scan() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - if not self.scanned: - self.scan() - if self.tokens: - return self.tokens[0] - - def get_token(self, choice=None): - if not self.scanned: - self.scan() - token = self.tokens.pop(0) - if choice and not isinstance(token, choice): - raise CanonicalError("unexpected token "+repr(token)) - return token - - def get_token_value(self): - token = self.get_token() - return token.value - - def scan(self): - self.tokens.append(yaml.StreamStartToken(None, None)) - while True: - self.find_token() - ch = self.data[self.index] - if ch == u'\0': - self.tokens.append(yaml.StreamEndToken(None, None)) - break - elif ch == u'%': - self.tokens.append(self.scan_directive()) - elif ch == u'-' and self.data[self.index:self.index+3] == u'---': - self.index += 3 - self.tokens.append(yaml.DocumentStartToken(None, None)) - elif ch == u'[': - self.index += 1 - self.tokens.append(yaml.FlowSequenceStartToken(None, None)) - 
elif ch == u'{': - self.index += 1 - self.tokens.append(yaml.FlowMappingStartToken(None, None)) - elif ch == u']': - self.index += 1 - self.tokens.append(yaml.FlowSequenceEndToken(None, None)) - elif ch == u'}': - self.index += 1 - self.tokens.append(yaml.FlowMappingEndToken(None, None)) - elif ch == u'?': - self.index += 1 - self.tokens.append(yaml.KeyToken(None, None)) - elif ch == u':': - self.index += 1 - self.tokens.append(yaml.ValueToken(None, None)) - elif ch == u',': - self.index += 1 - self.tokens.append(yaml.FlowEntryToken(None, None)) - elif ch == u'*' or ch == u'&': - self.tokens.append(self.scan_alias()) - elif ch == u'!': - self.tokens.append(self.scan_tag()) - elif ch == u'"': - self.tokens.append(self.scan_scalar()) - else: - raise CanonicalError("invalid token") - self.scanned = True - - DIRECTIVE = u'%YAML 1.1' - - def scan_directive(self): - if self.data[self.index:self.index+len(self.DIRECTIVE)] == self.DIRECTIVE and \ - self.data[self.index+len(self.DIRECTIVE)] in u' \n\0': - self.index += len(self.DIRECTIVE) - return yaml.DirectiveToken('YAML', (1, 1), None, None) - else: - raise CanonicalError("invalid directive") - - def scan_alias(self): - if self.data[self.index] == u'*': - TokenClass = yaml.AliasToken - else: - TokenClass = yaml.AnchorToken - self.index += 1 - start = self.index - while self.data[self.index] not in u', \n\0': - self.index += 1 - value = self.data[start:self.index] - return TokenClass(value, None, None) - - def scan_tag(self): - self.index += 1 - start = self.index - while self.data[self.index] not in u' \n\0': - self.index += 1 - value = self.data[start:self.index] - if not value: - value = u'!' 
- elif value[0] == u'!': - value = 'tag:yaml.org,2002:'+value[1:] - elif value[0] == u'<' and value[-1] == u'>': - value = value[1:-1] - else: - value = u'!'+value - return yaml.TagToken(value, None, None) - - QUOTE_CODES = { - 'x': 2, - 'u': 4, - 'U': 8, - } - - QUOTE_REPLACES = { - u'\\': u'\\', - u'\"': u'\"', - u' ': u' ', - u'a': u'\x07', - u'b': u'\x08', - u'e': u'\x1B', - u'f': u'\x0C', - u'n': u'\x0A', - u'r': u'\x0D', - u't': u'\x09', - u'v': u'\x0B', - u'N': u'\u0085', - u'L': u'\u2028', - u'P': u'\u2029', - u'_': u'_', - u'0': u'\x00', - - } - - def scan_scalar(self): - self.index += 1 - chunks = [] - start = self.index - ignore_spaces = False - while self.data[self.index] != u'"': - if self.data[self.index] == u'\\': - ignore_spaces = False - chunks.append(self.data[start:self.index]) - self.index += 1 - ch = self.data[self.index] - self.index += 1 - if ch == u'\n': - ignore_spaces = True - elif ch in self.QUOTE_CODES: - length = self.QUOTE_CODES[ch] - code = int(self.data[self.index:self.index+length], 16) - chunks.append(unichr(code)) - self.index += length - else: - if ch not in self.QUOTE_REPLACES: - raise CanonicalError("invalid escape code") - chunks.append(self.QUOTE_REPLACES[ch]) - start = self.index - elif self.data[self.index] == u'\n': - chunks.append(self.data[start:self.index]) - chunks.append(u' ') - self.index += 1 - start = self.index - ignore_spaces = True - elif ignore_spaces and self.data[self.index] == u' ': - self.index += 1 - start = self.index - else: - ignore_spaces = False - self.index += 1 - chunks.append(self.data[start:self.index]) - self.index += 1 - return yaml.ScalarToken(u''.join(chunks), False, None, None) - - def find_token(self): - found = False - while not found: - while self.data[self.index] in u' \t': - self.index += 1 - if self.data[self.index] == u'#': - while self.data[self.index] != u'\n': - self.index += 1 - if self.data[self.index] == u'\n': - self.index += 1 - else: - found = True - -class CanonicalParser: - 
- def __init__(self): - self.events = [] - self.parsed = False - - def dispose(self): - pass - - # stream: STREAM-START document* STREAM-END - def parse_stream(self): - self.get_token(yaml.StreamStartToken) - self.events.append(yaml.StreamStartEvent(None, None)) - while not self.check_token(yaml.StreamEndToken): - if self.check_token(yaml.DirectiveToken, yaml.DocumentStartToken): - self.parse_document() - else: - raise CanonicalError("document is expected, got "+repr(self.tokens[0])) - self.get_token(yaml.StreamEndToken) - self.events.append(yaml.StreamEndEvent(None, None)) - - # document: DIRECTIVE? DOCUMENT-START node - def parse_document(self): - node = None - if self.check_token(yaml.DirectiveToken): - self.get_token(yaml.DirectiveToken) - self.get_token(yaml.DocumentStartToken) - self.events.append(yaml.DocumentStartEvent(None, None)) - self.parse_node() - self.events.append(yaml.DocumentEndEvent(None, None)) - - # node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping) - def parse_node(self): - if self.check_token(yaml.AliasToken): - self.events.append(yaml.AliasEvent(self.get_token_value(), None, None)) - else: - anchor = None - if self.check_token(yaml.AnchorToken): - anchor = self.get_token_value() - tag = None - if self.check_token(yaml.TagToken): - tag = self.get_token_value() - if self.check_token(yaml.ScalarToken): - self.events.append(yaml.ScalarEvent(anchor, tag, (False, False), self.get_token_value(), None, None)) - elif self.check_token(yaml.FlowSequenceStartToken): - self.events.append(yaml.SequenceStartEvent(anchor, tag, None, None)) - self.parse_sequence() - elif self.check_token(yaml.FlowMappingStartToken): - self.events.append(yaml.MappingStartEvent(anchor, tag, None, None)) - self.parse_mapping() - else: - raise CanonicalError("SCALAR, '[', or '{' is expected, got "+repr(self.tokens[0])) - - # sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? 
SEQUENCE-END - def parse_sequence(self): - self.get_token(yaml.FlowSequenceStartToken) - if not self.check_token(yaml.FlowSequenceEndToken): - self.parse_node() - while not self.check_token(yaml.FlowSequenceEndToken): - self.get_token(yaml.FlowEntryToken) - if not self.check_token(yaml.FlowSequenceEndToken): - self.parse_node() - self.get_token(yaml.FlowSequenceEndToken) - self.events.append(yaml.SequenceEndEvent(None, None)) - - # mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END - def parse_mapping(self): - self.get_token(yaml.FlowMappingStartToken) - if not self.check_token(yaml.FlowMappingEndToken): - self.parse_map_entry() - while not self.check_token(yaml.FlowMappingEndToken): - self.get_token(yaml.FlowEntryToken) - if not self.check_token(yaml.FlowMappingEndToken): - self.parse_map_entry() - self.get_token(yaml.FlowMappingEndToken) - self.events.append(yaml.MappingEndEvent(None, None)) - - # map_entry: KEY node VALUE node - def parse_map_entry(self): - self.get_token(yaml.KeyToken) - self.parse_node() - self.get_token(yaml.ValueToken) - self.parse_node() - - def parse(self): - self.parse_stream() - self.parsed = True - - def get_event(self): - if not self.parsed: - self.parse() - return self.events.pop(0) - - def check_event(self, *choices): - if not self.parsed: - self.parse() - if self.events: - if not choices: - return True - for choice in choices: - if isinstance(self.events[0], choice): - return True - return False - - def peek_event(self): - if not self.parsed: - self.parse() - return self.events[0] - -class CanonicalLoader(CanonicalScanner, CanonicalParser, - yaml.composer.Composer, yaml.constructor.Constructor, yaml.resolver.Resolver): - - def __init__(self, stream): - if hasattr(stream, 'read'): - stream = stream.read() - CanonicalScanner.__init__(self, stream) - CanonicalParser.__init__(self) - yaml.composer.Composer.__init__(self) - yaml.constructor.Constructor.__init__(self) - yaml.resolver.Resolver.__init__(self) - 
-yaml.CanonicalLoader = CanonicalLoader - -def canonical_scan(stream): - return yaml.scan(stream, Loader=CanonicalLoader) - -yaml.canonical_scan = canonical_scan - -def canonical_parse(stream): - return yaml.parse(stream, Loader=CanonicalLoader) - -yaml.canonical_parse = canonical_parse - -def canonical_compose(stream): - return yaml.compose(stream, Loader=CanonicalLoader) - -yaml.canonical_compose = canonical_compose - -def canonical_compose_all(stream): - return yaml.compose_all(stream, Loader=CanonicalLoader) - -yaml.canonical_compose_all = canonical_compose_all - -def canonical_load(stream): - return yaml.load(stream, Loader=CanonicalLoader) - -yaml.canonical_load = canonical_load - -def canonical_load_all(stream): - return yaml.load_all(stream, Loader=CanonicalLoader) - -yaml.canonical_load_all = canonical_load_all - diff --git a/libs/PyYAML-3.10/tests/lib/test_all.py b/libs/PyYAML-3.10/tests/lib/test_all.py deleted file mode 100644 index fec4ae4..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_all.py +++ /dev/null @@ -1,15 +0,0 @@ - -import sys, yaml, test_appliance - -def main(args=None): - collections = [] - import test_yaml - collections.append(test_yaml) - if yaml.__with_libyaml__: - import test_yaml_ext - collections.append(test_yaml_ext) - test_appliance.run(collections, args) - -if __name__ == '__main__': - main() - diff --git a/libs/PyYAML-3.10/tests/lib/test_appliance.py b/libs/PyYAML-3.10/tests/lib/test_appliance.py deleted file mode 100644 index d50d5a2..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_appliance.py +++ /dev/null @@ -1,151 +0,0 @@ - -import sys, os, os.path, types, traceback, pprint - -DATA = 'tests/data' - -def find_test_functions(collections): - if not isinstance(collections, list): - collections = [collections] - functions = [] - for collection in collections: - if not isinstance(collection, dict): - collection = vars(collection) - keys = collection.keys() - keys.sort() - for key in keys: - value = collection[key] - if 
isinstance(value, types.FunctionType) and hasattr(value, 'unittest'): - functions.append(value) - return functions - -def find_test_filenames(directory): - filenames = {} - for filename in os.listdir(directory): - if os.path.isfile(os.path.join(directory, filename)): - base, ext = os.path.splitext(filename) - if base.endswith('-py3'): - continue - filenames.setdefault(base, []).append(ext) - filenames = filenames.items() - filenames.sort() - return filenames - -def parse_arguments(args): - if args is None: - args = sys.argv[1:] - verbose = False - if '-v' in args: - verbose = True - args.remove('-v') - if '--verbose' in args: - verbose = True - if 'YAML_TEST_VERBOSE' in os.environ: - verbose = True - include_functions = [] - if args: - include_functions.append(args.pop(0)) - if 'YAML_TEST_FUNCTIONS' in os.environ: - include_functions.extend(os.environ['YAML_TEST_FUNCTIONS'].split()) - include_filenames = [] - include_filenames.extend(args) - if 'YAML_TEST_FILENAMES' in os.environ: - include_filenames.extend(os.environ['YAML_TEST_FILENAMES'].split()) - return include_functions, include_filenames, verbose - -def execute(function, filenames, verbose): - if hasattr(function, 'unittest_name'): - name = function.unittest_name - else: - name = function.func_name - if verbose: - sys.stdout.write('='*75+'\n') - sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames))) - try: - function(verbose=verbose, *filenames) - except Exception, exc: - info = sys.exc_info() - if isinstance(exc, AssertionError): - kind = 'FAILURE' - else: - kind = 'ERROR' - if verbose: - traceback.print_exc(limit=1, file=sys.stdout) - else: - sys.stdout.write(kind[0]) - sys.stdout.flush() - else: - kind = 'SUCCESS' - info = None - if not verbose: - sys.stdout.write('.') - sys.stdout.flush() - return (name, filenames, kind, info) - -def display(results, verbose): - if results and not verbose: - sys.stdout.write('\n') - total = len(results) - failures = 0 - errors = 0 - for name, filenames, kind, 
info in results: - if kind == 'SUCCESS': - continue - if kind == 'FAILURE': - failures += 1 - if kind == 'ERROR': - errors += 1 - sys.stdout.write('='*75+'\n') - sys.stdout.write('%s(%s): %s\n' % (name, ', '.join(filenames), kind)) - if kind == 'ERROR': - traceback.print_exception(file=sys.stdout, *info) - else: - sys.stdout.write('Traceback (most recent call last):\n') - traceback.print_tb(info[2], file=sys.stdout) - sys.stdout.write('%s: see below\n' % info[0].__name__) - sys.stdout.write('~'*75+'\n') - for arg in info[1].args: - pprint.pprint(arg, stream=sys.stdout) - for filename in filenames: - sys.stdout.write('-'*75+'\n') - sys.stdout.write('%s:\n' % filename) - data = open(filename, 'rb').read() - sys.stdout.write(data) - if data and data[-1] != '\n': - sys.stdout.write('\n') - sys.stdout.write('='*75+'\n') - sys.stdout.write('TESTS: %s\n' % total) - if failures: - sys.stdout.write('FAILURES: %s\n' % failures) - if errors: - sys.stdout.write('ERRORS: %s\n' % errors) - -def run(collections, args=None): - test_functions = find_test_functions(collections) - test_filenames = find_test_filenames(DATA) - include_functions, include_filenames, verbose = parse_arguments(args) - results = [] - for function in test_functions: - if include_functions and function.func_name not in include_functions: - continue - if function.unittest: - for base, exts in test_filenames: - if include_filenames and base not in include_filenames: - continue - filenames = [] - for ext in function.unittest: - if ext not in exts: - break - filenames.append(os.path.join(DATA, base+ext)) - else: - skip_exts = getattr(function, 'skip', []) - for skip_ext in skip_exts: - if skip_ext in exts: - break - else: - result = execute(function, filenames, verbose) - results.append(result) - else: - result = execute(function, [], verbose) - results.append(result) - display(results, verbose=verbose) - diff --git a/libs/PyYAML-3.10/tests/lib/test_build.py b/libs/PyYAML-3.10/tests/lib/test_build.py deleted file 
mode 100644 index 901e8ed..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_build.py +++ /dev/null @@ -1,10 +0,0 @@ - -if __name__ == '__main__': - import sys, os, distutils.util - build_lib = 'build/lib' - build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])) - sys.path.insert(0, build_lib) - sys.path.insert(0, build_lib_ext) - import test_yaml, test_appliance - test_appliance.run(test_yaml) - diff --git a/libs/PyYAML-3.10/tests/lib/test_build_ext.py b/libs/PyYAML-3.10/tests/lib/test_build_ext.py deleted file mode 100644 index ff195d5..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_build_ext.py +++ /dev/null @@ -1,11 +0,0 @@ - - -if __name__ == '__main__': - import sys, os, distutils.util - build_lib = 'build/lib' - build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])) - sys.path.insert(0, build_lib) - sys.path.insert(0, build_lib_ext) - import test_yaml_ext, test_appliance - test_appliance.run(test_yaml_ext) - diff --git a/libs/PyYAML-3.10/tests/lib/test_canonical.py b/libs/PyYAML-3.10/tests/lib/test_canonical.py deleted file mode 100644 index a851ef2..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_canonical.py +++ /dev/null @@ -1,40 +0,0 @@ - -import yaml, canonical - -def test_canonical_scanner(canonical_filename, verbose=False): - data = open(canonical_filename, 'rb').read() - tokens = list(yaml.canonical_scan(data)) - assert tokens, tokens - if verbose: - for token in tokens: - print token - -test_canonical_scanner.unittest = ['.canonical'] - -def test_canonical_parser(canonical_filename, verbose=False): - data = open(canonical_filename, 'rb').read() - events = list(yaml.canonical_parse(data)) - assert events, events - if verbose: - for event in events: - print event - -test_canonical_parser.unittest = ['.canonical'] - -def test_canonical_error(data_filename, canonical_filename, verbose=False): - data = open(data_filename, 'rb').read() - try: - output = 
list(yaml.canonical_load_all(data)) - except yaml.YAMLError, exc: - if verbose: - print exc - else: - raise AssertionError("expected an exception") - -test_canonical_error.unittest = ['.data', '.canonical'] -test_canonical_error.skip = ['.empty'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_constructor.py b/libs/PyYAML-3.10/tests/lib/test_constructor.py deleted file mode 100644 index beee7b0..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_constructor.py +++ /dev/null @@ -1,275 +0,0 @@ - -import yaml -import pprint - -import datetime -try: - set -except NameError: - from sets import Set as set -import yaml.tokens - -def execute(code): - exec code - return value - -def _make_objects(): - global MyLoader, MyDumper, MyTestClass1, MyTestClass2, MyTestClass3, YAMLObject1, YAMLObject2, \ - AnObject, AnInstance, AState, ACustomState, InitArgs, InitArgsWithState, \ - NewArgs, NewArgsWithState, Reduce, ReduceWithState, MyInt, MyList, MyDict, \ - FixedOffset, today, execute - - class MyLoader(yaml.Loader): - pass - class MyDumper(yaml.Dumper): - pass - - class MyTestClass1: - def __init__(self, x, y=0, z=0): - self.x = x - self.y = y - self.z = z - def __eq__(self, other): - if isinstance(other, MyTestClass1): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - def construct1(constructor, node): - mapping = constructor.construct_mapping(node) - return MyTestClass1(**mapping) - def represent1(representer, native): - return representer.represent_mapping("!tag1", native.__dict__) - - yaml.add_constructor("!tag1", construct1, Loader=MyLoader) - yaml.add_representer(MyTestClass1, represent1, Dumper=MyDumper) - - class MyTestClass2(MyTestClass1, yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = "!tag2" - def from_yaml(cls, constructor, node): - x = constructor.construct_yaml_int(node) - return cls(x=x) - from_yaml = 
classmethod(from_yaml) - def to_yaml(cls, representer, native): - return representer.represent_scalar(cls.yaml_tag, str(native.x)) - to_yaml = classmethod(to_yaml) - - class MyTestClass3(MyTestClass2): - yaml_tag = "!tag3" - def from_yaml(cls, constructor, node): - mapping = constructor.construct_mapping(node) - if '=' in mapping: - x = mapping['='] - del mapping['='] - mapping['x'] = x - return cls(**mapping) - from_yaml = classmethod(from_yaml) - def to_yaml(cls, representer, native): - return representer.represent_mapping(cls.yaml_tag, native.__dict__) - to_yaml = classmethod(to_yaml) - - class YAMLObject1(yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = '!foo' - def __init__(self, my_parameter=None, my_another_parameter=None): - self.my_parameter = my_parameter - self.my_another_parameter = my_another_parameter - def __eq__(self, other): - if isinstance(other, YAMLObject1): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - class YAMLObject2(yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = '!bar' - def __init__(self, foo=1, bar=2, baz=3): - self.foo = foo - self.bar = bar - self.baz = baz - def __getstate__(self): - return {1: self.foo, 2: self.bar, 3: self.baz} - def __setstate__(self, state): - self.foo = state[1] - self.bar = state[2] - self.baz = state[3] - def __eq__(self, other): - if isinstance(other, YAMLObject2): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - class AnObject(object): - def __new__(cls, foo=None, bar=None, baz=None): - self = object.__new__(cls) - self.foo = foo - self.bar = bar - self.baz = baz - return self - def __cmp__(self, other): - return cmp((type(self), self.foo, self.bar, self.baz), - (type(other), other.foo, other.bar, other.baz)) - def __eq__(self, other): - return type(self) is type(other) and \ - (self.foo, self.bar, self.baz) == (other.foo, other.bar, 
other.baz) - - class AnInstance: - def __init__(self, foo=None, bar=None, baz=None): - self.foo = foo - self.bar = bar - self.baz = baz - def __cmp__(self, other): - return cmp((type(self), self.foo, self.bar, self.baz), - (type(other), other.foo, other.bar, other.baz)) - def __eq__(self, other): - return type(self) is type(other) and \ - (self.foo, self.bar, self.baz) == (other.foo, other.bar, other.baz) - - class AState(AnInstance): - def __getstate__(self): - return { - '_foo': self.foo, - '_bar': self.bar, - '_baz': self.baz, - } - def __setstate__(self, state): - self.foo = state['_foo'] - self.bar = state['_bar'] - self.baz = state['_baz'] - - class ACustomState(AnInstance): - def __getstate__(self): - return (self.foo, self.bar, self.baz) - def __setstate__(self, state): - self.foo, self.bar, self.baz = state - - class InitArgs(AnInstance): - def __getinitargs__(self): - return (self.foo, self.bar, self.baz) - def __getstate__(self): - return {} - - class InitArgsWithState(AnInstance): - def __getinitargs__(self): - return (self.foo, self.bar) - def __getstate__(self): - return self.baz - def __setstate__(self, state): - self.baz = state - - class NewArgs(AnObject): - def __getnewargs__(self): - return (self.foo, self.bar, self.baz) - def __getstate__(self): - return {} - - class NewArgsWithState(AnObject): - def __getnewargs__(self): - return (self.foo, self.bar) - def __getstate__(self): - return self.baz - def __setstate__(self, state): - self.baz = state - - class Reduce(AnObject): - def __reduce__(self): - return self.__class__, (self.foo, self.bar, self.baz) - - class ReduceWithState(AnObject): - def __reduce__(self): - return self.__class__, (self.foo, self.bar), self.baz - def __setstate__(self, state): - self.baz = state - - class MyInt(int): - def __eq__(self, other): - return type(self) is type(other) and int(self) == int(other) - - class MyList(list): - def __init__(self, n=1): - self.extend([None]*n) - def __eq__(self, other): - return 
type(self) is type(other) and list(self) == list(other) - - class MyDict(dict): - def __init__(self, n=1): - for k in range(n): - self[k] = None - def __eq__(self, other): - return type(self) is type(other) and dict(self) == dict(other) - - class FixedOffset(datetime.tzinfo): - def __init__(self, offset, name): - self.__offset = datetime.timedelta(minutes=offset) - self.__name = name - def utcoffset(self, dt): - return self.__offset - def tzname(self, dt): - return self.__name - def dst(self, dt): - return datetime.timedelta(0) - - today = datetime.date.today() - -def _load_code(expression): - return eval(expression) - -def _serialize_value(data): - if isinstance(data, list): - return '[%s]' % ', '.join(map(_serialize_value, data)) - elif isinstance(data, dict): - items = [] - for key, value in data.items(): - key = _serialize_value(key) - value = _serialize_value(value) - items.append("%s: %s" % (key, value)) - items.sort() - return '{%s}' % ', '.join(items) - elif isinstance(data, datetime.datetime): - return repr(data.utctimetuple()) - elif isinstance(data, unicode): - return data.encode('utf-8') - elif isinstance(data, float) and data != data: - return '?' 
- else: - return str(data) - -def test_constructor_types(data_filename, code_filename, verbose=False): - _make_objects() - native1 = None - native2 = None - try: - native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader)) - if len(native1) == 1: - native1 = native1[0] - native2 = _load_code(open(code_filename, 'rb').read()) - try: - if native1 == native2: - return - except TypeError: - pass - if verbose: - print "SERIALIZED NATIVE1:" - print _serialize_value(native1) - print "SERIALIZED NATIVE2:" - print _serialize_value(native2) - assert _serialize_value(native1) == _serialize_value(native2), (native1, native2) - finally: - if verbose: - print "NATIVE1:" - pprint.pprint(native1) - print "NATIVE2:" - pprint.pprint(native2) - -test_constructor_types.unittest = ['.data', '.code'] - -if __name__ == '__main__': - import sys, test_constructor - sys.modules['test_constructor'] = sys.modules['__main__'] - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_emitter.py b/libs/PyYAML-3.10/tests/lib/test_emitter.py deleted file mode 100644 index 61fd941..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_emitter.py +++ /dev/null @@ -1,100 +0,0 @@ - -import yaml - -def _compare_events(events1, events2): - assert len(events1) == len(events2), (events1, events2) - for event1, event2 in zip(events1, events2): - assert event1.__class__ == event2.__class__, (event1, event2) - if isinstance(event1, yaml.NodeEvent): - assert event1.anchor == event2.anchor, (event1, event2) - if isinstance(event1, yaml.CollectionStartEvent): - assert event1.tag == event2.tag, (event1, event2) - if isinstance(event1, yaml.ScalarEvent): - if True not in event1.implicit+event2.implicit: - assert event1.tag == event2.tag, (event1, event2) - assert event1.value == event2.value, (event1, event2) - -def test_emitter_on_data(data_filename, canonical_filename, verbose=False): - events = list(yaml.parse(open(data_filename, 'rb'))) - output = 
yaml.emit(events) - if verbose: - print "OUTPUT:" - print output - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_on_data.unittest = ['.data', '.canonical'] - -def test_emitter_on_canonical(canonical_filename, verbose=False): - events = list(yaml.parse(open(canonical_filename, 'rb'))) - for canonical in [False, True]: - output = yaml.emit(events, canonical=canonical) - if verbose: - print "OUTPUT (canonical=%s):" % canonical - print output - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_on_canonical.unittest = ['.canonical'] - -def test_emitter_styles(data_filename, canonical_filename, verbose=False): - for filename in [data_filename, canonical_filename]: - events = list(yaml.parse(open(filename, 'rb'))) - for flow_style in [False, True]: - for style in ['|', '>', '"', '\'', '']: - styled_events = [] - for event in events: - if isinstance(event, yaml.ScalarEvent): - event = yaml.ScalarEvent(event.anchor, event.tag, - event.implicit, event.value, style=style) - elif isinstance(event, yaml.SequenceStartEvent): - event = yaml.SequenceStartEvent(event.anchor, event.tag, - event.implicit, flow_style=flow_style) - elif isinstance(event, yaml.MappingStartEvent): - event = yaml.MappingStartEvent(event.anchor, event.tag, - event.implicit, flow_style=flow_style) - styled_events.append(event) - output = yaml.emit(styled_events) - if verbose: - print "OUTPUT (filename=%r, flow_style=%r, style=%r)" % (filename, flow_style, style) - print output - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_styles.unittest = ['.data', '.canonical'] - -class EventsLoader(yaml.Loader): - - def construct_event(self, node): - if isinstance(node, yaml.ScalarNode): - mapping = {} - else: - mapping = self.construct_mapping(node) - class_name = str(node.tag[1:])+'Event' - if class_name in ['AliasEvent', 'ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']: - 
mapping.setdefault('anchor', None) - if class_name in ['ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']: - mapping.setdefault('tag', None) - if class_name in ['SequenceStartEvent', 'MappingStartEvent']: - mapping.setdefault('implicit', True) - if class_name == 'ScalarEvent': - mapping.setdefault('implicit', (False, True)) - mapping.setdefault('value', '') - value = getattr(yaml, class_name)(**mapping) - return value - -EventsLoader.add_constructor(None, EventsLoader.construct_event) - -def test_emitter_events(events_filename, verbose=False): - events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader)) - output = yaml.emit(events) - if verbose: - print "OUTPUT:" - print output - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_errors.py b/libs/PyYAML-3.10/tests/lib/test_errors.py deleted file mode 100644 index 7dc9388..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_errors.py +++ /dev/null @@ -1,67 +0,0 @@ - -import yaml, test_emitter - -def test_loader_error(error_filename, verbose=False): - try: - list(yaml.load_all(open(error_filename, 'rb'))) - except yaml.YAMLError, exc: - if verbose: - print "%s:" % exc.__class__.__name__, exc - else: - raise AssertionError("expected an exception") - -test_loader_error.unittest = ['.loader-error'] - -def test_loader_error_string(error_filename, verbose=False): - try: - list(yaml.load_all(open(error_filename, 'rb').read())) - except yaml.YAMLError, exc: - if verbose: - print "%s:" % exc.__class__.__name__, exc - else: - raise AssertionError("expected an exception") - -test_loader_error_string.unittest = ['.loader-error'] - -def test_loader_error_single(error_filename, verbose=False): - try: - yaml.load(open(error_filename, 'rb').read()) - except yaml.YAMLError, exc: - if verbose: - print "%s:" % exc.__class__.__name__, exc - else: - raise 
AssertionError("expected an exception") - -test_loader_error_single.unittest = ['.single-loader-error'] - -def test_emitter_error(error_filename, verbose=False): - events = list(yaml.load(open(error_filename, 'rb'), - Loader=test_emitter.EventsLoader)) - try: - yaml.emit(events) - except yaml.YAMLError, exc: - if verbose: - print "%s:" % exc.__class__.__name__, exc - else: - raise AssertionError("expected an exception") - -test_emitter_error.unittest = ['.emitter-error'] - -def test_dumper_error(error_filename, verbose=False): - code = open(error_filename, 'rb').read() - try: - import yaml - from StringIO import StringIO - exec code - except yaml.YAMLError, exc: - if verbose: - print "%s:" % exc.__class__.__name__, exc - else: - raise AssertionError("expected an exception") - -test_dumper_error.unittest = ['.dumper-error'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_input_output.py b/libs/PyYAML-3.10/tests/lib/test_input_output.py deleted file mode 100644 index 9ccc8fc..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_input_output.py +++ /dev/null @@ -1,151 +0,0 @@ - -import yaml -import codecs, StringIO, tempfile, os, os.path - -def _unicode_open(file, encoding, errors='strict'): - info = codecs.lookup(encoding) - if isinstance(info, tuple): - reader = info[2] - writer = info[3] - else: - reader = info.streamreader - writer = info.streamwriter - srw = codecs.StreamReaderWriter(file, reader, writer, errors) - srw.encoding = encoding - return srw - -def test_unicode_input(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - value = ' '.join(data.split()) - output = yaml.load(_unicode_open(StringIO.StringIO(data.encode('utf-8')), 'utf-8')) - assert output == value, (output, value) - for input in [data, data.encode('utf-8'), - codecs.BOM_UTF8+data.encode('utf-8'), - codecs.BOM_UTF16_BE+data.encode('utf-16-be'), - 
codecs.BOM_UTF16_LE+data.encode('utf-16-le')]: - if verbose: - print "INPUT:", repr(input[:10]), "..." - output = yaml.load(input) - assert output == value, (output, value) - output = yaml.load(StringIO.StringIO(input)) - assert output == value, (output, value) - -test_unicode_input.unittest = ['.unicode'] - -def test_unicode_input_errors(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - for input in [data.encode('latin1', 'ignore'), - data.encode('utf-16-be'), data.encode('utf-16-le'), - codecs.BOM_UTF8+data.encode('utf-16-be'), - codecs.BOM_UTF16_BE+data.encode('utf-16-le'), - codecs.BOM_UTF16_LE+data.encode('utf-8')+'!']: - try: - yaml.load(input) - except yaml.YAMLError, exc: - if verbose: - print exc - else: - raise AssertionError("expected an exception") - try: - yaml.load(StringIO.StringIO(input)) - except yaml.YAMLError, exc: - if verbose: - print exc - else: - raise AssertionError("expected an exception") - -test_unicode_input_errors.unittest = ['.unicode'] - -def test_unicode_output(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - value = ' '.join(data.split()) - for allow_unicode in [False, True]: - data1 = yaml.dump(value, allow_unicode=allow_unicode) - for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']: - stream = StringIO.StringIO() - yaml.dump(value, _unicode_open(stream, 'utf-8'), encoding=encoding, allow_unicode=allow_unicode) - data2 = stream.getvalue() - data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode) - stream = StringIO.StringIO() - yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode) - data4 = stream.getvalue() - for copy in [data1, data2, data3, data4]: - if allow_unicode: - try: - copy[4:].encode('ascii') - except (UnicodeDecodeError, UnicodeEncodeError), exc: - if verbose: - print exc - else: - raise AssertionError("expected an exception") - else: - copy[4:].encode('ascii') - assert 
isinstance(data1, str), (type(data1), encoding) - data1.decode('utf-8') - assert isinstance(data2, str), (type(data2), encoding) - data2.decode('utf-8') - if encoding is None: - assert isinstance(data3, unicode), (type(data3), encoding) - assert isinstance(data4, unicode), (type(data4), encoding) - else: - assert isinstance(data3, str), (type(data3), encoding) - data3.decode(encoding) - assert isinstance(data4, str), (type(data4), encoding) - data4.decode(encoding) - -test_unicode_output.unittest = ['.unicode'] - -def test_file_output(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - handle, filename = tempfile.mkstemp() - os.close(handle) - try: - stream = StringIO.StringIO() - yaml.dump(data, stream, allow_unicode=True) - data1 = stream.getvalue() - stream = open(filename, 'wb') - yaml.dump(data, stream, allow_unicode=True) - stream.close() - data2 = open(filename, 'rb').read() - stream = open(filename, 'wb') - yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True) - stream.close() - data3 = open(filename, 'rb').read().decode('utf-16-le')[1:].encode('utf-8') - stream = _unicode_open(open(filename, 'wb'), 'utf-8') - yaml.dump(data, stream, allow_unicode=True) - stream.close() - data4 = open(filename, 'rb').read() - assert data1 == data2, (data1, data2) - assert data1 == data3, (data1, data3) - assert data1 == data4, (data1, data4) - finally: - if os.path.exists(filename): - os.unlink(filename) - -test_file_output.unittest = ['.unicode'] - -def test_unicode_transfer(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']: - input = data - if encoding is not None: - input = (u'\ufeff'+input).encode(encoding) - output1 = yaml.emit(yaml.parse(input), allow_unicode=True) - stream = StringIO.StringIO() - yaml.emit(yaml.parse(input), _unicode_open(stream, 'utf-8'), - allow_unicode=True) - output2 = stream.getvalue() 
- if encoding is None: - assert isinstance(output1, unicode), (type(output1), encoding) - else: - assert isinstance(output1, str), (type(output1), encoding) - output1.decode(encoding) - assert isinstance(output2, str), (type(output2), encoding) - output2.decode('utf-8') - -test_unicode_transfer.unittest = ['.unicode'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_mark.py b/libs/PyYAML-3.10/tests/lib/test_mark.py deleted file mode 100644 index f30a121..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_mark.py +++ /dev/null @@ -1,32 +0,0 @@ - -import yaml - -def test_marks(marks_filename, verbose=False): - inputs = open(marks_filename, 'rb').read().split('---\n')[1:] - for input in inputs: - index = 0 - line = 0 - column = 0 - while input[index] != '*': - if input[index] == '\n': - line += 1 - column = 0 - else: - column += 1 - index += 1 - mark = yaml.Mark(marks_filename, index, line, column, unicode(input), index) - snippet = mark.get_snippet(indent=2, max_length=79) - if verbose: - print snippet - assert isinstance(snippet, str), type(snippet) - assert snippet.count('\n') == 1, snippet.count('\n') - data, pointer = snippet.split('\n') - assert len(data) < 82, len(data) - assert data[len(pointer)-1] == '*', data[len(pointer)-1] - -test_marks.unittest = ['.marks'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_reader.py b/libs/PyYAML-3.10/tests/lib/test_reader.py deleted file mode 100644 index 3576ae6..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_reader.py +++ /dev/null @@ -1,35 +0,0 @@ - -import yaml.reader -import codecs - -def _run_reader(data, verbose): - try: - stream = yaml.reader.Reader(data) - while stream.peek() != u'\0': - stream.forward() - except yaml.reader.ReaderError, exc: - if verbose: - print exc - else: - raise AssertionError("expected an exception") - -def 
test_stream_error(error_filename, verbose=False): - _run_reader(open(error_filename, 'rb'), verbose) - _run_reader(open(error_filename, 'rb').read(), verbose) - for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']: - try: - data = unicode(open(error_filename, 'rb').read(), encoding) - break - except UnicodeDecodeError: - pass - else: - return - _run_reader(data, verbose) - _run_reader(codecs.open(error_filename, encoding=encoding), verbose) - -test_stream_error.unittest = ['.stream-error'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_recursive.py b/libs/PyYAML-3.10/tests/lib/test_recursive.py deleted file mode 100644 index 6707fd4..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_recursive.py +++ /dev/null @@ -1,50 +0,0 @@ - -import yaml - -class AnInstance: - - def __init__(self, foo, bar): - self.foo = foo - self.bar = bar - - def __repr__(self): - try: - return "%s(foo=%r, bar=%r)" % (self.__class__.__name__, - self.foo, self.bar) - except RuntimeError: - return "%s(foo=..., bar=...)" % self.__class__.__name__ - -class AnInstanceWithState(AnInstance): - - def __getstate__(self): - return {'attributes': [self.foo, self.bar]} - - def __setstate__(self, state): - self.foo, self.bar = state['attributes'] - -def test_recursive(recursive_filename, verbose=False): - exec open(recursive_filename, 'rb').read() - value1 = value - output1 = None - value2 = None - output2 = None - try: - output1 = yaml.dump(value1) - value2 = yaml.load(output1) - output2 = yaml.dump(value2) - assert output1 == output2, (output1, output2) - finally: - if verbose: - #print "VALUE1:", value1 - #print "VALUE2:", value2 - print "OUTPUT1:" - print output1 - print "OUTPUT2:" - print output2 - -test_recursive.unittest = ['.recursive'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_representer.py 
b/libs/PyYAML-3.10/tests/lib/test_representer.py deleted file mode 100644 index a82a32a..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_representer.py +++ /dev/null @@ -1,43 +0,0 @@ - -import yaml -import test_constructor -import pprint - -def test_representer_types(code_filename, verbose=False): - test_constructor._make_objects() - for allow_unicode in [False, True]: - for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']: - native1 = test_constructor._load_code(open(code_filename, 'rb').read()) - native2 = None - try: - output = yaml.dump(native1, Dumper=test_constructor.MyDumper, - allow_unicode=allow_unicode, encoding=encoding) - native2 = yaml.load(output, Loader=test_constructor.MyLoader) - try: - if native1 == native2: - continue - except TypeError: - pass - value1 = test_constructor._serialize_value(native1) - value2 = test_constructor._serialize_value(native2) - if verbose: - print "SERIALIZED NATIVE1:" - print value1 - print "SERIALIZED NATIVE2:" - print value2 - assert value1 == value2, (native1, native2) - finally: - if verbose: - print "NATIVE1:" - pprint.pprint(native1) - print "NATIVE2:" - pprint.pprint(native2) - print "OUTPUT:" - print output - -test_representer_types.unittest = ['.code'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_resolver.py b/libs/PyYAML-3.10/tests/lib/test_resolver.py deleted file mode 100644 index 5566750..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_resolver.py +++ /dev/null @@ -1,92 +0,0 @@ - -import yaml -import pprint - -def test_implicit_resolver(data_filename, detect_filename, verbose=False): - correct_tag = None - node = None - try: - correct_tag = open(detect_filename, 'rb').read().strip() - node = yaml.compose(open(data_filename, 'rb')) - assert isinstance(node, yaml.SequenceNode), node - for scalar in node.value: - assert isinstance(scalar, yaml.ScalarNode), scalar - assert scalar.tag == correct_tag, (scalar.tag, correct_tag) - 
finally: - if verbose: - print "CORRECT TAG:", correct_tag - if hasattr(node, 'value'): - print "CHILDREN:" - pprint.pprint(node.value) - -test_implicit_resolver.unittest = ['.data', '.detect'] - -def _make_path_loader_and_dumper(): - global MyLoader, MyDumper - - class MyLoader(yaml.Loader): - pass - class MyDumper(yaml.Dumper): - pass - - yaml.add_path_resolver(u'!root', [], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver(u'!root/scalar', [], str, - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver(u'!root/key11/key12/*', ['key11', 'key12'], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver(u'!root/key21/1/*', ['key21', 1], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver(u'!root/key31/*/*/key14/map', ['key31', None, None, 'key14'], dict, - Loader=MyLoader, Dumper=MyDumper) - - return MyLoader, MyDumper - -def _convert_node(node): - if isinstance(node, yaml.ScalarNode): - return (node.tag, node.value) - elif isinstance(node, yaml.SequenceNode): - value = [] - for item in node.value: - value.append(_convert_node(item)) - return (node.tag, value) - elif isinstance(node, yaml.MappingNode): - value = [] - for key, item in node.value: - value.append((_convert_node(key), _convert_node(item))) - return (node.tag, value) - -def test_path_resolver_loader(data_filename, path_filename, verbose=False): - _make_path_loader_and_dumper() - nodes1 = list(yaml.compose_all(open(data_filename, 'rb').read(), Loader=MyLoader)) - nodes2 = list(yaml.compose_all(open(path_filename, 'rb').read())) - try: - for node1, node2 in zip(nodes1, nodes2): - data1 = _convert_node(node1) - data2 = _convert_node(node2) - assert data1 == data2, (data1, data2) - finally: - if verbose: - print yaml.serialize_all(nodes1) - -test_path_resolver_loader.unittest = ['.data', '.path'] - -def test_path_resolver_dumper(data_filename, path_filename, verbose=False): - _make_path_loader_and_dumper() - for filename in [data_filename, path_filename]: - output = 
yaml.serialize_all(yaml.compose_all(open(filename, 'rb')), Dumper=MyDumper) - if verbose: - print output - nodes1 = yaml.compose_all(output) - nodes2 = yaml.compose_all(open(data_filename, 'rb')) - for node1, node2 in zip(nodes1, nodes2): - data1 = _convert_node(node1) - data2 = _convert_node(node2) - assert data1 == data2, (data1, data2) - -test_path_resolver_dumper.unittest = ['.data', '.path'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_structure.py b/libs/PyYAML-3.10/tests/lib/test_structure.py deleted file mode 100644 index 61bcb80..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_structure.py +++ /dev/null @@ -1,187 +0,0 @@ - -import yaml, canonical -import pprint - -def _convert_structure(loader): - if loader.check_event(yaml.ScalarEvent): - event = loader.get_event() - if event.tag or event.anchor or event.value: - return True - else: - return None - elif loader.check_event(yaml.SequenceStartEvent): - loader.get_event() - sequence = [] - while not loader.check_event(yaml.SequenceEndEvent): - sequence.append(_convert_structure(loader)) - loader.get_event() - return sequence - elif loader.check_event(yaml.MappingStartEvent): - loader.get_event() - mapping = [] - while not loader.check_event(yaml.MappingEndEvent): - key = _convert_structure(loader) - value = _convert_structure(loader) - mapping.append((key, value)) - loader.get_event() - return mapping - elif loader.check_event(yaml.AliasEvent): - loader.get_event() - return '*' - else: - loader.get_event() - return '?' 
- -def test_structure(data_filename, structure_filename, verbose=False): - nodes1 = [] - nodes2 = eval(open(structure_filename, 'rb').read()) - try: - loader = yaml.Loader(open(data_filename, 'rb')) - while loader.check_event(): - if loader.check_event(yaml.StreamStartEvent, yaml.StreamEndEvent, - yaml.DocumentStartEvent, yaml.DocumentEndEvent): - loader.get_event() - continue - nodes1.append(_convert_structure(loader)) - if len(nodes1) == 1: - nodes1 = nodes1[0] - assert nodes1 == nodes2, (nodes1, nodes2) - finally: - if verbose: - print "NODES1:" - pprint.pprint(nodes1) - print "NODES2:" - pprint.pprint(nodes2) - -test_structure.unittest = ['.data', '.structure'] - -def _compare_events(events1, events2, full=False): - assert len(events1) == len(events2), (len(events1), len(events2)) - for event1, event2 in zip(events1, events2): - assert event1.__class__ == event2.__class__, (event1, event2) - if isinstance(event1, yaml.AliasEvent) and full: - assert event1.anchor == event2.anchor, (event1, event2) - if isinstance(event1, (yaml.ScalarEvent, yaml.CollectionStartEvent)): - if (event1.tag not in [None, u'!'] and event2.tag not in [None, u'!']) or full: - assert event1.tag == event2.tag, (event1, event2) - if isinstance(event1, yaml.ScalarEvent): - assert event1.value == event2.value, (event1, event2) - -def test_parser(data_filename, canonical_filename, verbose=False): - events1 = None - events2 = None - try: - events1 = list(yaml.parse(open(data_filename, 'rb'))) - events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb'))) - _compare_events(events1, events2) - finally: - if verbose: - print "EVENTS1:" - pprint.pprint(events1) - print "EVENTS2:" - pprint.pprint(events2) - -test_parser.unittest = ['.data', '.canonical'] - -def test_parser_on_canonical(canonical_filename, verbose=False): - events1 = None - events2 = None - try: - events1 = list(yaml.parse(open(canonical_filename, 'rb'))) - events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb'))) 
- _compare_events(events1, events2, full=True) - finally: - if verbose: - print "EVENTS1:" - pprint.pprint(events1) - print "EVENTS2:" - pprint.pprint(events2) - -test_parser_on_canonical.unittest = ['.canonical'] - -def _compare_nodes(node1, node2): - assert node1.__class__ == node2.__class__, (node1, node2) - assert node1.tag == node2.tag, (node1, node2) - if isinstance(node1, yaml.ScalarNode): - assert node1.value == node2.value, (node1, node2) - else: - assert len(node1.value) == len(node2.value), (node1, node2) - for item1, item2 in zip(node1.value, node2.value): - if not isinstance(item1, tuple): - item1 = (item1,) - item2 = (item2,) - for subnode1, subnode2 in zip(item1, item2): - _compare_nodes(subnode1, subnode2) - -def test_composer(data_filename, canonical_filename, verbose=False): - nodes1 = None - nodes2 = None - try: - nodes1 = list(yaml.compose_all(open(data_filename, 'rb'))) - nodes2 = list(yaml.canonical_compose_all(open(canonical_filename, 'rb'))) - assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2)) - for node1, node2 in zip(nodes1, nodes2): - _compare_nodes(node1, node2) - finally: - if verbose: - print "NODES1:" - pprint.pprint(nodes1) - print "NODES2:" - pprint.pprint(nodes2) - -test_composer.unittest = ['.data', '.canonical'] - -def _make_loader(): - global MyLoader - - class MyLoader(yaml.Loader): - def construct_sequence(self, node): - return tuple(yaml.Loader.construct_sequence(self, node)) - def construct_mapping(self, node): - pairs = self.construct_pairs(node) - pairs.sort() - return pairs - def construct_undefined(self, node): - return self.construct_scalar(node) - - MyLoader.add_constructor(u'tag:yaml.org,2002:map', MyLoader.construct_mapping) - MyLoader.add_constructor(None, MyLoader.construct_undefined) - -def _make_canonical_loader(): - global MyCanonicalLoader - - class MyCanonicalLoader(yaml.CanonicalLoader): - def construct_sequence(self, node): - return tuple(yaml.CanonicalLoader.construct_sequence(self, node)) - def 
construct_mapping(self, node): - pairs = self.construct_pairs(node) - pairs.sort() - return pairs - def construct_undefined(self, node): - return self.construct_scalar(node) - - MyCanonicalLoader.add_constructor(u'tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping) - MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined) - -def test_constructor(data_filename, canonical_filename, verbose=False): - _make_loader() - _make_canonical_loader() - native1 = None - native2 = None - try: - native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader)) - native2 = list(yaml.load_all(open(canonical_filename, 'rb'), Loader=MyCanonicalLoader)) - assert native1 == native2, (native1, native2) - finally: - if verbose: - print "NATIVE1:" - pprint.pprint(native1) - print "NATIVE2:" - pprint.pprint(native2) - -test_constructor.unittest = ['.data', '.canonical'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_tokens.py b/libs/PyYAML-3.10/tests/lib/test_tokens.py deleted file mode 100644 index 9613fa0..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_tokens.py +++ /dev/null @@ -1,77 +0,0 @@ - -import yaml -import pprint - -# Tokens mnemonic: -# directive: % -# document_start: --- -# document_end: ... -# alias: * -# anchor: & -# tag: ! -# scalar _ -# block_sequence_start: [[ -# block_mapping_start: {{ -# block_end: ]} -# flow_sequence_start: [ -# flow_sequence_end: ] -# flow_mapping_start: { -# flow_mapping_end: } -# entry: , -# key: ? 
-# value: : - -_replaces = { - yaml.DirectiveToken: '%', - yaml.DocumentStartToken: '---', - yaml.DocumentEndToken: '...', - yaml.AliasToken: '*', - yaml.AnchorToken: '&', - yaml.TagToken: '!', - yaml.ScalarToken: '_', - yaml.BlockSequenceStartToken: '[[', - yaml.BlockMappingStartToken: '{{', - yaml.BlockEndToken: ']}', - yaml.FlowSequenceStartToken: '[', - yaml.FlowSequenceEndToken: ']', - yaml.FlowMappingStartToken: '{', - yaml.FlowMappingEndToken: '}', - yaml.BlockEntryToken: ',', - yaml.FlowEntryToken: ',', - yaml.KeyToken: '?', - yaml.ValueToken: ':', -} - -def test_tokens(data_filename, tokens_filename, verbose=False): - tokens1 = [] - tokens2 = open(tokens_filename, 'rb').read().split() - try: - for token in yaml.scan(open(data_filename, 'rb')): - if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)): - tokens1.append(_replaces[token.__class__]) - finally: - if verbose: - print "TOKENS1:", ' '.join(tokens1) - print "TOKENS2:", ' '.join(tokens2) - assert len(tokens1) == len(tokens2), (tokens1, tokens2) - for token1, token2 in zip(tokens1, tokens2): - assert token1 == token2, (token1, token2) - -test_tokens.unittest = ['.data', '.tokens'] - -def test_scanner(data_filename, canonical_filename, verbose=False): - for filename in [data_filename, canonical_filename]: - tokens = [] - try: - for token in yaml.scan(open(filename, 'rb')): - tokens.append(token.__class__.__name__) - finally: - if verbose: - pprint.pprint(tokens) - -test_scanner.unittest = ['.data', '.canonical'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_yaml.py b/libs/PyYAML-3.10/tests/lib/test_yaml.py deleted file mode 100644 index 0927368..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_yaml.py +++ /dev/null @@ -1,18 +0,0 @@ - -from test_mark import * -from test_reader import * -from test_canonical import * -from test_tokens import * -from test_structure import * -from test_errors import * -from 
test_resolver import * -from test_constructor import * -from test_emitter import * -from test_representer import * -from test_recursive import * -from test_input_output import * - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib/test_yaml_ext.py b/libs/PyYAML-3.10/tests/lib/test_yaml_ext.py deleted file mode 100644 index bdfda3e..0000000 --- a/libs/PyYAML-3.10/tests/lib/test_yaml_ext.py +++ /dev/null @@ -1,277 +0,0 @@ - -import _yaml, yaml -import types, pprint - -yaml.PyBaseLoader = yaml.BaseLoader -yaml.PySafeLoader = yaml.SafeLoader -yaml.PyLoader = yaml.Loader -yaml.PyBaseDumper = yaml.BaseDumper -yaml.PySafeDumper = yaml.SafeDumper -yaml.PyDumper = yaml.Dumper - -old_scan = yaml.scan -def new_scan(stream, Loader=yaml.CLoader): - return old_scan(stream, Loader) - -old_parse = yaml.parse -def new_parse(stream, Loader=yaml.CLoader): - return old_parse(stream, Loader) - -old_compose = yaml.compose -def new_compose(stream, Loader=yaml.CLoader): - return old_compose(stream, Loader) - -old_compose_all = yaml.compose_all -def new_compose_all(stream, Loader=yaml.CLoader): - return old_compose_all(stream, Loader) - -old_load = yaml.load -def new_load(stream, Loader=yaml.CLoader): - return old_load(stream, Loader) - -old_load_all = yaml.load_all -def new_load_all(stream, Loader=yaml.CLoader): - return old_load_all(stream, Loader) - -old_safe_load = yaml.safe_load -def new_safe_load(stream): - return old_load(stream, yaml.CSafeLoader) - -old_safe_load_all = yaml.safe_load_all -def new_safe_load_all(stream): - return old_load_all(stream, yaml.CSafeLoader) - -old_emit = yaml.emit -def new_emit(events, stream=None, Dumper=yaml.CDumper, **kwds): - return old_emit(events, stream, Dumper, **kwds) - -old_serialize = yaml.serialize -def new_serialize(node, stream, Dumper=yaml.CDumper, **kwds): - return old_serialize(node, stream, Dumper, **kwds) - -old_serialize_all = yaml.serialize_all -def 
new_serialize_all(nodes, stream=None, Dumper=yaml.CDumper, **kwds): - return old_serialize_all(nodes, stream, Dumper, **kwds) - -old_dump = yaml.dump -def new_dump(data, stream=None, Dumper=yaml.CDumper, **kwds): - return old_dump(data, stream, Dumper, **kwds) - -old_dump_all = yaml.dump_all -def new_dump_all(documents, stream=None, Dumper=yaml.CDumper, **kwds): - return old_dump_all(documents, stream, Dumper, **kwds) - -old_safe_dump = yaml.safe_dump -def new_safe_dump(data, stream=None, **kwds): - return old_dump(data, stream, yaml.CSafeDumper, **kwds) - -old_safe_dump_all = yaml.safe_dump_all -def new_safe_dump_all(documents, stream=None, **kwds): - return old_dump_all(documents, stream, yaml.CSafeDumper, **kwds) - -def _set_up(): - yaml.BaseLoader = yaml.CBaseLoader - yaml.SafeLoader = yaml.CSafeLoader - yaml.Loader = yaml.CLoader - yaml.BaseDumper = yaml.CBaseDumper - yaml.SafeDumper = yaml.CSafeDumper - yaml.Dumper = yaml.CDumper - yaml.scan = new_scan - yaml.parse = new_parse - yaml.compose = new_compose - yaml.compose_all = new_compose_all - yaml.load = new_load - yaml.load_all = new_load_all - yaml.safe_load = new_safe_load - yaml.safe_load_all = new_safe_load_all - yaml.emit = new_emit - yaml.serialize = new_serialize - yaml.serialize_all = new_serialize_all - yaml.dump = new_dump - yaml.dump_all = new_dump_all - yaml.safe_dump = new_safe_dump - yaml.safe_dump_all = new_safe_dump_all - -def _tear_down(): - yaml.BaseLoader = yaml.PyBaseLoader - yaml.SafeLoader = yaml.PySafeLoader - yaml.Loader = yaml.PyLoader - yaml.BaseDumper = yaml.PyBaseDumper - yaml.SafeDumper = yaml.PySafeDumper - yaml.Dumper = yaml.PyDumper - yaml.scan = old_scan - yaml.parse = old_parse - yaml.compose = old_compose - yaml.compose_all = old_compose_all - yaml.load = old_load - yaml.load_all = old_load_all - yaml.safe_load = old_safe_load - yaml.safe_load_all = old_safe_load_all - yaml.emit = old_emit - yaml.serialize = old_serialize - yaml.serialize_all = old_serialize_all - 
yaml.dump = old_dump - yaml.dump_all = old_dump_all - yaml.safe_dump = old_safe_dump - yaml.safe_dump_all = old_safe_dump_all - -def test_c_version(verbose=False): - if verbose: - print _yaml.get_version() - print _yaml.get_version_string() - assert ("%s.%s.%s" % _yaml.get_version()) == _yaml.get_version_string(), \ - (_yaml.get_version(), _yaml.get_version_string()) - -def _compare_scanners(py_data, c_data, verbose): - py_tokens = list(yaml.scan(py_data, Loader=yaml.PyLoader)) - c_tokens = [] - try: - for token in yaml.scan(c_data, Loader=yaml.CLoader): - c_tokens.append(token) - assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens)) - for py_token, c_token in zip(py_tokens, c_tokens): - assert py_token.__class__ == c_token.__class__, (py_token, c_token) - if hasattr(py_token, 'value'): - assert py_token.value == c_token.value, (py_token, c_token) - if isinstance(py_token, yaml.StreamEndToken): - continue - py_start = (py_token.start_mark.index, py_token.start_mark.line, py_token.start_mark.column) - py_end = (py_token.end_mark.index, py_token.end_mark.line, py_token.end_mark.column) - c_start = (c_token.start_mark.index, c_token.start_mark.line, c_token.start_mark.column) - c_end = (c_token.end_mark.index, c_token.end_mark.line, c_token.end_mark.column) - assert py_start == c_start, (py_start, c_start) - assert py_end == c_end, (py_end, c_end) - finally: - if verbose: - print "PY_TOKENS:" - pprint.pprint(py_tokens) - print "C_TOKENS:" - pprint.pprint(c_tokens) - -def test_c_scanner(data_filename, canonical_filename, verbose=False): - _compare_scanners(open(data_filename, 'rb'), - open(data_filename, 'rb'), verbose) - _compare_scanners(open(data_filename, 'rb').read(), - open(data_filename, 'rb').read(), verbose) - _compare_scanners(open(canonical_filename, 'rb'), - open(canonical_filename, 'rb'), verbose) - _compare_scanners(open(canonical_filename, 'rb').read(), - open(canonical_filename, 'rb').read(), verbose) - -test_c_scanner.unittest = 
['.data', '.canonical'] -test_c_scanner.skip = ['.skip-ext'] - -def _compare_parsers(py_data, c_data, verbose): - py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader)) - c_events = [] - try: - for event in yaml.parse(c_data, Loader=yaml.CLoader): - c_events.append(event) - assert len(py_events) == len(c_events), (len(py_events), len(c_events)) - for py_event, c_event in zip(py_events, c_events): - for attribute in ['__class__', 'anchor', 'tag', 'implicit', - 'value', 'explicit', 'version', 'tags']: - py_value = getattr(py_event, attribute, None) - c_value = getattr(c_event, attribute, None) - assert py_value == c_value, (py_event, c_event, attribute) - finally: - if verbose: - print "PY_EVENTS:" - pprint.pprint(py_events) - print "C_EVENTS:" - pprint.pprint(c_events) - -def test_c_parser(data_filename, canonical_filename, verbose=False): - _compare_parsers(open(data_filename, 'rb'), - open(data_filename, 'rb'), verbose) - _compare_parsers(open(data_filename, 'rb').read(), - open(data_filename, 'rb').read(), verbose) - _compare_parsers(open(canonical_filename, 'rb'), - open(canonical_filename, 'rb'), verbose) - _compare_parsers(open(canonical_filename, 'rb').read(), - open(canonical_filename, 'rb').read(), verbose) - -test_c_parser.unittest = ['.data', '.canonical'] -test_c_parser.skip = ['.skip-ext'] - -def _compare_emitters(data, verbose): - events = list(yaml.parse(data, Loader=yaml.PyLoader)) - c_data = yaml.emit(events, Dumper=yaml.CDumper) - if verbose: - print c_data - py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader)) - c_events = list(yaml.parse(c_data, Loader=yaml.CLoader)) - try: - assert len(events) == len(py_events), (len(events), len(py_events)) - assert len(events) == len(c_events), (len(events), len(c_events)) - for event, py_event, c_event in zip(events, py_events, c_events): - for attribute in ['__class__', 'anchor', 'tag', 'implicit', - 'value', 'explicit', 'version', 'tags']: - value = getattr(event, attribute, None) - py_value = 
getattr(py_event, attribute, None) - c_value = getattr(c_event, attribute, None) - if attribute == 'tag' and value in [None, u'!'] \ - and py_value in [None, u'!'] and c_value in [None, u'!']: - continue - if attribute == 'explicit' and (py_value or c_value): - continue - assert value == py_value, (event, py_event, attribute) - assert value == c_value, (event, c_event, attribute) - finally: - if verbose: - print "EVENTS:" - pprint.pprint(events) - print "PY_EVENTS:" - pprint.pprint(py_events) - print "C_EVENTS:" - pprint.pprint(c_events) - -def test_c_emitter(data_filename, canonical_filename, verbose=False): - _compare_emitters(open(data_filename, 'rb').read(), verbose) - _compare_emitters(open(canonical_filename, 'rb').read(), verbose) - -test_c_emitter.unittest = ['.data', '.canonical'] -test_c_emitter.skip = ['.skip-ext'] - -def wrap_ext_function(function): - def wrapper(*args, **kwds): - _set_up() - try: - function(*args, **kwds) - finally: - _tear_down() - try: - wrapper.func_name = '%s_ext' % function.func_name - except TypeError: - pass - wrapper.unittest_name = '%s_ext' % function.func_name - wrapper.unittest = function.unittest - wrapper.skip = getattr(function, 'skip', [])+['.skip-ext'] - return wrapper - -def wrap_ext(collections): - functions = [] - if not isinstance(collections, list): - collections = [collections] - for collection in collections: - if not isinstance(collection, dict): - collection = vars(collection) - keys = collection.keys() - keys.sort() - for key in keys: - value = collection[key] - if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'): - functions.append(wrap_ext_function(value)) - for function in functions: - assert function.unittest_name not in globals() - globals()[function.unittest_name] = function - -import test_tokens, test_structure, test_errors, test_resolver, test_constructor, \ - test_emitter, test_representer, test_recursive, test_input_output -wrap_ext([test_tokens, test_structure, test_errors, 
test_resolver, test_constructor, - test_emitter, test_representer, test_recursive, test_input_output]) - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/canonical.py b/libs/PyYAML-3.10/tests/lib3/canonical.py deleted file mode 100644 index a8b4e3a..0000000 --- a/libs/PyYAML-3.10/tests/lib3/canonical.py +++ /dev/null @@ -1,361 +0,0 @@ - -import yaml, yaml.composer, yaml.constructor, yaml.resolver - -class CanonicalError(yaml.YAMLError): - pass - -class CanonicalScanner: - - def __init__(self, data): - if isinstance(data, bytes): - try: - data = data.decode('utf-8') - except UnicodeDecodeError: - raise CanonicalError("utf-8 stream is expected") - self.data = data+'\0' - self.index = 0 - self.tokens = [] - self.scanned = False - - def check_token(self, *choices): - if not self.scanned: - self.scan() - if self.tokens: - if not choices: - return True - for choice in choices: - if isinstance(self.tokens[0], choice): - return True - return False - - def peek_token(self): - if not self.scanned: - self.scan() - if self.tokens: - return self.tokens[0] - - def get_token(self, choice=None): - if not self.scanned: - self.scan() - token = self.tokens.pop(0) - if choice and not isinstance(token, choice): - raise CanonicalError("unexpected token "+repr(token)) - return token - - def get_token_value(self): - token = self.get_token() - return token.value - - def scan(self): - self.tokens.append(yaml.StreamStartToken(None, None)) - while True: - self.find_token() - ch = self.data[self.index] - if ch == '\0': - self.tokens.append(yaml.StreamEndToken(None, None)) - break - elif ch == '%': - self.tokens.append(self.scan_directive()) - elif ch == '-' and self.data[self.index:self.index+3] == '---': - self.index += 3 - self.tokens.append(yaml.DocumentStartToken(None, None)) - elif ch == '[': - self.index += 1 - self.tokens.append(yaml.FlowSequenceStartToken(None, None)) - elif ch == '{': - self.index += 1 - 
self.tokens.append(yaml.FlowMappingStartToken(None, None)) - elif ch == ']': - self.index += 1 - self.tokens.append(yaml.FlowSequenceEndToken(None, None)) - elif ch == '}': - self.index += 1 - self.tokens.append(yaml.FlowMappingEndToken(None, None)) - elif ch == '?': - self.index += 1 - self.tokens.append(yaml.KeyToken(None, None)) - elif ch == ':': - self.index += 1 - self.tokens.append(yaml.ValueToken(None, None)) - elif ch == ',': - self.index += 1 - self.tokens.append(yaml.FlowEntryToken(None, None)) - elif ch == '*' or ch == '&': - self.tokens.append(self.scan_alias()) - elif ch == '!': - self.tokens.append(self.scan_tag()) - elif ch == '"': - self.tokens.append(self.scan_scalar()) - else: - raise CanonicalError("invalid token") - self.scanned = True - - DIRECTIVE = '%YAML 1.1' - - def scan_directive(self): - if self.data[self.index:self.index+len(self.DIRECTIVE)] == self.DIRECTIVE and \ - self.data[self.index+len(self.DIRECTIVE)] in ' \n\0': - self.index += len(self.DIRECTIVE) - return yaml.DirectiveToken('YAML', (1, 1), None, None) - else: - raise CanonicalError("invalid directive") - - def scan_alias(self): - if self.data[self.index] == '*': - TokenClass = yaml.AliasToken - else: - TokenClass = yaml.AnchorToken - self.index += 1 - start = self.index - while self.data[self.index] not in ', \n\0': - self.index += 1 - value = self.data[start:self.index] - return TokenClass(value, None, None) - - def scan_tag(self): - self.index += 1 - start = self.index - while self.data[self.index] not in ' \n\0': - self.index += 1 - value = self.data[start:self.index] - if not value: - value = '!' 
- elif value[0] == '!': - value = 'tag:yaml.org,2002:'+value[1:] - elif value[0] == '<' and value[-1] == '>': - value = value[1:-1] - else: - value = '!'+value - return yaml.TagToken(value, None, None) - - QUOTE_CODES = { - 'x': 2, - 'u': 4, - 'U': 8, - } - - QUOTE_REPLACES = { - '\\': '\\', - '\"': '\"', - ' ': ' ', - 'a': '\x07', - 'b': '\x08', - 'e': '\x1B', - 'f': '\x0C', - 'n': '\x0A', - 'r': '\x0D', - 't': '\x09', - 'v': '\x0B', - 'N': '\u0085', - 'L': '\u2028', - 'P': '\u2029', - '_': '_', - '0': '\x00', - } - - def scan_scalar(self): - self.index += 1 - chunks = [] - start = self.index - ignore_spaces = False - while self.data[self.index] != '"': - if self.data[self.index] == '\\': - ignore_spaces = False - chunks.append(self.data[start:self.index]) - self.index += 1 - ch = self.data[self.index] - self.index += 1 - if ch == '\n': - ignore_spaces = True - elif ch in self.QUOTE_CODES: - length = self.QUOTE_CODES[ch] - code = int(self.data[self.index:self.index+length], 16) - chunks.append(chr(code)) - self.index += length - else: - if ch not in self.QUOTE_REPLACES: - raise CanonicalError("invalid escape code") - chunks.append(self.QUOTE_REPLACES[ch]) - start = self.index - elif self.data[self.index] == '\n': - chunks.append(self.data[start:self.index]) - chunks.append(' ') - self.index += 1 - start = self.index - ignore_spaces = True - elif ignore_spaces and self.data[self.index] == ' ': - self.index += 1 - start = self.index - else: - ignore_spaces = False - self.index += 1 - chunks.append(self.data[start:self.index]) - self.index += 1 - return yaml.ScalarToken(''.join(chunks), False, None, None) - - def find_token(self): - found = False - while not found: - while self.data[self.index] in ' \t': - self.index += 1 - if self.data[self.index] == '#': - while self.data[self.index] != '\n': - self.index += 1 - if self.data[self.index] == '\n': - self.index += 1 - else: - found = True - -class CanonicalParser: - - def __init__(self): - self.events = [] - 
self.parsed = False - - def dispose(self): - pass - - # stream: STREAM-START document* STREAM-END - def parse_stream(self): - self.get_token(yaml.StreamStartToken) - self.events.append(yaml.StreamStartEvent(None, None)) - while not self.check_token(yaml.StreamEndToken): - if self.check_token(yaml.DirectiveToken, yaml.DocumentStartToken): - self.parse_document() - else: - raise CanonicalError("document is expected, got "+repr(self.tokens[0])) - self.get_token(yaml.StreamEndToken) - self.events.append(yaml.StreamEndEvent(None, None)) - - # document: DIRECTIVE? DOCUMENT-START node - def parse_document(self): - node = None - if self.check_token(yaml.DirectiveToken): - self.get_token(yaml.DirectiveToken) - self.get_token(yaml.DocumentStartToken) - self.events.append(yaml.DocumentStartEvent(None, None)) - self.parse_node() - self.events.append(yaml.DocumentEndEvent(None, None)) - - # node: ALIAS | ANCHOR? TAG? (SCALAR|sequence|mapping) - def parse_node(self): - if self.check_token(yaml.AliasToken): - self.events.append(yaml.AliasEvent(self.get_token_value(), None, None)) - else: - anchor = None - if self.check_token(yaml.AnchorToken): - anchor = self.get_token_value() - tag = None - if self.check_token(yaml.TagToken): - tag = self.get_token_value() - if self.check_token(yaml.ScalarToken): - self.events.append(yaml.ScalarEvent(anchor, tag, (False, False), self.get_token_value(), None, None)) - elif self.check_token(yaml.FlowSequenceStartToken): - self.events.append(yaml.SequenceStartEvent(anchor, tag, None, None)) - self.parse_sequence() - elif self.check_token(yaml.FlowMappingStartToken): - self.events.append(yaml.MappingStartEvent(anchor, tag, None, None)) - self.parse_mapping() - else: - raise CanonicalError("SCALAR, '[', or '{' is expected, got "+repr(self.tokens[0])) - - # sequence: SEQUENCE-START (node (ENTRY node)*)? ENTRY? 
SEQUENCE-END - def parse_sequence(self): - self.get_token(yaml.FlowSequenceStartToken) - if not self.check_token(yaml.FlowSequenceEndToken): - self.parse_node() - while not self.check_token(yaml.FlowSequenceEndToken): - self.get_token(yaml.FlowEntryToken) - if not self.check_token(yaml.FlowSequenceEndToken): - self.parse_node() - self.get_token(yaml.FlowSequenceEndToken) - self.events.append(yaml.SequenceEndEvent(None, None)) - - # mapping: MAPPING-START (map_entry (ENTRY map_entry)*)? ENTRY? MAPPING-END - def parse_mapping(self): - self.get_token(yaml.FlowMappingStartToken) - if not self.check_token(yaml.FlowMappingEndToken): - self.parse_map_entry() - while not self.check_token(yaml.FlowMappingEndToken): - self.get_token(yaml.FlowEntryToken) - if not self.check_token(yaml.FlowMappingEndToken): - self.parse_map_entry() - self.get_token(yaml.FlowMappingEndToken) - self.events.append(yaml.MappingEndEvent(None, None)) - - # map_entry: KEY node VALUE node - def parse_map_entry(self): - self.get_token(yaml.KeyToken) - self.parse_node() - self.get_token(yaml.ValueToken) - self.parse_node() - - def parse(self): - self.parse_stream() - self.parsed = True - - def get_event(self): - if not self.parsed: - self.parse() - return self.events.pop(0) - - def check_event(self, *choices): - if not self.parsed: - self.parse() - if self.events: - if not choices: - return True - for choice in choices: - if isinstance(self.events[0], choice): - return True - return False - - def peek_event(self): - if not self.parsed: - self.parse() - return self.events[0] - -class CanonicalLoader(CanonicalScanner, CanonicalParser, - yaml.composer.Composer, yaml.constructor.Constructor, yaml.resolver.Resolver): - - def __init__(self, stream): - if hasattr(stream, 'read'): - stream = stream.read() - CanonicalScanner.__init__(self, stream) - CanonicalParser.__init__(self) - yaml.composer.Composer.__init__(self) - yaml.constructor.Constructor.__init__(self) - yaml.resolver.Resolver.__init__(self) - 
-yaml.CanonicalLoader = CanonicalLoader - -def canonical_scan(stream): - return yaml.scan(stream, Loader=CanonicalLoader) - -yaml.canonical_scan = canonical_scan - -def canonical_parse(stream): - return yaml.parse(stream, Loader=CanonicalLoader) - -yaml.canonical_parse = canonical_parse - -def canonical_compose(stream): - return yaml.compose(stream, Loader=CanonicalLoader) - -yaml.canonical_compose = canonical_compose - -def canonical_compose_all(stream): - return yaml.compose_all(stream, Loader=CanonicalLoader) - -yaml.canonical_compose_all = canonical_compose_all - -def canonical_load(stream): - return yaml.load(stream, Loader=CanonicalLoader) - -yaml.canonical_load = canonical_load - -def canonical_load_all(stream): - return yaml.load_all(stream, Loader=CanonicalLoader) - -yaml.canonical_load_all = canonical_load_all - diff --git a/libs/PyYAML-3.10/tests/lib3/test_all.py b/libs/PyYAML-3.10/tests/lib3/test_all.py deleted file mode 100644 index fec4ae4..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_all.py +++ /dev/null @@ -1,15 +0,0 @@ - -import sys, yaml, test_appliance - -def main(args=None): - collections = [] - import test_yaml - collections.append(test_yaml) - if yaml.__with_libyaml__: - import test_yaml_ext - collections.append(test_yaml_ext) - test_appliance.run(collections, args) - -if __name__ == '__main__': - main() - diff --git a/libs/PyYAML-3.10/tests/lib3/test_appliance.py b/libs/PyYAML-3.10/tests/lib3/test_appliance.py deleted file mode 100644 index 81ff00b..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_appliance.py +++ /dev/null @@ -1,145 +0,0 @@ - -import sys, os, os.path, types, traceback, pprint - -DATA = 'tests/data' - -def find_test_functions(collections): - if not isinstance(collections, list): - collections = [collections] - functions = [] - for collection in collections: - if not isinstance(collection, dict): - collection = vars(collection) - for key in sorted(collection): - value = collection[key] - if isinstance(value, 
types.FunctionType) and hasattr(value, 'unittest'): - functions.append(value) - return functions - -def find_test_filenames(directory): - filenames = {} - for filename in os.listdir(directory): - if os.path.isfile(os.path.join(directory, filename)): - base, ext = os.path.splitext(filename) - if base.endswith('-py2'): - continue - filenames.setdefault(base, []).append(ext) - filenames = sorted(filenames.items()) - return filenames - -def parse_arguments(args): - if args is None: - args = sys.argv[1:] - verbose = False - if '-v' in args: - verbose = True - args.remove('-v') - if '--verbose' in args: - verbose = True - if 'YAML_TEST_VERBOSE' in os.environ: - verbose = True - include_functions = [] - if args: - include_functions.append(args.pop(0)) - if 'YAML_TEST_FUNCTIONS' in os.environ: - include_functions.extend(os.environ['YAML_TEST_FUNCTIONS'].split()) - include_filenames = [] - include_filenames.extend(args) - if 'YAML_TEST_FILENAMES' in os.environ: - include_filenames.extend(os.environ['YAML_TEST_FILENAMES'].split()) - return include_functions, include_filenames, verbose - -def execute(function, filenames, verbose): - name = function.__name__ - if verbose: - sys.stdout.write('='*75+'\n') - sys.stdout.write('%s(%s)...\n' % (name, ', '.join(filenames))) - try: - function(verbose=verbose, *filenames) - except Exception as exc: - info = sys.exc_info() - if isinstance(exc, AssertionError): - kind = 'FAILURE' - else: - kind = 'ERROR' - if verbose: - traceback.print_exc(limit=1, file=sys.stdout) - else: - sys.stdout.write(kind[0]) - sys.stdout.flush() - else: - kind = 'SUCCESS' - info = None - if not verbose: - sys.stdout.write('.') - sys.stdout.flush() - return (name, filenames, kind, info) - -def display(results, verbose): - if results and not verbose: - sys.stdout.write('\n') - total = len(results) - failures = 0 - errors = 0 - for name, filenames, kind, info in results: - if kind == 'SUCCESS': - continue - if kind == 'FAILURE': - failures += 1 - if kind == 
'ERROR': - errors += 1 - sys.stdout.write('='*75+'\n') - sys.stdout.write('%s(%s): %s\n' % (name, ', '.join(filenames), kind)) - if kind == 'ERROR': - traceback.print_exception(file=sys.stdout, *info) - else: - sys.stdout.write('Traceback (most recent call last):\n') - traceback.print_tb(info[2], file=sys.stdout) - sys.stdout.write('%s: see below\n' % info[0].__name__) - sys.stdout.write('~'*75+'\n') - for arg in info[1].args: - pprint.pprint(arg, stream=sys.stdout) - for filename in filenames: - sys.stdout.write('-'*75+'\n') - sys.stdout.write('%s:\n' % filename) - data = open(filename, 'r', errors='replace').read() - sys.stdout.write(data) - if data and data[-1] != '\n': - sys.stdout.write('\n') - sys.stdout.write('='*75+'\n') - sys.stdout.write('TESTS: %s\n' % total) - if failures: - sys.stdout.write('FAILURES: %s\n' % failures) - if errors: - sys.stdout.write('ERRORS: %s\n' % errors) - -def run(collections, args=None): - test_functions = find_test_functions(collections) - test_filenames = find_test_filenames(DATA) - include_functions, include_filenames, verbose = parse_arguments(args) - results = [] - for function in test_functions: - if include_functions and function.__name__ not in include_functions: - continue - if function.unittest: - for base, exts in test_filenames: - if include_filenames and base not in include_filenames: - continue - filenames = [] - for ext in function.unittest: - if ext not in exts: - break - filenames.append(os.path.join(DATA, base+ext)) - else: - skip_exts = getattr(function, 'skip', []) - for skip_ext in skip_exts: - if skip_ext in exts: - break - else: - result = execute(function, filenames, verbose) - results.append(result) - else: - result = execute(function, [], verbose) - results.append(result) - display(results, verbose=verbose) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_build.py b/libs/PyYAML-3.10/tests/lib3/test_build.py deleted file mode 100644 index 901e8ed..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_build.py +++ 
/dev/null @@ -1,10 +0,0 @@ - -if __name__ == '__main__': - import sys, os, distutils.util - build_lib = 'build/lib' - build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])) - sys.path.insert(0, build_lib) - sys.path.insert(0, build_lib_ext) - import test_yaml, test_appliance - test_appliance.run(test_yaml) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_build_ext.py b/libs/PyYAML-3.10/tests/lib3/test_build_ext.py deleted file mode 100644 index ff195d5..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_build_ext.py +++ /dev/null @@ -1,11 +0,0 @@ - - -if __name__ == '__main__': - import sys, os, distutils.util - build_lib = 'build/lib' - build_lib_ext = os.path.join('build', 'lib.%s-%s' % (distutils.util.get_platform(), sys.version[0:3])) - sys.path.insert(0, build_lib) - sys.path.insert(0, build_lib_ext) - import test_yaml_ext, test_appliance - test_appliance.run(test_yaml_ext) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_canonical.py b/libs/PyYAML-3.10/tests/lib3/test_canonical.py deleted file mode 100644 index a3b1153..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_canonical.py +++ /dev/null @@ -1,40 +0,0 @@ - -import yaml, canonical - -def test_canonical_scanner(canonical_filename, verbose=False): - data = open(canonical_filename, 'rb').read() - tokens = list(yaml.canonical_scan(data)) - assert tokens, tokens - if verbose: - for token in tokens: - print(token) - -test_canonical_scanner.unittest = ['.canonical'] - -def test_canonical_parser(canonical_filename, verbose=False): - data = open(canonical_filename, 'rb').read() - events = list(yaml.canonical_parse(data)) - assert events, events - if verbose: - for event in events: - print(event) - -test_canonical_parser.unittest = ['.canonical'] - -def test_canonical_error(data_filename, canonical_filename, verbose=False): - data = open(data_filename, 'rb').read() - try: - output = list(yaml.canonical_load_all(data)) - except yaml.YAMLError as exc: - if verbose: - 
print(exc) - else: - raise AssertionError("expected an exception") - -test_canonical_error.unittest = ['.data', '.canonical'] -test_canonical_error.skip = ['.empty'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_constructor.py b/libs/PyYAML-3.10/tests/lib3/test_constructor.py deleted file mode 100644 index 427f53c..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_constructor.py +++ /dev/null @@ -1,260 +0,0 @@ - -import yaml -import pprint - -import datetime -import yaml.tokens - -def execute(code): - global value - exec(code) - return value - -def _make_objects(): - global MyLoader, MyDumper, MyTestClass1, MyTestClass2, MyTestClass3, YAMLObject1, YAMLObject2, \ - AnObject, AnInstance, AState, ACustomState, InitArgs, InitArgsWithState, \ - NewArgs, NewArgsWithState, Reduce, ReduceWithState, MyInt, MyList, MyDict, \ - FixedOffset, today, execute - - class MyLoader(yaml.Loader): - pass - class MyDumper(yaml.Dumper): - pass - - class MyTestClass1: - def __init__(self, x, y=0, z=0): - self.x = x - self.y = y - self.z = z - def __eq__(self, other): - if isinstance(other, MyTestClass1): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - def construct1(constructor, node): - mapping = constructor.construct_mapping(node) - return MyTestClass1(**mapping) - def represent1(representer, native): - return representer.represent_mapping("!tag1", native.__dict__) - - yaml.add_constructor("!tag1", construct1, Loader=MyLoader) - yaml.add_representer(MyTestClass1, represent1, Dumper=MyDumper) - - class MyTestClass2(MyTestClass1, yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = "!tag2" - def from_yaml(cls, constructor, node): - x = constructor.construct_yaml_int(node) - return cls(x=x) - from_yaml = classmethod(from_yaml) - def to_yaml(cls, representer, native): - return representer.represent_scalar(cls.yaml_tag, 
str(native.x)) - to_yaml = classmethod(to_yaml) - - class MyTestClass3(MyTestClass2): - yaml_tag = "!tag3" - def from_yaml(cls, constructor, node): - mapping = constructor.construct_mapping(node) - if '=' in mapping: - x = mapping['='] - del mapping['='] - mapping['x'] = x - return cls(**mapping) - from_yaml = classmethod(from_yaml) - def to_yaml(cls, representer, native): - return representer.represent_mapping(cls.yaml_tag, native.__dict__) - to_yaml = classmethod(to_yaml) - - class YAMLObject1(yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = '!foo' - def __init__(self, my_parameter=None, my_another_parameter=None): - self.my_parameter = my_parameter - self.my_another_parameter = my_another_parameter - def __eq__(self, other): - if isinstance(other, YAMLObject1): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - class YAMLObject2(yaml.YAMLObject): - yaml_loader = MyLoader - yaml_dumper = MyDumper - yaml_tag = '!bar' - def __init__(self, foo=1, bar=2, baz=3): - self.foo = foo - self.bar = bar - self.baz = baz - def __getstate__(self): - return {1: self.foo, 2: self.bar, 3: self.baz} - def __setstate__(self, state): - self.foo = state[1] - self.bar = state[2] - self.baz = state[3] - def __eq__(self, other): - if isinstance(other, YAMLObject2): - return self.__class__, self.__dict__ == other.__class__, other.__dict__ - else: - return False - - class AnObject: - def __new__(cls, foo=None, bar=None, baz=None): - self = object.__new__(cls) - self.foo = foo - self.bar = bar - self.baz = baz - return self - def __cmp__(self, other): - return cmp((type(self), self.foo, self.bar, self.baz), - (type(other), other.foo, other.bar, other.baz)) - def __eq__(self, other): - return type(self) is type(other) and \ - (self.foo, self.bar, self.baz) == (other.foo, other.bar, other.baz) - - class AnInstance: - def __init__(self, foo=None, bar=None, baz=None): - self.foo = foo - self.bar = bar - self.baz 
= baz - def __cmp__(self, other): - return cmp((type(self), self.foo, self.bar, self.baz), - (type(other), other.foo, other.bar, other.baz)) - def __eq__(self, other): - return type(self) is type(other) and \ - (self.foo, self.bar, self.baz) == (other.foo, other.bar, other.baz) - - class AState(AnInstance): - def __getstate__(self): - return { - '_foo': self.foo, - '_bar': self.bar, - '_baz': self.baz, - } - def __setstate__(self, state): - self.foo = state['_foo'] - self.bar = state['_bar'] - self.baz = state['_baz'] - - class ACustomState(AnInstance): - def __getstate__(self): - return (self.foo, self.bar, self.baz) - def __setstate__(self, state): - self.foo, self.bar, self.baz = state - - class NewArgs(AnObject): - def __getnewargs__(self): - return (self.foo, self.bar, self.baz) - def __getstate__(self): - return {} - - class NewArgsWithState(AnObject): - def __getnewargs__(self): - return (self.foo, self.bar) - def __getstate__(self): - return self.baz - def __setstate__(self, state): - self.baz = state - - InitArgs = NewArgs - - InitArgsWithState = NewArgsWithState - - class Reduce(AnObject): - def __reduce__(self): - return self.__class__, (self.foo, self.bar, self.baz) - - class ReduceWithState(AnObject): - def __reduce__(self): - return self.__class__, (self.foo, self.bar), self.baz - def __setstate__(self, state): - self.baz = state - - class MyInt(int): - def __eq__(self, other): - return type(self) is type(other) and int(self) == int(other) - - class MyList(list): - def __init__(self, n=1): - self.extend([None]*n) - def __eq__(self, other): - return type(self) is type(other) and list(self) == list(other) - - class MyDict(dict): - def __init__(self, n=1): - for k in range(n): - self[k] = None - def __eq__(self, other): - return type(self) is type(other) and dict(self) == dict(other) - - class FixedOffset(datetime.tzinfo): - def __init__(self, offset, name): - self.__offset = datetime.timedelta(minutes=offset) - self.__name = name - def utcoffset(self, 
dt): - return self.__offset - def tzname(self, dt): - return self.__name - def dst(self, dt): - return datetime.timedelta(0) - - today = datetime.date.today() - -def _load_code(expression): - return eval(expression) - -def _serialize_value(data): - if isinstance(data, list): - return '[%s]' % ', '.join(map(_serialize_value, data)) - elif isinstance(data, dict): - items = [] - for key, value in data.items(): - key = _serialize_value(key) - value = _serialize_value(value) - items.append("%s: %s" % (key, value)) - items.sort() - return '{%s}' % ', '.join(items) - elif isinstance(data, datetime.datetime): - return repr(data.utctimetuple()) - elif isinstance(data, float) and data != data: - return '?' - else: - return str(data) - -def test_constructor_types(data_filename, code_filename, verbose=False): - _make_objects() - native1 = None - native2 = None - try: - native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader)) - if len(native1) == 1: - native1 = native1[0] - native2 = _load_code(open(code_filename, 'rb').read()) - try: - if native1 == native2: - return - except TypeError: - pass - if verbose: - print("SERIALIZED NATIVE1:") - print(_serialize_value(native1)) - print("SERIALIZED NATIVE2:") - print(_serialize_value(native2)) - assert _serialize_value(native1) == _serialize_value(native2), (native1, native2) - finally: - if verbose: - print("NATIVE1:") - pprint.pprint(native1) - print("NATIVE2:") - pprint.pprint(native2) - -test_constructor_types.unittest = ['.data', '.code'] - -if __name__ == '__main__': - import sys, test_constructor - sys.modules['test_constructor'] = sys.modules['__main__'] - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_emitter.py b/libs/PyYAML-3.10/tests/lib3/test_emitter.py deleted file mode 100644 index 90d1652..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_emitter.py +++ /dev/null @@ -1,100 +0,0 @@ - -import yaml - -def _compare_events(events1, events2): - assert 
len(events1) == len(events2), (events1, events2) - for event1, event2 in zip(events1, events2): - assert event1.__class__ == event2.__class__, (event1, event2) - if isinstance(event1, yaml.NodeEvent): - assert event1.anchor == event2.anchor, (event1, event2) - if isinstance(event1, yaml.CollectionStartEvent): - assert event1.tag == event2.tag, (event1, event2) - if isinstance(event1, yaml.ScalarEvent): - if True not in event1.implicit+event2.implicit: - assert event1.tag == event2.tag, (event1, event2) - assert event1.value == event2.value, (event1, event2) - -def test_emitter_on_data(data_filename, canonical_filename, verbose=False): - events = list(yaml.parse(open(data_filename, 'rb'))) - output = yaml.emit(events) - if verbose: - print("OUTPUT:") - print(output) - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_on_data.unittest = ['.data', '.canonical'] - -def test_emitter_on_canonical(canonical_filename, verbose=False): - events = list(yaml.parse(open(canonical_filename, 'rb'))) - for canonical in [False, True]: - output = yaml.emit(events, canonical=canonical) - if verbose: - print("OUTPUT (canonical=%s):" % canonical) - print(output) - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_on_canonical.unittest = ['.canonical'] - -def test_emitter_styles(data_filename, canonical_filename, verbose=False): - for filename in [data_filename, canonical_filename]: - events = list(yaml.parse(open(filename, 'rb'))) - for flow_style in [False, True]: - for style in ['|', '>', '"', '\'', '']: - styled_events = [] - for event in events: - if isinstance(event, yaml.ScalarEvent): - event = yaml.ScalarEvent(event.anchor, event.tag, - event.implicit, event.value, style=style) - elif isinstance(event, yaml.SequenceStartEvent): - event = yaml.SequenceStartEvent(event.anchor, event.tag, - event.implicit, flow_style=flow_style) - elif isinstance(event, yaml.MappingStartEvent): - event = 
yaml.MappingStartEvent(event.anchor, event.tag, - event.implicit, flow_style=flow_style) - styled_events.append(event) - output = yaml.emit(styled_events) - if verbose: - print("OUTPUT (filename=%r, flow_style=%r, style=%r)" % (filename, flow_style, style)) - print(output) - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -test_emitter_styles.unittest = ['.data', '.canonical'] - -class EventsLoader(yaml.Loader): - - def construct_event(self, node): - if isinstance(node, yaml.ScalarNode): - mapping = {} - else: - mapping = self.construct_mapping(node) - class_name = str(node.tag[1:])+'Event' - if class_name in ['AliasEvent', 'ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']: - mapping.setdefault('anchor', None) - if class_name in ['ScalarEvent', 'SequenceStartEvent', 'MappingStartEvent']: - mapping.setdefault('tag', None) - if class_name in ['SequenceStartEvent', 'MappingStartEvent']: - mapping.setdefault('implicit', True) - if class_name == 'ScalarEvent': - mapping.setdefault('implicit', (False, True)) - mapping.setdefault('value', '') - value = getattr(yaml, class_name)(**mapping) - return value - -EventsLoader.add_constructor(None, EventsLoader.construct_event) - -def test_emitter_events(events_filename, verbose=False): - events = list(yaml.load(open(events_filename, 'rb'), Loader=EventsLoader)) - output = yaml.emit(events) - if verbose: - print("OUTPUT:") - print(output) - new_events = list(yaml.parse(output)) - _compare_events(events, new_events) - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_errors.py b/libs/PyYAML-3.10/tests/lib3/test_errors.py deleted file mode 100644 index a3f86af..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_errors.py +++ /dev/null @@ -1,67 +0,0 @@ - -import yaml, test_emitter - -def test_loader_error(error_filename, verbose=False): - try: - list(yaml.load_all(open(error_filename, 'rb'))) - except yaml.YAMLError as 
exc: - if verbose: - print("%s:" % exc.__class__.__name__, exc) - else: - raise AssertionError("expected an exception") - -test_loader_error.unittest = ['.loader-error'] - -def test_loader_error_string(error_filename, verbose=False): - try: - list(yaml.load_all(open(error_filename, 'rb').read())) - except yaml.YAMLError as exc: - if verbose: - print("%s:" % exc.__class__.__name__, exc) - else: - raise AssertionError("expected an exception") - -test_loader_error_string.unittest = ['.loader-error'] - -def test_loader_error_single(error_filename, verbose=False): - try: - yaml.load(open(error_filename, 'rb').read()) - except yaml.YAMLError as exc: - if verbose: - print("%s:" % exc.__class__.__name__, exc) - else: - raise AssertionError("expected an exception") - -test_loader_error_single.unittest = ['.single-loader-error'] - -def test_emitter_error(error_filename, verbose=False): - events = list(yaml.load(open(error_filename, 'rb'), - Loader=test_emitter.EventsLoader)) - try: - yaml.emit(events) - except yaml.YAMLError as exc: - if verbose: - print("%s:" % exc.__class__.__name__, exc) - else: - raise AssertionError("expected an exception") - -test_emitter_error.unittest = ['.emitter-error'] - -def test_dumper_error(error_filename, verbose=False): - code = open(error_filename, 'rb').read() - try: - import yaml - from io import StringIO - exec(code) - except yaml.YAMLError as exc: - if verbose: - print("%s:" % exc.__class__.__name__, exc) - else: - raise AssertionError("expected an exception") - -test_dumper_error.unittest = ['.dumper-error'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_input_output.py b/libs/PyYAML-3.10/tests/lib3/test_input_output.py deleted file mode 100644 index 70a945a..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_input_output.py +++ /dev/null @@ -1,150 +0,0 @@ - -import yaml -import codecs, io, tempfile, os, os.path - -def test_unicode_input(unicode_filename, 
verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - value = ' '.join(data.split()) - output = yaml.load(data) - assert output == value, (output, value) - output = yaml.load(io.StringIO(data)) - assert output == value, (output, value) - for input in [data.encode('utf-8'), - codecs.BOM_UTF8+data.encode('utf-8'), - codecs.BOM_UTF16_BE+data.encode('utf-16-be'), - codecs.BOM_UTF16_LE+data.encode('utf-16-le')]: - if verbose: - print("INPUT:", repr(input[:10]), "...") - output = yaml.load(input) - assert output == value, (output, value) - output = yaml.load(io.BytesIO(input)) - assert output == value, (output, value) - -test_unicode_input.unittest = ['.unicode'] - -def test_unicode_input_errors(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - for input in [data.encode('latin1', 'ignore'), - data.encode('utf-16-be'), data.encode('utf-16-le'), - codecs.BOM_UTF8+data.encode('utf-16-be'), - codecs.BOM_UTF16_BE+data.encode('utf-16-le'), - codecs.BOM_UTF16_LE+data.encode('utf-8')+b'!']: - try: - yaml.load(input) - except yaml.YAMLError as exc: - if verbose: - print(exc) - else: - raise AssertionError("expected an exception") - try: - yaml.load(io.BytesIO(input)) - except yaml.YAMLError as exc: - if verbose: - print(exc) - else: - raise AssertionError("expected an exception") - -test_unicode_input_errors.unittest = ['.unicode'] - -def test_unicode_output(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - value = ' '.join(data.split()) - for allow_unicode in [False, True]: - data1 = yaml.dump(value, allow_unicode=allow_unicode) - for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']: - stream = io.StringIO() - yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode) - data2 = stream.getvalue() - data3 = yaml.dump(value, encoding=encoding, allow_unicode=allow_unicode) - if encoding is not None: - assert isinstance(data3, bytes) - data3 = 
data3.decode(encoding) - stream = io.BytesIO() - if encoding is None: - try: - yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode) - except TypeError as exc: - if verbose: - print(exc) - data4 = None - else: - raise AssertionError("expected an exception") - else: - yaml.dump(value, stream, encoding=encoding, allow_unicode=allow_unicode) - data4 = stream.getvalue() - if verbose: - print("BYTES:", data4[:50]) - data4 = data4.decode(encoding) - for copy in [data1, data2, data3, data4]: - if copy is None: - continue - assert isinstance(copy, str) - if allow_unicode: - try: - copy[4:].encode('ascii') - except UnicodeEncodeError as exc: - if verbose: - print(exc) - else: - raise AssertionError("expected an exception") - else: - copy[4:].encode('ascii') - assert isinstance(data1, str), (type(data1), encoding) - assert isinstance(data2, str), (type(data2), encoding) - -test_unicode_output.unittest = ['.unicode'] - -def test_file_output(unicode_filename, verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - handle, filename = tempfile.mkstemp() - os.close(handle) - try: - stream = io.StringIO() - yaml.dump(data, stream, allow_unicode=True) - data1 = stream.getvalue() - stream = io.BytesIO() - yaml.dump(data, stream, encoding='utf-16-le', allow_unicode=True) - data2 = stream.getvalue().decode('utf-16-le')[1:] - stream = open(filename, 'w', encoding='utf-16-le') - yaml.dump(data, stream, allow_unicode=True) - stream.close() - data3 = open(filename, 'r', encoding='utf-16-le').read() - stream = open(filename, 'wb') - yaml.dump(data, stream, encoding='utf-8', allow_unicode=True) - stream.close() - data4 = open(filename, 'r', encoding='utf-8').read() - assert data1 == data2, (data1, data2) - assert data1 == data3, (data1, data3) - assert data1 == data4, (data1, data4) - finally: - if os.path.exists(filename): - os.unlink(filename) - -test_file_output.unittest = ['.unicode'] - -def test_unicode_transfer(unicode_filename, 
verbose=False): - data = open(unicode_filename, 'rb').read().decode('utf-8') - for encoding in [None, 'utf-8', 'utf-16-be', 'utf-16-le']: - input = data - if encoding is not None: - input = ('\ufeff'+input).encode(encoding) - output1 = yaml.emit(yaml.parse(input), allow_unicode=True) - if encoding is None: - stream = io.StringIO() - else: - stream = io.BytesIO() - yaml.emit(yaml.parse(input), stream, allow_unicode=True) - output2 = stream.getvalue() - assert isinstance(output1, str), (type(output1), encoding) - if encoding is None: - assert isinstance(output2, str), (type(output1), encoding) - else: - assert isinstance(output2, bytes), (type(output1), encoding) - output2.decode(encoding) - -test_unicode_transfer.unittest = ['.unicode'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_mark.py b/libs/PyYAML-3.10/tests/lib3/test_mark.py deleted file mode 100644 index 09eea2e..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_mark.py +++ /dev/null @@ -1,32 +0,0 @@ - -import yaml - -def test_marks(marks_filename, verbose=False): - inputs = open(marks_filename, 'r').read().split('---\n')[1:] - for input in inputs: - index = 0 - line = 0 - column = 0 - while input[index] != '*': - if input[index] == '\n': - line += 1 - column = 0 - else: - column += 1 - index += 1 - mark = yaml.Mark(marks_filename, index, line, column, input, index) - snippet = mark.get_snippet(indent=2, max_length=79) - if verbose: - print(snippet) - assert isinstance(snippet, str), type(snippet) - assert snippet.count('\n') == 1, snippet.count('\n') - data, pointer = snippet.split('\n') - assert len(data) < 82, len(data) - assert data[len(pointer)-1] == '*', data[len(pointer)-1] - -test_marks.unittest = ['.marks'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_reader.py b/libs/PyYAML-3.10/tests/lib3/test_reader.py deleted file mode 
100644 index c07b346..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_reader.py +++ /dev/null @@ -1,34 +0,0 @@ - -import yaml.reader - -def _run_reader(data, verbose): - try: - stream = yaml.reader.Reader(data) - while stream.peek() != '\0': - stream.forward() - except yaml.reader.ReaderError as exc: - if verbose: - print(exc) - else: - raise AssertionError("expected an exception") - -def test_stream_error(error_filename, verbose=False): - _run_reader(open(error_filename, 'rb'), verbose) - _run_reader(open(error_filename, 'rb').read(), verbose) - for encoding in ['utf-8', 'utf-16-le', 'utf-16-be']: - try: - data = open(error_filename, 'rb').read().decode(encoding) - break - except UnicodeDecodeError: - pass - else: - return - _run_reader(data, verbose) - _run_reader(open(error_filename, encoding=encoding), verbose) - -test_stream_error.unittest = ['.stream-error'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_recursive.py b/libs/PyYAML-3.10/tests/lib3/test_recursive.py deleted file mode 100644 index 321a75f..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_recursive.py +++ /dev/null @@ -1,51 +0,0 @@ - -import yaml - -class AnInstance: - - def __init__(self, foo, bar): - self.foo = foo - self.bar = bar - - def __repr__(self): - try: - return "%s(foo=%r, bar=%r)" % (self.__class__.__name__, - self.foo, self.bar) - except RuntimeError: - return "%s(foo=..., bar=...)" % self.__class__.__name__ - -class AnInstanceWithState(AnInstance): - - def __getstate__(self): - return {'attributes': [self.foo, self.bar]} - - def __setstate__(self, state): - self.foo, self.bar = state['attributes'] - -def test_recursive(recursive_filename, verbose=False): - context = globals().copy() - exec(open(recursive_filename, 'rb').read(), context) - value1 = context['value'] - output1 = None - value2 = None - output2 = None - try: - output1 = yaml.dump(value1) - value2 = yaml.load(output1) - output2 = 
yaml.dump(value2) - assert output1 == output2, (output1, output2) - finally: - if verbose: - print("VALUE1:", value1) - print("VALUE2:", value2) - print("OUTPUT1:") - print(output1) - print("OUTPUT2:") - print(output2) - -test_recursive.unittest = ['.recursive'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_representer.py b/libs/PyYAML-3.10/tests/lib3/test_representer.py deleted file mode 100644 index 10d4a8f..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_representer.py +++ /dev/null @@ -1,43 +0,0 @@ - -import yaml -import test_constructor -import pprint - -def test_representer_types(code_filename, verbose=False): - test_constructor._make_objects() - for allow_unicode in [False, True]: - for encoding in ['utf-8', 'utf-16-be', 'utf-16-le']: - native1 = test_constructor._load_code(open(code_filename, 'rb').read()) - native2 = None - try: - output = yaml.dump(native1, Dumper=test_constructor.MyDumper, - allow_unicode=allow_unicode, encoding=encoding) - native2 = yaml.load(output, Loader=test_constructor.MyLoader) - try: - if native1 == native2: - continue - except TypeError: - pass - value1 = test_constructor._serialize_value(native1) - value2 = test_constructor._serialize_value(native2) - if verbose: - print("SERIALIZED NATIVE1:") - print(value1) - print("SERIALIZED NATIVE2:") - print(value2) - assert value1 == value2, (native1, native2) - finally: - if verbose: - print("NATIVE1:") - pprint.pprint(native1) - print("NATIVE2:") - pprint.pprint(native2) - print("OUTPUT:") - print(output) - -test_representer_types.unittest = ['.code'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_resolver.py b/libs/PyYAML-3.10/tests/lib3/test_resolver.py deleted file mode 100644 index f059dab..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_resolver.py +++ /dev/null @@ -1,92 +0,0 @@ - -import yaml -import pprint - 
-def test_implicit_resolver(data_filename, detect_filename, verbose=False): - correct_tag = None - node = None - try: - correct_tag = open(detect_filename, 'r').read().strip() - node = yaml.compose(open(data_filename, 'rb')) - assert isinstance(node, yaml.SequenceNode), node - for scalar in node.value: - assert isinstance(scalar, yaml.ScalarNode), scalar - assert scalar.tag == correct_tag, (scalar.tag, correct_tag) - finally: - if verbose: - print("CORRECT TAG:", correct_tag) - if hasattr(node, 'value'): - print("CHILDREN:") - pprint.pprint(node.value) - -test_implicit_resolver.unittest = ['.data', '.detect'] - -def _make_path_loader_and_dumper(): - global MyLoader, MyDumper - - class MyLoader(yaml.Loader): - pass - class MyDumper(yaml.Dumper): - pass - - yaml.add_path_resolver('!root', [], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver('!root/scalar', [], str, - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver('!root/key11/key12/*', ['key11', 'key12'], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver('!root/key21/1/*', ['key21', 1], - Loader=MyLoader, Dumper=MyDumper) - yaml.add_path_resolver('!root/key31/*/*/key14/map', ['key31', None, None, 'key14'], dict, - Loader=MyLoader, Dumper=MyDumper) - - return MyLoader, MyDumper - -def _convert_node(node): - if isinstance(node, yaml.ScalarNode): - return (node.tag, node.value) - elif isinstance(node, yaml.SequenceNode): - value = [] - for item in node.value: - value.append(_convert_node(item)) - return (node.tag, value) - elif isinstance(node, yaml.MappingNode): - value = [] - for key, item in node.value: - value.append((_convert_node(key), _convert_node(item))) - return (node.tag, value) - -def test_path_resolver_loader(data_filename, path_filename, verbose=False): - _make_path_loader_and_dumper() - nodes1 = list(yaml.compose_all(open(data_filename, 'rb').read(), Loader=MyLoader)) - nodes2 = list(yaml.compose_all(open(path_filename, 'rb').read())) - try: - for node1, node2 in 
zip(nodes1, nodes2): - data1 = _convert_node(node1) - data2 = _convert_node(node2) - assert data1 == data2, (data1, data2) - finally: - if verbose: - print(yaml.serialize_all(nodes1)) - -test_path_resolver_loader.unittest = ['.data', '.path'] - -def test_path_resolver_dumper(data_filename, path_filename, verbose=False): - _make_path_loader_and_dumper() - for filename in [data_filename, path_filename]: - output = yaml.serialize_all(yaml.compose_all(open(filename, 'rb')), Dumper=MyDumper) - if verbose: - print(output) - nodes1 = yaml.compose_all(output) - nodes2 = yaml.compose_all(open(data_filename, 'rb')) - for node1, node2 in zip(nodes1, nodes2): - data1 = _convert_node(node1) - data2 = _convert_node(node2) - assert data1 == data2, (data1, data2) - -test_path_resolver_dumper.unittest = ['.data', '.path'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_structure.py b/libs/PyYAML-3.10/tests/lib3/test_structure.py deleted file mode 100644 index 6d6f59d..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_structure.py +++ /dev/null @@ -1,187 +0,0 @@ - -import yaml, canonical -import pprint - -def _convert_structure(loader): - if loader.check_event(yaml.ScalarEvent): - event = loader.get_event() - if event.tag or event.anchor or event.value: - return True - else: - return None - elif loader.check_event(yaml.SequenceStartEvent): - loader.get_event() - sequence = [] - while not loader.check_event(yaml.SequenceEndEvent): - sequence.append(_convert_structure(loader)) - loader.get_event() - return sequence - elif loader.check_event(yaml.MappingStartEvent): - loader.get_event() - mapping = [] - while not loader.check_event(yaml.MappingEndEvent): - key = _convert_structure(loader) - value = _convert_structure(loader) - mapping.append((key, value)) - loader.get_event() - return mapping - elif loader.check_event(yaml.AliasEvent): - loader.get_event() - return '*' - else: - loader.get_event() - 
return '?' - -def test_structure(data_filename, structure_filename, verbose=False): - nodes1 = [] - nodes2 = eval(open(structure_filename, 'r').read()) - try: - loader = yaml.Loader(open(data_filename, 'rb')) - while loader.check_event(): - if loader.check_event(yaml.StreamStartEvent, yaml.StreamEndEvent, - yaml.DocumentStartEvent, yaml.DocumentEndEvent): - loader.get_event() - continue - nodes1.append(_convert_structure(loader)) - if len(nodes1) == 1: - nodes1 = nodes1[0] - assert nodes1 == nodes2, (nodes1, nodes2) - finally: - if verbose: - print("NODES1:") - pprint.pprint(nodes1) - print("NODES2:") - pprint.pprint(nodes2) - -test_structure.unittest = ['.data', '.structure'] - -def _compare_events(events1, events2, full=False): - assert len(events1) == len(events2), (len(events1), len(events2)) - for event1, event2 in zip(events1, events2): - assert event1.__class__ == event2.__class__, (event1, event2) - if isinstance(event1, yaml.AliasEvent) and full: - assert event1.anchor == event2.anchor, (event1, event2) - if isinstance(event1, (yaml.ScalarEvent, yaml.CollectionStartEvent)): - if (event1.tag not in [None, '!'] and event2.tag not in [None, '!']) or full: - assert event1.tag == event2.tag, (event1, event2) - if isinstance(event1, yaml.ScalarEvent): - assert event1.value == event2.value, (event1, event2) - -def test_parser(data_filename, canonical_filename, verbose=False): - events1 = None - events2 = None - try: - events1 = list(yaml.parse(open(data_filename, 'rb'))) - events2 = list(yaml.canonical_parse(open(canonical_filename, 'rb'))) - _compare_events(events1, events2) - finally: - if verbose: - print("EVENTS1:") - pprint.pprint(events1) - print("EVENTS2:") - pprint.pprint(events2) - -test_parser.unittest = ['.data', '.canonical'] - -def test_parser_on_canonical(canonical_filename, verbose=False): - events1 = None - events2 = None - try: - events1 = list(yaml.parse(open(canonical_filename, 'rb'))) - events2 = 
list(yaml.canonical_parse(open(canonical_filename, 'rb'))) - _compare_events(events1, events2, full=True) - finally: - if verbose: - print("EVENTS1:") - pprint.pprint(events1) - print("EVENTS2:") - pprint.pprint(events2) - -test_parser_on_canonical.unittest = ['.canonical'] - -def _compare_nodes(node1, node2): - assert node1.__class__ == node2.__class__, (node1, node2) - assert node1.tag == node2.tag, (node1, node2) - if isinstance(node1, yaml.ScalarNode): - assert node1.value == node2.value, (node1, node2) - else: - assert len(node1.value) == len(node2.value), (node1, node2) - for item1, item2 in zip(node1.value, node2.value): - if not isinstance(item1, tuple): - item1 = (item1,) - item2 = (item2,) - for subnode1, subnode2 in zip(item1, item2): - _compare_nodes(subnode1, subnode2) - -def test_composer(data_filename, canonical_filename, verbose=False): - nodes1 = None - nodes2 = None - try: - nodes1 = list(yaml.compose_all(open(data_filename, 'rb'))) - nodes2 = list(yaml.canonical_compose_all(open(canonical_filename, 'rb'))) - assert len(nodes1) == len(nodes2), (len(nodes1), len(nodes2)) - for node1, node2 in zip(nodes1, nodes2): - _compare_nodes(node1, node2) - finally: - if verbose: - print("NODES1:") - pprint.pprint(nodes1) - print("NODES2:") - pprint.pprint(nodes2) - -test_composer.unittest = ['.data', '.canonical'] - -def _make_loader(): - global MyLoader - - class MyLoader(yaml.Loader): - def construct_sequence(self, node): - return tuple(yaml.Loader.construct_sequence(self, node)) - def construct_mapping(self, node): - pairs = self.construct_pairs(node) - pairs.sort(key=(lambda i: str(i))) - return pairs - def construct_undefined(self, node): - return self.construct_scalar(node) - - MyLoader.add_constructor('tag:yaml.org,2002:map', MyLoader.construct_mapping) - MyLoader.add_constructor(None, MyLoader.construct_undefined) - -def _make_canonical_loader(): - global MyCanonicalLoader - - class MyCanonicalLoader(yaml.CanonicalLoader): - def 
construct_sequence(self, node): - return tuple(yaml.CanonicalLoader.construct_sequence(self, node)) - def construct_mapping(self, node): - pairs = self.construct_pairs(node) - pairs.sort(key=(lambda i: str(i))) - return pairs - def construct_undefined(self, node): - return self.construct_scalar(node) - - MyCanonicalLoader.add_constructor('tag:yaml.org,2002:map', MyCanonicalLoader.construct_mapping) - MyCanonicalLoader.add_constructor(None, MyCanonicalLoader.construct_undefined) - -def test_constructor(data_filename, canonical_filename, verbose=False): - _make_loader() - _make_canonical_loader() - native1 = None - native2 = None - try: - native1 = list(yaml.load_all(open(data_filename, 'rb'), Loader=MyLoader)) - native2 = list(yaml.load_all(open(canonical_filename, 'rb'), Loader=MyCanonicalLoader)) - assert native1 == native2, (native1, native2) - finally: - if verbose: - print("NATIVE1:") - pprint.pprint(native1) - print("NATIVE2:") - pprint.pprint(native2) - -test_constructor.unittest = ['.data', '.canonical'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_tokens.py b/libs/PyYAML-3.10/tests/lib3/test_tokens.py deleted file mode 100644 index 828945a..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_tokens.py +++ /dev/null @@ -1,77 +0,0 @@ - -import yaml -import pprint - -# Tokens mnemonic: -# directive: % -# document_start: --- -# document_end: ... -# alias: * -# anchor: & -# tag: ! -# scalar _ -# block_sequence_start: [[ -# block_mapping_start: {{ -# block_end: ]} -# flow_sequence_start: [ -# flow_sequence_end: ] -# flow_mapping_start: { -# flow_mapping_end: } -# entry: , -# key: ? 
-# value: : - -_replaces = { - yaml.DirectiveToken: '%', - yaml.DocumentStartToken: '---', - yaml.DocumentEndToken: '...', - yaml.AliasToken: '*', - yaml.AnchorToken: '&', - yaml.TagToken: '!', - yaml.ScalarToken: '_', - yaml.BlockSequenceStartToken: '[[', - yaml.BlockMappingStartToken: '{{', - yaml.BlockEndToken: ']}', - yaml.FlowSequenceStartToken: '[', - yaml.FlowSequenceEndToken: ']', - yaml.FlowMappingStartToken: '{', - yaml.FlowMappingEndToken: '}', - yaml.BlockEntryToken: ',', - yaml.FlowEntryToken: ',', - yaml.KeyToken: '?', - yaml.ValueToken: ':', -} - -def test_tokens(data_filename, tokens_filename, verbose=False): - tokens1 = [] - tokens2 = open(tokens_filename, 'r').read().split() - try: - for token in yaml.scan(open(data_filename, 'rb')): - if not isinstance(token, (yaml.StreamStartToken, yaml.StreamEndToken)): - tokens1.append(_replaces[token.__class__]) - finally: - if verbose: - print("TOKENS1:", ' '.join(tokens1)) - print("TOKENS2:", ' '.join(tokens2)) - assert len(tokens1) == len(tokens2), (tokens1, tokens2) - for token1, token2 in zip(tokens1, tokens2): - assert token1 == token2, (token1, token2) - -test_tokens.unittest = ['.data', '.tokens'] - -def test_scanner(data_filename, canonical_filename, verbose=False): - for filename in [data_filename, canonical_filename]: - tokens = [] - try: - for token in yaml.scan(open(filename, 'rb')): - tokens.append(token.__class__.__name__) - finally: - if verbose: - pprint.pprint(tokens) - -test_scanner.unittest = ['.data', '.canonical'] - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_yaml.py b/libs/PyYAML-3.10/tests/lib3/test_yaml.py deleted file mode 100644 index 0927368..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_yaml.py +++ /dev/null @@ -1,18 +0,0 @@ - -from test_mark import * -from test_reader import * -from test_canonical import * -from test_tokens import * -from test_structure import * -from test_errors import * 
-from test_resolver import * -from test_constructor import * -from test_emitter import * -from test_representer import * -from test_recursive import * -from test_input_output import * - -if __name__ == '__main__': - import test_appliance - test_appliance.run(globals()) - diff --git a/libs/PyYAML-3.10/tests/lib3/test_yaml_ext.py b/libs/PyYAML-3.10/tests/lib3/test_yaml_ext.py deleted file mode 100644 index 93d397b..0000000 --- a/libs/PyYAML-3.10/tests/lib3/test_yaml_ext.py +++ /dev/null @@ -1,271 +0,0 @@ - -import _yaml, yaml -import types, pprint - -yaml.PyBaseLoader = yaml.BaseLoader -yaml.PySafeLoader = yaml.SafeLoader -yaml.PyLoader = yaml.Loader -yaml.PyBaseDumper = yaml.BaseDumper -yaml.PySafeDumper = yaml.SafeDumper -yaml.PyDumper = yaml.Dumper - -old_scan = yaml.scan -def new_scan(stream, Loader=yaml.CLoader): - return old_scan(stream, Loader) - -old_parse = yaml.parse -def new_parse(stream, Loader=yaml.CLoader): - return old_parse(stream, Loader) - -old_compose = yaml.compose -def new_compose(stream, Loader=yaml.CLoader): - return old_compose(stream, Loader) - -old_compose_all = yaml.compose_all -def new_compose_all(stream, Loader=yaml.CLoader): - return old_compose_all(stream, Loader) - -old_load = yaml.load -def new_load(stream, Loader=yaml.CLoader): - return old_load(stream, Loader) - -old_load_all = yaml.load_all -def new_load_all(stream, Loader=yaml.CLoader): - return old_load_all(stream, Loader) - -old_safe_load = yaml.safe_load -def new_safe_load(stream): - return old_load(stream, yaml.CSafeLoader) - -old_safe_load_all = yaml.safe_load_all -def new_safe_load_all(stream): - return old_load_all(stream, yaml.CSafeLoader) - -old_emit = yaml.emit -def new_emit(events, stream=None, Dumper=yaml.CDumper, **kwds): - return old_emit(events, stream, Dumper, **kwds) - -old_serialize = yaml.serialize -def new_serialize(node, stream, Dumper=yaml.CDumper, **kwds): - return old_serialize(node, stream, Dumper, **kwds) - -old_serialize_all = yaml.serialize_all -def 
new_serialize_all(nodes, stream=None, Dumper=yaml.CDumper, **kwds): - return old_serialize_all(nodes, stream, Dumper, **kwds) - -old_dump = yaml.dump -def new_dump(data, stream=None, Dumper=yaml.CDumper, **kwds): - return old_dump(data, stream, Dumper, **kwds) - -old_dump_all = yaml.dump_all -def new_dump_all(documents, stream=None, Dumper=yaml.CDumper, **kwds): - return old_dump_all(documents, stream, Dumper, **kwds) - -old_safe_dump = yaml.safe_dump -def new_safe_dump(data, stream=None, **kwds): - return old_dump(data, stream, yaml.CSafeDumper, **kwds) - -old_safe_dump_all = yaml.safe_dump_all -def new_safe_dump_all(documents, stream=None, **kwds): - return old_dump_all(documents, stream, yaml.CSafeDumper, **kwds) - -def _set_up(): - yaml.BaseLoader = yaml.CBaseLoader - yaml.SafeLoader = yaml.CSafeLoader - yaml.Loader = yaml.CLoader - yaml.BaseDumper = yaml.CBaseDumper - yaml.SafeDumper = yaml.CSafeDumper - yaml.Dumper = yaml.CDumper - yaml.scan = new_scan - yaml.parse = new_parse - yaml.compose = new_compose - yaml.compose_all = new_compose_all - yaml.load = new_load - yaml.load_all = new_load_all - yaml.safe_load = new_safe_load - yaml.safe_load_all = new_safe_load_all - yaml.emit = new_emit - yaml.serialize = new_serialize - yaml.serialize_all = new_serialize_all - yaml.dump = new_dump - yaml.dump_all = new_dump_all - yaml.safe_dump = new_safe_dump - yaml.safe_dump_all = new_safe_dump_all - -def _tear_down(): - yaml.BaseLoader = yaml.PyBaseLoader - yaml.SafeLoader = yaml.PySafeLoader - yaml.Loader = yaml.PyLoader - yaml.BaseDumper = yaml.PyBaseDumper - yaml.SafeDumper = yaml.PySafeDumper - yaml.Dumper = yaml.PyDumper - yaml.scan = old_scan - yaml.parse = old_parse - yaml.compose = old_compose - yaml.compose_all = old_compose_all - yaml.load = old_load - yaml.load_all = old_load_all - yaml.safe_load = old_safe_load - yaml.safe_load_all = old_safe_load_all - yaml.emit = old_emit - yaml.serialize = old_serialize - yaml.serialize_all = old_serialize_all - 
yaml.dump = old_dump - yaml.dump_all = old_dump_all - yaml.safe_dump = old_safe_dump - yaml.safe_dump_all = old_safe_dump_all - -def test_c_version(verbose=False): - if verbose: - print(_yaml.get_version()) - print(_yaml.get_version_string()) - assert ("%s.%s.%s" % _yaml.get_version()) == _yaml.get_version_string(), \ - (_yaml.get_version(), _yaml.get_version_string()) - -def _compare_scanners(py_data, c_data, verbose): - py_tokens = list(yaml.scan(py_data, Loader=yaml.PyLoader)) - c_tokens = [] - try: - for token in yaml.scan(c_data, Loader=yaml.CLoader): - c_tokens.append(token) - assert len(py_tokens) == len(c_tokens), (len(py_tokens), len(c_tokens)) - for py_token, c_token in zip(py_tokens, c_tokens): - assert py_token.__class__ == c_token.__class__, (py_token, c_token) - if hasattr(py_token, 'value'): - assert py_token.value == c_token.value, (py_token, c_token) - if isinstance(py_token, yaml.StreamEndToken): - continue - py_start = (py_token.start_mark.index, py_token.start_mark.line, py_token.start_mark.column) - py_end = (py_token.end_mark.index, py_token.end_mark.line, py_token.end_mark.column) - c_start = (c_token.start_mark.index, c_token.start_mark.line, c_token.start_mark.column) - c_end = (c_token.end_mark.index, c_token.end_mark.line, c_token.end_mark.column) - assert py_start == c_start, (py_start, c_start) - assert py_end == c_end, (py_end, c_end) - finally: - if verbose: - print("PY_TOKENS:") - pprint.pprint(py_tokens) - print("C_TOKENS:") - pprint.pprint(c_tokens) - -def test_c_scanner(data_filename, canonical_filename, verbose=False): - _compare_scanners(open(data_filename, 'rb'), - open(data_filename, 'rb'), verbose) - _compare_scanners(open(data_filename, 'rb').read(), - open(data_filename, 'rb').read(), verbose) - _compare_scanners(open(canonical_filename, 'rb'), - open(canonical_filename, 'rb'), verbose) - _compare_scanners(open(canonical_filename, 'rb').read(), - open(canonical_filename, 'rb').read(), verbose) - -test_c_scanner.unittest = 
['.data', '.canonical'] -test_c_scanner.skip = ['.skip-ext'] - -def _compare_parsers(py_data, c_data, verbose): - py_events = list(yaml.parse(py_data, Loader=yaml.PyLoader)) - c_events = [] - try: - for event in yaml.parse(c_data, Loader=yaml.CLoader): - c_events.append(event) - assert len(py_events) == len(c_events), (len(py_events), len(c_events)) - for py_event, c_event in zip(py_events, c_events): - for attribute in ['__class__', 'anchor', 'tag', 'implicit', - 'value', 'explicit', 'version', 'tags']: - py_value = getattr(py_event, attribute, None) - c_value = getattr(c_event, attribute, None) - assert py_value == c_value, (py_event, c_event, attribute) - finally: - if verbose: - print("PY_EVENTS:") - pprint.pprint(py_events) - print("C_EVENTS:") - pprint.pprint(c_events) - -def test_c_parser(data_filename, canonical_filename, verbose=False): - _compare_parsers(open(data_filename, 'rb'), - open(data_filename, 'rb'), verbose) - _compare_parsers(open(data_filename, 'rb').read(), - open(data_filename, 'rb').read(), verbose) - _compare_parsers(open(canonical_filename, 'rb'), - open(canonical_filename, 'rb'), verbose) - _compare_parsers(open(canonical_filename, 'rb').read(), - open(canonical_filename, 'rb').read(), verbose) - -test_c_parser.unittest = ['.data', '.canonical'] -test_c_parser.skip = ['.skip-ext'] - -def _compare_emitters(data, verbose): - events = list(yaml.parse(data, Loader=yaml.PyLoader)) - c_data = yaml.emit(events, Dumper=yaml.CDumper) - if verbose: - print(c_data) - py_events = list(yaml.parse(c_data, Loader=yaml.PyLoader)) - c_events = list(yaml.parse(c_data, Loader=yaml.CLoader)) - try: - assert len(events) == len(py_events), (len(events), len(py_events)) - assert len(events) == len(c_events), (len(events), len(c_events)) - for event, py_event, c_event in zip(events, py_events, c_events): - for attribute in ['__class__', 'anchor', 'tag', 'implicit', - 'value', 'explicit', 'version', 'tags']: - value = getattr(event, attribute, None) - py_value = 
getattr(py_event, attribute, None) - c_value = getattr(c_event, attribute, None) - if attribute == 'tag' and value in [None, '!'] \ - and py_value in [None, '!'] and c_value in [None, '!']: - continue - if attribute == 'explicit' and (py_value or c_value): - continue - assert value == py_value, (event, py_event, attribute) - assert value == c_value, (event, c_event, attribute) - finally: - if verbose: - print("EVENTS:") - pprint.pprint(events) - print("PY_EVENTS:") - pprint.pprint(py_events) - print("C_EVENTS:") - pprint.pprint(c_events) - -def test_c_emitter(data_filename, canonical_filename, verbose=False): - _compare_emitters(open(data_filename, 'rb').read(), verbose) - _compare_emitters(open(canonical_filename, 'rb').read(), verbose) - -test_c_emitter.unittest = ['.data', '.canonical'] -test_c_emitter.skip = ['.skip-ext'] - -def wrap_ext_function(function): - def wrapper(*args, **kwds): - _set_up() - try: - function(*args, **kwds) - finally: - _tear_down() - wrapper.__name__ = '%s_ext' % function.__name__ - wrapper.unittest = function.unittest - wrapper.skip = getattr(function, 'skip', [])+['.skip-ext'] - return wrapper - -def wrap_ext(collections): - functions = [] - if not isinstance(collections, list): - collections = [collections] - for collection in collections: - if not isinstance(collection, dict): - collection = vars(collection) - for key in sorted(collection): - value = collection[key] - if isinstance(value, types.FunctionType) and hasattr(value, 'unittest'): - functions.append(wrap_ext_function(value)) - for function in functions: - assert function.__name__ not in globals() - globals()[function.__name__] = function - -import test_tokens, test_structure, test_errors, test_resolver, test_constructor, \ - test_emitter, test_representer, test_recursive, test_input_output -wrap_ext([test_tokens, test_structure, test_errors, test_resolver, test_constructor, - test_emitter, test_representer, test_recursive, test_input_output]) - -if __name__ == '__main__': - 
import test_appliance - test_appliance.run(globals()) - diff --git a/libs/icalendar-3.6.1/CHANGES.rst b/libs/icalendar-3.6.1/CHANGES.rst deleted file mode 100644 index c796b63..0000000 --- a/libs/icalendar-3.6.1/CHANGES.rst +++ /dev/null @@ -1,391 +0,0 @@ - -Changelog -========= - -3.6.1 (2014-01-13) ------------------- - -- Open text files referenced by setup.py as utf-8, no matter what the locale - settings are set to. Fixes #122. - [sochotnicky] - -- Add tox.ini to source tarball, which simplifies testing for in distributions. - [sochotnicky] - - -3.6 (2014-01-06) ----------------- - -- Python3 (3.3+) + Python 2 (2.6+) support [geier] - -- Made sure to_ical() always returns bytes [geier] - -- Support adding lists to a component property, which value already was a list - and remove the Component.set method, which was only used by the add method. - [thet] - -- Remove ability to add property parameters via a value's params attribute when - adding via cal.add (that was only possible for custom value objects and makes - up a strange API), but support a parameter attribute on cal.add's method - signature to pass a dictionary with property parameter key/value pairs. - Fixes #116. - [thet] - -- Backport some of Regebro's changes from his regebro-refactor branch. - [thet] - -- Raise explicit error on another malformed content line case. - [hajdbo] - -- Correctly parse datetime component property values with timezone information - when parsed from ical strings. - [untitaker] - - -3.5 (2013-07-03) ----------------- - -- Let to_unicode be more graceful for non-unicode strings, as like CMFPlone's - safe_unicode does it. - [thet] - - -3.4 (2013-04-24) ----------------- - -- Switch to unicode internally. This should fix all en/decoding errors. - [thet] - -- Support for non-ascii parameter values. Fixes #88. - [warvariuc] - -- Added functions to transform chars in string with '\\' + any of r'\,;:' chars - into '%{:02X}' form to avoid splitting on chars escaped with '\\'. 
- [warvariuc] - -- Allow seconds in vUTCOffset properties. Fixes #55. - [thet] - -- Let ``Component.decode`` better handle vRecur and vDDDLists properties. - Fixes #70. - [thet] - -- Don't let ``Component.add`` re-encode already encoded values. This simplifies - the API, since there is no need explicitly pass ``encode=False``. Fixes #82. - [thet] - -- Rename tzinfo_from_dt to tzid_from_dt, which is what it does. - [thet] - -- More support for dateutil parsed tzinfo objects. Fixes #89. - [leo-naeka] - -- Remove python-dateutil version fix at all. Current python-dateutil has Py3 - and Py2 compatibility. - [thet] - -- Declare the required python-dateutil dependency in setup.py. Fixes #90. - [kleink] - -- Raise test coverage. - [thet] - -- Remove interfaces module, as it is unused. - [thet] - -- Remove ``test_doctests.py``, test suite already created properly in - ``test_icalendar.py``. - [rnix] - -- Transformed doctests into unittests, Test fixes and cleanup. - [warvariuc] - - -3.3 (2013-02-08) ----------------- - -- Drop support for Python < 2.6. - [thet] - -- Allow vGeo to be instantiated with list and not only tuples of geo - coordinates. Fixes #83. - [thet] - -- Don't force to pass a list to vDDDLists and allow setting individual RDATE - and EXDATE values without having to wrap them in a list. - [thet] - -- Fix encoding function to allow setting RDATE and EXDATE values and not to - have bypass encoding with an icalendar property. - [thet] - -- Allow setting of timezone for vDDDLists and support timezone properties for - RDATE and EXDATE component properties. - [thet] - -- Move setting of TZID properties to vDDDTypes, where it belongs to. - [thet] - -- Use @staticmethod decorator instead of wrapper function. - [warvariuc, thet] - -- Extend quoting of parameter values to all of those characters: ",;: ’'". - This fixes an outlook incompatibility with some characters. Fixes: #79, - Fixes: #81. 
- [warvariuc] - -- Define VTIMETZONE subcomponents STANDARD and DAYLIGHT for RFC5545 compliance. - [thet] - - -3.2 (2012-11-27) ----------------- - -- Documentation file layout restructuring. - [thet] - -- Fix time support. vTime events can be instantiated with a datetime.time - object, and do not inherit from datetime.time itself. - [rdunklau] - -- Correctly handle tzinfo objects parsed with dateutil. Fixes #77. - [warvariuc, thet] - -- Text values are escaped correclty. Fixes #74. - [warvariuc] - -- Returned old folding algorithm, as the current implementation fails in some - cases. Fixes #72, Fixes #73. - [warvariuc] - -- Supports to_ical() on date/time properties for dates prior to 1900. - [cdevienne] - - -3.1 (2012-09-05) ----------------- - -- Make sure parameters to certain properties propagate to the ical output. - [kanarip] - -- Re-include doctests. - [rnix] - -- Ensure correct datatype at instance creation time in ``prop.vCalAddress`` - and ``prop.vText``. - [rnix] - -- Apply TZID parameter to datetimes parsed from RECURRENCE-ID - [dbstovall] - -- Localize datetimes for timezones to avoid DST transition errors. - [dbstovall] - -- Allow UTC-OFFSET property value data types in seconds, which follows RFC5545 - specification. - [nikolaeff] - -- Remove utctz and normalized_timezone methods to simplify the codebase. The - methods were too tiny to be useful and just used at one place. - [thet] - -- When using Component.add() to add icalendar properties, force a value - conversion to UTC for CREATED, DTSTART and LAST-MODIFIED. The RFC expects UTC - for those properties. - [thet] - -- Removed last occurrences of old API (from_string). - [Rembane] - -- Add 'recursive' argument to property_items() to switch recursive listing. - For example when parsing a text/calendar text including multiple components - (e.g. 
a VCALENDAR with 5 VEVENTs), the previous situation required us to look - over all properties in VEVENTs even if we just want the properties under the - VCALENDAR component (VERSION, PRODID, CALSCALE, METHOD). - [dmikurube] - -- All unit tests fixed. - [mikaelfrykholm] - - -3.0.1b2 (2012-03-01) --------------------- - -- For all TZID parameters in DATE-TIME properties, use timezone identifiers - (e.g. Europe/Vienna) instead of timezone names (e.g. CET), as required by - RFC5545. Timezone names are used together with timezone identifiers in the - Timezone components. - [thet] - -- Timezone parsing, issues and test fixes. - [mikaelfrykholm, garbas, tgecho] - -- Since we use pytz for timezones, also use UTC tzinfo object from the pytz - library instead of own implementation. - [thet] - - -3.0.1b1 (2012-02-24) --------------------- - -- Update Release information. - [thet] - - -3.0 ---- - -- Add API for proper Timezone support. Allow creating ical DATE-TIME strings - with timezone information from Python datetimes with pytz based timezone - information and vice versa. - [thet] - -- Unify API to only use to_ical and from_ical and remove string casting as a - requirement for Python 3 compatibility: - New: to_ical. - Old: ical, string, as_string and string casting via __str__ and str. - New: from_ical. - Old: from_string. - [thet] - - -2.2 (2011-08-24) ----------------- - -- migration to https://github.com/collective/icalendar using svn2git preserving - tags, branches and authors. - [garbas] - -- using tox for testing on python 2.4, 2.5, 2.6, 2.6. - [garbas] - -- fixed tests so they pass also under python 2.7. - [garbas] - -- running tests on https://jenkins.plone.org/job/icalendar (only 2.6 for now) - with some other metrics (pylint, clonedigger, coverage). - [garbas] - -- review and merge changes from https://github.com/cozi/icalendar fork. - [garbas] - -- created sphinx documentation and started documenting development and goals. 
- [garbas] - -- hook out github repository to http://readthedocs.org service so sphinx - documentation is generated on each commit (for master). Documentation can be - visible on: http://readthedocs.org/docs/icalendar/en/latest/ - [garbas] - - -2.1 (2009-12-14) ----------------- - -- Fix deprecation warnings about ``object.__init__`` taking no parameters. - -- Set the VALUE parameter correctly for date values. - -- Long binary data would be base64 encoded with newlines, which made the - iCalendar files incorrect. (This still needs testing). - -- Correctly handle content lines which include newlines. - - -2.0.1 (2008-07-11) ------------------- - -- Made the tests run under Python 2.5+ - -- Renamed the UTC class to Utc, so it would not clash with the UTC object, - since that rendered the UTC object unpicklable. - - -2.0 (2008-07-11) ----------------- - -- EXDATE and RDATE now returns a vDDDLists object, which contains a list - of vDDDTypes objects. This is do that EXDATE and RDATE can contain - lists of dates, as per RFC. - - ***Note!***: This change is incompatible with earlier behavior, so if you - handle EXDATE and RDATE you will need to update your code. - -- When createing a vDuration of -5 hours (which in itself is nonsensical), - the ical output of that was -P1DT19H, which is correct, but ugly. Now - it's '-PT5H', which is prettier. - - -1.2 (2006-11-25) ----------------- - -- Fixed a string index out of range error in the new folding code. - - -1.1 (2006-11-23) ----------------- - -- Fixed a bug in caselessdicts popitem. (thanks to Michael Smith - ) - -- The RFC 2445 was a bit unclear on how to handle line folding when it - happened to be in the middle of a UTF-8 character. This has been clarified - in the following discussion: - http://lists.osafoundation.org/pipermail/ietf-calsify/2006-August/001126.html - And this is now implemented in iCalendar. 
It will not fold in the middle of - a UTF-8 character, but may fold in the middle of a UTF-8 composing character - sequence. - - -1.0 (2006-08-03) ----------------- - -- make get_inline and set_inline support non ascii codes. - -- Added support for creating a python egg distribution. - - -0.11 (2005-11-08) ------------------ - -- Changed component .from_string to use types_factory instead of hardcoding - entries to 'inline' - -- Changed UTC tzinfo to a singleton so the same one is used everywhere - -- Made the parser more strict by using regular expressions for key name, - param name and quoted/unquoted safe char as per the RFC - -- Added some tests from the schooltool icalendar parser for better coverage - -- Be more forgiving on the regex for folding lines - -- Allow for multiple top-level components on .from_string - -- Fix vWeekdays, wasn't accepting relative param (eg: -3SA vs -SA) - -- vDDDTypes didn't accept negative period (eg: -P30M) - -- 'N' is also acceptable as newline on content lines, per RFC - - -0.10 (2005-04-28) ------------------ - -- moved code to codespeak.net subversion. - -- reorganized package structure so that source code is under 'src' directory. - Non-package files remain in distribution root. - -- redid doc/.py files as doc/.txt, using more modern doctest. Before they - were .py files with big docstrings. - -- added test.py testrunner, and tests/test_icalendar.py that picks up all - doctests in source code and doc directory, and runs them, when typing:: - - python2.3 test.py - -- renamed iCalendar to lower case package name, lowercased, de-pluralized and - shorted module names, which are mostly implementation detail. - -- changed tests so they generate .ics files in a temp directory, not in the - structure itself. 
diff --git a/libs/icalendar-3.6.1/CONTRIBUTING.rst b/libs/icalendar-3.6.1/CONTRIBUTING.rst deleted file mode 100644 index d1279d5..0000000 --- a/libs/icalendar-3.6.1/CONTRIBUTING.rst +++ /dev/null @@ -1,18 +0,0 @@ -You want to help and contribute? Perfect! -========================================= - -These are some contribution examples ------------------------------------- - -* Reporting issues to the bugtracker. -* Submitting pull requests from a forked icalendar repo. -* Extending the documentation. -* Sponsor a Sprint (http://plone.org/events/sprints/whatis). - - -For pull requests, keep this in mind ------------------------------------- - -* Add a test which proves your fix and make it pass. -* Describe your change in CHANGES.rst -* Add yourself to the docs/credits.rst diff --git a/libs/icalendar-3.6.1/LICENSE.rst b/libs/icalendar-3.6.1/LICENSE.rst deleted file mode 100644 index 3ef0a2c..0000000 --- a/libs/icalendar-3.6.1/LICENSE.rst +++ /dev/null @@ -1,27 +0,0 @@ - -License -======= - -Copyright (c) 2012-2013, Plone Foundation -All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/libs/icalendar-3.6.1/MANIFEST.in b/libs/icalendar-3.6.1/MANIFEST.in deleted file mode 100644 index b2d0f1b..0000000 --- a/libs/icalendar-3.6.1/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -include *.rst tox.ini -graft docs -recursive-include src/icalendar * -recursive-exclude src/icalendar *.pyc diff --git a/libs/icalendar-3.6.1/PKG-INFO b/libs/icalendar-3.6.1/PKG-INFO deleted file mode 100644 index ed7c36d..0000000 --- a/libs/icalendar-3.6.1/PKG-INFO +++ /dev/null @@ -1,534 +0,0 @@ -Metadata-Version: 1.1 -Name: icalendar -Version: 3.6.1 -Summary: iCalendar parser/generator -Home-page: https://github.com/collective/icalendar -Author: Plone Foundation -Author-email: plone-developers@lists.sourceforge.net -License: BSD -Description: ========================================================== - Internet Calendaring and Scheduling (iCalendar) for Python - ========================================================== - - The `icalendar`_ package is a parser/generator of iCalendar files for use - with Python. - - ---- - - :Homepage: http://icalendar.readthedocs.org - :Code: http://github.com/collective/icalendar - :Mailing list: http://github.com/collective/icalendar/issues - :Dependencies: `setuptools`_ and since version 3.0 we depend on `pytz`_. 
- :Compatible with: Python 2.6, 2.7 and 3.3+ - :License: `BSD`_ - - ---- - - - Roadmap - ======= - - - 3.6: Python 3 support (current version) - - - 4.0: API refactoring - - - - Changes in version 3.0 - ====================== - - API Change - ---------- - - Since version we unified to icalendar de/serialization API to use only to_ical - (for writing an ical string from the internal representation) and from_ical - (for parsing an ical string into the internal representation). - - to_ical is now used instead of the methods ical, string, as_string and instead - of string casting via __str__ and str. - - from_ical is now used instead of from_string. - - This change is a requirement for future Python 3 compatibility. Please update - your code to reflect to the new API. - - Timezone support - ---------------- - - Timezones are now fully supported in icalendar for serialization and - deserialization. We use the pytz library for timezone components of datetime - instances. The timezone identifiers must be valid pytz respectively Olson - database timezone identifiers. This can be a problem for 'GMT' identifiers, - which are not defined in the Olson database. - - Instead of the own UTC tzinfo implementation we use pytz UTC tzinfo object now. - - - About this fork which is not a fork anymore - =========================================== - - Aim of this fork (not fork anymore, read further) was to bring this package up - to date with latest icalendar `RFC`_ specification as part of - `plone.app.event`_ project which goal is to bring recurrent evens to `Plone`_. - - After some thoughts we (Plone developers involved with `plone.app.event`_) send - a suggestion to icalendar-dev@codespeak.net to take over mainaining of - `icalendar`_. Nobody object and since version 2.2 we are back to development. - - .. _`icalendar`: http://pypi.python.org/pypi/icalendar - .. _`plone.app.event`: http://github.com/plone/plone.app.event - .. _`Plone`: http://plone.org - .. 
_`pytz`: http://pypi.python.org/pypi/pytz - .. _`setuptools`: http://pypi.python.org/pypi/setuptools - .. _`RFC`: http://www.ietf.org/rfc/rfc5545.txt - .. _`BSD`: https://github.com/collective/icalendar/issues/2 - - - Test Coverage Report - ==================== - - Output from coverage test:: - - Name Stmts Miss Cover - ---------------------------------------------------------------------------------- - .tox/py27/lib/python2.7/site-packages/icalendar/__init__ 5 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/cal 234 7 97% - .tox/py27/lib/python2.7/site-packages/icalendar/caselessdict 55 5 91% - .tox/py27/lib/python2.7/site-packages/icalendar/compat 1 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/parser 189 6 97% - .tox/py27/lib/python2.7/site-packages/icalendar/parser_tools 20 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/prop 533 62 88% - .tox/py27/lib/python2.7/site-packages/icalendar/tools 16 0 100% - ---------------------------------------------------------------------------------- - TOTAL 1053 80 92% - - - Changelog - ========= - - 3.6.1 (2014-01-13) - ------------------ - - - Open text files referenced by setup.py as utf-8, no matter what the locale - settings are set to. Fixes #122. - [sochotnicky] - - - Add tox.ini to source tarball, which simplifies testing for in distributions. - [sochotnicky] - - - 3.6 (2014-01-06) - ---------------- - - - Python3 (3.3+) + Python 2 (2.6+) support [geier] - - - Made sure to_ical() always returns bytes [geier] - - - Support adding lists to a component property, which value already was a list - and remove the Component.set method, which was only used by the add method. - [thet] - - - Remove ability to add property parameters via a value's params attribute when - adding via cal.add (that was only possible for custom value objects and makes - up a strange API), but support a parameter attribute on cal.add's method - signature to pass a dictionary with property parameter key/value pairs. 
- Fixes #116. - [thet] - - - Backport some of Regebro's changes from his regebro-refactor branch. - [thet] - - - Raise explicit error on another malformed content line case. - [hajdbo] - - - Correctly parse datetime component property values with timezone information - when parsed from ical strings. - [untitaker] - - - 3.5 (2013-07-03) - ---------------- - - - Let to_unicode be more graceful for non-unicode strings, as like CMFPlone's - safe_unicode does it. - [thet] - - - 3.4 (2013-04-24) - ---------------- - - - Switch to unicode internally. This should fix all en/decoding errors. - [thet] - - - Support for non-ascii parameter values. Fixes #88. - [warvariuc] - - - Added functions to transform chars in string with '\\' + any of r'\,;:' chars - into '%{:02X}' form to avoid splitting on chars escaped with '\\'. - [warvariuc] - - - Allow seconds in vUTCOffset properties. Fixes #55. - [thet] - - - Let ``Component.decode`` better handle vRecur and vDDDLists properties. - Fixes #70. - [thet] - - - Don't let ``Component.add`` re-encode already encoded values. This simplifies - the API, since there is no need explicitly pass ``encode=False``. Fixes #82. - [thet] - - - Rename tzinfo_from_dt to tzid_from_dt, which is what it does. - [thet] - - - More support for dateutil parsed tzinfo objects. Fixes #89. - [leo-naeka] - - - Remove python-dateutil version fix at all. Current python-dateutil has Py3 - and Py2 compatibility. - [thet] - - - Declare the required python-dateutil dependency in setup.py. Fixes #90. - [kleink] - - - Raise test coverage. - [thet] - - - Remove interfaces module, as it is unused. - [thet] - - - Remove ``test_doctests.py``, test suite already created properly in - ``test_icalendar.py``. - [rnix] - - - Transformed doctests into unittests, Test fixes and cleanup. - [warvariuc] - - - 3.3 (2013-02-08) - ---------------- - - - Drop support for Python < 2.6. - [thet] - - - Allow vGeo to be instantiated with list and not only tuples of geo - coordinates. 
Fixes #83. - [thet] - - - Don't force to pass a list to vDDDLists and allow setting individual RDATE - and EXDATE values without having to wrap them in a list. - [thet] - - - Fix encoding function to allow setting RDATE and EXDATE values and not to - have bypass encoding with an icalendar property. - [thet] - - - Allow setting of timezone for vDDDLists and support timezone properties for - RDATE and EXDATE component properties. - [thet] - - - Move setting of TZID properties to vDDDTypes, where it belongs to. - [thet] - - - Use @staticmethod decorator instead of wrapper function. - [warvariuc, thet] - - - Extend quoting of parameter values to all of those characters: ",;: ’'". - This fixes an outlook incompatibility with some characters. Fixes: #79, - Fixes: #81. - [warvariuc] - - - Define VTIMETZONE subcomponents STANDARD and DAYLIGHT for RFC5545 compliance. - [thet] - - - 3.2 (2012-11-27) - ---------------- - - - Documentation file layout restructuring. - [thet] - - - Fix time support. vTime events can be instantiated with a datetime.time - object, and do not inherit from datetime.time itself. - [rdunklau] - - - Correctly handle tzinfo objects parsed with dateutil. Fixes #77. - [warvariuc, thet] - - - Text values are escaped correclty. Fixes #74. - [warvariuc] - - - Returned old folding algorithm, as the current implementation fails in some - cases. Fixes #72, Fixes #73. - [warvariuc] - - - Supports to_ical() on date/time properties for dates prior to 1900. - [cdevienne] - - - 3.1 (2012-09-05) - ---------------- - - - Make sure parameters to certain properties propagate to the ical output. - [kanarip] - - - Re-include doctests. - [rnix] - - - Ensure correct datatype at instance creation time in ``prop.vCalAddress`` - and ``prop.vText``. - [rnix] - - - Apply TZID parameter to datetimes parsed from RECURRENCE-ID - [dbstovall] - - - Localize datetimes for timezones to avoid DST transition errors. 
- [dbstovall] - - - Allow UTC-OFFSET property value data types in seconds, which follows RFC5545 - specification. - [nikolaeff] - - - Remove utctz and normalized_timezone methods to simplify the codebase. The - methods were too tiny to be useful and just used at one place. - [thet] - - - When using Component.add() to add icalendar properties, force a value - conversion to UTC for CREATED, DTSTART and LAST-MODIFIED. The RFC expects UTC - for those properties. - [thet] - - - Removed last occurrences of old API (from_string). - [Rembane] - - - Add 'recursive' argument to property_items() to switch recursive listing. - For example when parsing a text/calendar text including multiple components - (e.g. a VCALENDAR with 5 VEVENTs), the previous situation required us to look - over all properties in VEVENTs even if we just want the properties under the - VCALENDAR component (VERSION, PRODID, CALSCALE, METHOD). - [dmikurube] - - - All unit tests fixed. - [mikaelfrykholm] - - - 3.0.1b2 (2012-03-01) - -------------------- - - - For all TZID parameters in DATE-TIME properties, use timezone identifiers - (e.g. Europe/Vienna) instead of timezone names (e.g. CET), as required by - RFC5545. Timezone names are used together with timezone identifiers in the - Timezone components. - [thet] - - - Timezone parsing, issues and test fixes. - [mikaelfrykholm, garbas, tgecho] - - - Since we use pytz for timezones, also use UTC tzinfo object from the pytz - library instead of own implementation. - [thet] - - - 3.0.1b1 (2012-02-24) - -------------------- - - - Update Release information. - [thet] - - - 3.0 - --- - - - Add API for proper Timezone support. Allow creating ical DATE-TIME strings - with timezone information from Python datetimes with pytz based timezone - information and vice versa. - [thet] - - - Unify API to only use to_ical and from_ical and remove string casting as a - requirement for Python 3 compatibility: - New: to_ical. 
- Old: ical, string, as_string and string casting via __str__ and str. - New: from_ical. - Old: from_string. - [thet] - - - 2.2 (2011-08-24) - ---------------- - - - migration to https://github.com/collective/icalendar using svn2git preserving - tags, branches and authors. - [garbas] - - - using tox for testing on python 2.4, 2.5, 2.6, 2.6. - [garbas] - - - fixed tests so they pass also under python 2.7. - [garbas] - - - running tests on https://jenkins.plone.org/job/icalendar (only 2.6 for now) - with some other metrics (pylint, clonedigger, coverage). - [garbas] - - - review and merge changes from https://github.com/cozi/icalendar fork. - [garbas] - - - created sphinx documentation and started documenting development and goals. - [garbas] - - - hook out github repository to http://readthedocs.org service so sphinx - documentation is generated on each commit (for master). Documentation can be - visible on: http://readthedocs.org/docs/icalendar/en/latest/ - [garbas] - - - 2.1 (2009-12-14) - ---------------- - - - Fix deprecation warnings about ``object.__init__`` taking no parameters. - - - Set the VALUE parameter correctly for date values. - - - Long binary data would be base64 encoded with newlines, which made the - iCalendar files incorrect. (This still needs testing). - - - Correctly handle content lines which include newlines. - - - 2.0.1 (2008-07-11) - ------------------ - - - Made the tests run under Python 2.5+ - - - Renamed the UTC class to Utc, so it would not clash with the UTC object, - since that rendered the UTC object unpicklable. - - - 2.0 (2008-07-11) - ---------------- - - - EXDATE and RDATE now returns a vDDDLists object, which contains a list - of vDDDTypes objects. This is do that EXDATE and RDATE can contain - lists of dates, as per RFC. - - ***Note!***: This change is incompatible with earlier behavior, so if you - handle EXDATE and RDATE you will need to update your code. 
- - - When createing a vDuration of -5 hours (which in itself is nonsensical), - the ical output of that was -P1DT19H, which is correct, but ugly. Now - it's '-PT5H', which is prettier. - - - 1.2 (2006-11-25) - ---------------- - - - Fixed a string index out of range error in the new folding code. - - - 1.1 (2006-11-23) - ---------------- - - - Fixed a bug in caselessdicts popitem. (thanks to Michael Smith - ) - - - The RFC 2445 was a bit unclear on how to handle line folding when it - happened to be in the middle of a UTF-8 character. This has been clarified - in the following discussion: - http://lists.osafoundation.org/pipermail/ietf-calsify/2006-August/001126.html - And this is now implemented in iCalendar. It will not fold in the middle of - a UTF-8 character, but may fold in the middle of a UTF-8 composing character - sequence. - - - 1.0 (2006-08-03) - ---------------- - - - make get_inline and set_inline support non ascii codes. - - - Added support for creating a python egg distribution. - - - 0.11 (2005-11-08) - ----------------- - - - Changed component .from_string to use types_factory instead of hardcoding - entries to 'inline' - - - Changed UTC tzinfo to a singleton so the same one is used everywhere - - - Made the parser more strict by using regular expressions for key name, - param name and quoted/unquoted safe char as per the RFC - - - Added some tests from the schooltool icalendar parser for better coverage - - - Be more forgiving on the regex for folding lines - - - Allow for multiple top-level components on .from_string - - - Fix vWeekdays, wasn't accepting relative param (eg: -3SA vs -SA) - - - vDDDTypes didn't accept negative period (eg: -P30M) - - - 'N' is also acceptable as newline on content lines, per RFC - - - 0.10 (2005-04-28) - ----------------- - - - moved code to codespeak.net subversion. - - - reorganized package structure so that source code is under 'src' directory. - Non-package files remain in distribution root. 
- - - redid doc/.py files as doc/.txt, using more modern doctest. Before they - were .py files with big docstrings. - - - added test.py testrunner, and tests/test_icalendar.py that picks up all - doctests in source code and doc directory, and runs them, when typing:: - - python2.3 test.py - - - renamed iCalendar to lower case package name, lowercased, de-pluralized and - shorted module names, which are mostly implementation detail. - - - changed tests so they generate .ics files in a temp directory, not in the - structure itself. - - License - ======= - - Copyright (c) 2012-2013, Plone Foundation - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -Keywords: calendar calendaring ical icalendar event todo journal recurring -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent diff --git a/libs/icalendar-3.6.1/README.rst b/libs/icalendar-3.6.1/README.rst deleted file mode 100644 index f3e825a..0000000 --- a/libs/icalendar-3.6.1/README.rst +++ /dev/null @@ -1,96 +0,0 @@ -========================================================== -Internet Calendaring and Scheduling (iCalendar) for Python -========================================================== - -The `icalendar`_ package is a parser/generator of iCalendar files for use -with Python. - ----- - - :Homepage: http://icalendar.readthedocs.org - :Code: http://github.com/collective/icalendar - :Mailing list: http://github.com/collective/icalendar/issues - :Dependencies: `setuptools`_ and since version 3.0 we depend on `pytz`_. - :Compatible with: Python 2.6, 2.7 and 3.3+ - :License: `BSD`_ - ----- - - -Roadmap -======= - -- 3.6: Python 3 support (current version) - -- 4.0: API refactoring - - - -Changes in version 3.0 -====================== - -API Change ----------- - -Since version we unified to icalendar de/serialization API to use only to_ical -(for writing an ical string from the internal representation) and from_ical -(for parsing an ical string into the internal representation). - -to_ical is now used instead of the methods ical, string, as_string and instead -of string casting via __str__ and str. - -from_ical is now used instead of from_string. - -This change is a requirement for future Python 3 compatibility. 
Please update -your code to reflect to the new API. - -Timezone support ----------------- - -Timezones are now fully supported in icalendar for serialization and -deserialization. We use the pytz library for timezone components of datetime -instances. The timezone identifiers must be valid pytz respectively Olson -database timezone identifiers. This can be a problem for 'GMT' identifiers, -which are not defined in the Olson database. - -Instead of the own UTC tzinfo implementation we use pytz UTC tzinfo object now. - - -About this fork which is not a fork anymore -=========================================== - -Aim of this fork (not fork anymore, read further) was to bring this package up -to date with latest icalendar `RFC`_ specification as part of -`plone.app.event`_ project which goal is to bring recurrent evens to `Plone`_. - -After some thoughts we (Plone developers involved with `plone.app.event`_) send -a suggestion to icalendar-dev@codespeak.net to take over mainaining of -`icalendar`_. Nobody object and since version 2.2 we are back to development. - -.. _`icalendar`: http://pypi.python.org/pypi/icalendar -.. _`plone.app.event`: http://github.com/plone/plone.app.event -.. _`Plone`: http://plone.org -.. _`pytz`: http://pypi.python.org/pypi/pytz -.. _`setuptools`: http://pypi.python.org/pypi/setuptools -.. _`RFC`: http://www.ietf.org/rfc/rfc5545.txt -.. 
_`BSD`: https://github.com/collective/icalendar/issues/2 - - -Test Coverage Report -==================== - -Output from coverage test:: - - Name Stmts Miss Cover - ---------------------------------------------------------------------------------- - .tox/py27/lib/python2.7/site-packages/icalendar/__init__ 5 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/cal 234 7 97% - .tox/py27/lib/python2.7/site-packages/icalendar/caselessdict 55 5 91% - .tox/py27/lib/python2.7/site-packages/icalendar/compat 1 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/parser 189 6 97% - .tox/py27/lib/python2.7/site-packages/icalendar/parser_tools 20 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/prop 533 62 88% - .tox/py27/lib/python2.7/site-packages/icalendar/tools 16 0 100% - ---------------------------------------------------------------------------------- - TOTAL 1053 80 92% - diff --git a/libs/icalendar-3.6.1/TODO.rst b/libs/icalendar-3.6.1/TODO.rst deleted file mode 100644 index 7769d86..0000000 --- a/libs/icalendar-3.6.1/TODO.rst +++ /dev/null @@ -1,26 +0,0 @@ -TODO -==== - -- Update docs. - -- Add a __add__ method to cal.Component, so that ``cal[key] = val`` works as - expected. Currently, the value is added as is, but not converted to the - correct subcomponent, as specified in prop.TypesFactory. See also the NOTE - in: icalendar.tests.example.rst, Components, line 82. - -- Eventually implement a ``decoded`` method for all icalendar.prop properties, - so that cal.decoded doesn't call the from_ical methods but decode it into - realy python natives. We want from_ical encode a ical string into a - icalendar.prop instance, so decoding into a python native seems not to be - appropriate there. (but the vDDD-types are encoded into python natives, so - there is an inconsistence...) - -OLD TODO's -========== - -- Check and Fix VTIMEZONE component functionality and creating VTIMEZONE - components from tzinfo instances. 
- -- Automatic encoding and decoding of parameter values. Most of the - work is done already. Just need to get it finished. Look at line 153 - in 'src/icalendar/parser.py' diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/__init__.py b/libs/icalendar-3.6.1/build/lib/icalendar/__init__.py deleted file mode 100644 index 79bbf4d..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/__init__.py +++ /dev/null @@ -1,59 +0,0 @@ -from icalendar.cal import ( - Calendar, - Event, - Todo, - Journal, - Timezone, - TimezoneStandard, - TimezoneDaylight, - FreeBusy, - Alarm, - ComponentFactory, -) -# Property Data Value Types -from icalendar.prop import ( - vBinary, - vBoolean, - vCalAddress, - vDatetime, - vDate, - vDDDTypes, - vDuration, - vFloat, - vInt, - vPeriod, - vWeekday, - vFrequency, - vRecur, - vText, - vTime, - vUri, - vGeo, - vUTCOffset, - TypesFactory, -) -# useful tzinfo subclasses -from icalendar.prop import ( - FixedOffset, - LocalTimezone, -) -# Parameters and helper methods for splitting and joining string with escaped -# chars. -from icalendar.parser import ( - Parameters, - q_split, - q_join, -) - - -__all__ = [ - Calendar, Event, Todo, Journal, - FreeBusy, Alarm, ComponentFactory, - Timezone, TimezoneStandard, TimezoneDaylight, - vBinary, vBoolean, vCalAddress, vDatetime, vDate, - vDDDTypes, vDuration, vFloat, vInt, vPeriod, - vWeekday, vFrequency, vRecur, vText, vTime, vUri, - vGeo, vUTCOffset, TypesFactory, - FixedOffset, LocalTimezone, - Parameters, q_split, q_join, -] diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/cal.py b/libs/icalendar-3.6.1/build/lib/icalendar/cal.py deleted file mode 100644 index 4022e6d..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/cal.py +++ /dev/null @@ -1,493 +0,0 @@ -# -*- coding: utf-8 -*- -"""Calendar is a dictionary like Python object that can render itself as VCAL -files according to rfc2445. - -These are the defined components. 
-""" -from datetime import datetime -from icalendar.caselessdict import CaselessDict -from icalendar.parser import Contentline -from icalendar.parser import Contentlines -from icalendar.parser import Parameters -from icalendar.parser import q_join -from icalendar.parser import q_split -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import data_encode -from icalendar.prop import TypesFactory -from icalendar.prop import vText, vDDDLists - -import pytz - - -###################################### -# The component factory - -class ComponentFactory(CaselessDict): - """All components defined in rfc 2445 are registered in this factory class. - To get a component you can use it like this. - """ - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. - """ - CaselessDict.__init__(self, *args, **kwargs) - self['VEVENT'] = Event - self['VTODO'] = Todo - self['VJOURNAL'] = Journal - self['VFREEBUSY'] = FreeBusy - self['VTIMEZONE'] = Timezone - self['STANDARD'] = TimezoneStandard - self['DAYLIGHT'] = TimezoneDaylight - self['VALARM'] = Alarm - self['VCALENDAR'] = Calendar - - -# These Properties have multiple property values inlined in one propertyline -# seperated by comma. Use CaselessDict as simple caseless set. -INLINE = CaselessDict( - [(cat, 1) for cat in ('CATEGORIES', 'RESOURCES', 'FREEBUSY')] -) - -_marker = [] - - -class Component(CaselessDict): - """Component is the base object for calendar, Event and the other - components defined in RFC 2445. normally you will not use this class - directy, but rather one of the subclasses. 
- """ - - name = '' # must be defined in each component - required = () # These properties are required - singletons = () # These properties must only appear once - multiple = () # may occur more than once - exclusive = () # These properties are mutually exclusive - inclusive = () # if any occurs the other(s) MUST occur - # ('duration', 'repeat') - ignore_exceptions = False # if True, and we cannot parse this - # component, we will silently ignore - # it, rather than let the exception - # propagate upwards - # not_compliant = [''] # List of non-compliant properties. - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. - """ - CaselessDict.__init__(self, *args, **kwargs) - # set parameters here for properties that use non-default values - self.subcomponents = [] # Components can be nested. - self.is_broken = False # True if we ignored an exception while - # parsing a property - - #def is_compliant(self, name): - # """Returns True is the given property name is compliant with the - # icalendar implementation. - # - # If the parser is too strict it might prevent parsing erroneous but - # otherwise compliant properties. So the parser is pretty lax, but it is - # possible to test for non-complience by calling this method. - # """ - # return name in not_compliant - - ############################# - # handling of property values - - def _encode(self, name, value, parameters=None, encode=1): - """Encode values to icalendar property values. - - :param name: Name of the property. - :type name: string - - :param value: Value of the property. Either of a basic Python type of - any of the icalendar's own property types. - :type value: Python native type or icalendar property type. - - :param parameters: Property parameter dictionary for the value. Only - available, if encode is set to True. 
- :type parameters: Dictionary - - :param encode: True, if the value should be encoded to one of - icalendar's own property types (Fallback is "vText") - or False, if not. - :type encode: Boolean - - :returns: icalendar property value - """ - if not encode: - return value - if isinstance(value, types_factory.all_types): - # Don't encode already encoded values. - return value - klass = types_factory.for_property(name) - obj = klass(value) - if parameters: - if isinstance(parameters, dict): - params = Parameters() - for key, item in parameters.items(): - params[key] = item - parameters = params - assert isinstance(parameters, Parameters) - obj.params = parameters - return obj - - def add(self, name, value, parameters=None, encode=1): - """Add a property. - - :param name: Name of the property. - :type name: string - - :param value: Value of the property. Either of a basic Python type of - any of the icalendar's own property types. - :type value: Python native type or icalendar property type. - - :param parameters: Property parameter dictionary for the value. Only - available, if encode is set to True. - :type parameters: Dictionary - - :param encode: True, if the value should be encoded to one of - icalendar's own property types (Fallback is "vText") - or False, if not. - :type encode: Boolean - - :returns: None - """ - if isinstance(value, datetime) and\ - name.lower() in ('dtstamp', 'created', 'last-modified'): - # RFC expects UTC for those... force value conversion. - if getattr(value, 'tzinfo', False) and value.tzinfo is not None: - value = value.astimezone(pytz.utc) - else: - # assume UTC for naive datetime instances - value = pytz.utc.localize(value) - - # encode value - if encode and isinstance(value, list) \ - and name.lower() not in ['rdate', 'exdate']: - # Individually convert each value to an ical type except rdate and - # exdate, where lists of dates might be passed to vDDDLists. 
- value = [self._encode(name, v, parameters, encode) for v in value] - else: - value = self._encode(name, value, parameters, encode) - - # set value - if name in self: - # If property already exists, append it. - #if name == 'attendee': import pdb; pdb.set_trace() - oldval = self[name] - if isinstance(oldval, list): - if isinstance(value, list): - value = oldval + value - else: - oldval.append(value) - value = oldval - else: - value = [oldval, value] - self[name] = value - - def _decode(self, name, value): - """Internal for decoding property values. - """ - - # TODO: Currently the decoded method calls the icalendar.prop instances - # from_ical. We probably want to decode properties into Python native - # types here. But when parsing from an ical string with from_ical, we - # want to encode the string into a real icalendar.prop property. - if isinstance(value, vDDDLists): - # TODO: Workaround unfinished decoding - return value - decoded = types_factory.from_ical(name, value) - # TODO: remove when proper decoded is implemented in every prop.* class - # Workaround to decode vText properly - if isinstance(decoded, vText): - decoded = decoded.encode(DEFAULT_ENCODING) - return decoded - - def decoded(self, name, default=_marker): - """Returns decoded value of property. - """ - # XXX: fail. what's this function supposed to do in the end? - # -rnix - - if name in self: - value = self[name] - if isinstance(value, list): - return [self._decode(name, v) for v in value] - return self._decode(name, value) - else: - if default is _marker: - raise KeyError(name) - else: - return default - - ######################################################################## - # Inline values. A few properties have multiple values inlined in in one - # property line. These methods are used for splitting and joining these. - - def get_inline(self, name, decode=1): - """Returns a list of values (split on comma). 
- """ - vals = [v.strip('" ') for v in q_split(self[name])] - if decode: - return [self._decode(name, val) for val in vals] - return vals - - def set_inline(self, name, values, encode=1): - """Converts a list of values into comma seperated string and sets value - to that. - """ - if encode: - values = [self._encode(name, value, encode=1) for value in values] - self[name] = types_factory['inline'](q_join(values)) - - ######################### - # Handling of components - - def add_component(self, component): - """Add a subcomponent to this component. - """ - self.subcomponents.append(component) - - def _walk(self, name): - """Walk to given component. - """ - result = [] - if name is None or self.name == name: - result.append(self) - for subcomponent in self.subcomponents: - result += subcomponent._walk(name) - return result - - def walk(self, name=None): - """Recursively traverses component and subcomponents. Returns sequence - of same. If name is passed, only components with name will be returned. - """ - if not name is None: - name = name.upper() - return self._walk(name) - - ##################### - # Generation - - def property_items(self, recursive=True): - """Returns properties in this component and subcomponents as: - [(name, value), ...] - """ - vText = types_factory['text'] - properties = [('BEGIN', vText(self.name).to_ical())] - property_names = self.sorted_keys() - for name in property_names: - values = self[name] - if isinstance(values, list): - # normally one property is one line - for value in values: - properties.append((name, value)) - else: - properties.append((name, values)) - if recursive: - # recursion is fun! - for subcomponent in self.subcomponents: - properties += subcomponent.property_items() - properties.append(('END', vText(self.name).to_ical())) - return properties - - @classmethod - def from_ical(cls, st, multiple=False): - """Populates the component recursively from a string. 
- """ - stack = [] # a stack of components - comps = [] - for line in Contentlines.from_ical(st): # raw parsing - if not line: - continue - name, params, vals = line.parts() - uname = name.upper() - # check for start of component - if uname == 'BEGIN': - # try and create one of the components defined in the spec, - # otherwise get a general Components for robustness. - c_name = vals.upper() - c_class = component_factory.get(c_name, cls) - component = c_class() - if not getattr(component, 'name', ''): # undefined components - component.name = c_name - stack.append(component) - # check for end of event - elif uname == 'END': - # we are done adding properties to this component - # so pop it from the stack and add it to the new top. - component = stack.pop() - if not stack: # we are at the end - comps.append(component) - else: - if not component.is_broken: - stack[-1].add_component(component) - # we are adding properties to the current top of the stack - else: - factory = types_factory.for_property(name) - component = stack[-1] - datetime_names = ('DTSTART', 'DTEND', 'RECURRENCE-ID', 'DUE', - 'FREEBUSY', 'RDATE', 'EXDATE') - try: - if name in datetime_names and 'TZID' in params: - vals = factory(factory.from_ical(vals, params['TZID'])) - else: - vals = factory(factory.from_ical(vals)) - except ValueError: - if not component.ignore_exceptions: - raise - component.is_broken = True - else: - vals.params = params - component.add(name, vals, encode=0) - - if multiple: - return comps - if len(comps) > 1: - raise ValueError('Found multiple components where ' - 'only one is allowed: {st!r}'.format(**locals())) - if len(comps) < 1: - raise ValueError('Found no components where ' - 'exactly one is required: ' - '{st!r}'.format(**locals())) - return comps[0] - - def __repr__(self): - return '%s(%s)' % (self.name, data_encode(self)) - - def content_line(self, name, value): - """Returns property as content line. 
- """ - params = getattr(value, 'params', Parameters()) - return Contentline.from_parts(name, params, value) - - def content_lines(self): - """Converts the Component and subcomponents into content lines. - """ - contentlines = Contentlines() - for name, value in self.property_items(): - cl = self.content_line(name, value) - contentlines.append(cl) - contentlines.append('') # remember the empty string in the end - return contentlines - - def to_ical(self): - content_lines = self.content_lines() - return content_lines.to_ical() - - -####################################### -# components defined in RFC 2445 - -class Event(Component): - - name = 'VEVENT' - - canonical_order = ( - 'SUMMARY', 'DTSTART', 'DTEND', 'DURATION', 'DTSTAMP', - 'UID', 'RECURRENCE-ID', 'SEQUENCE', - 'RRULE' 'EXRULE', 'RDATE', 'EXDATE', - ) - - required = ('UID',) - singletons = ( - 'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'GEO', 'LAST-MODIFIED', - 'LOCATION', 'ORGANIZER', 'PRIORITY', 'DTSTAMP', 'SEQUENCE', 'STATUS', - 'SUMMARY', 'TRANSP', 'URL', 'RECURRENCE-ID', 'DTEND', 'DURATION', - 'DTSTART', - ) - exclusive = ('DTEND', 'DURATION', ) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE' - ) - ignore_exceptions = True - - -class Todo(Component): - - name = 'VTODO' - - required = ('UID',) - singletons = ( - 'CLASS', 'COMPLETED', 'CREATED', 'DESCRIPTION', 'DTSTAMP', 'DTSTART', - 'GEO', 'LAST-MODIFIED', 'LOCATION', 'ORGANIZER', 'PERCENT', 'PRIORITY', - 'RECURRENCE-ID', 'SEQUENCE', 'STATUS', 'SUMMARY', 'UID', 'URL', 'DUE', - 'DURATION', - ) - exclusive = ('DUE', 'DURATION',) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE' - ) - - -class Journal(Component): - - name = 'VJOURNAL' - - required = ('UID',) - singletons = ( - 'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'DTSTAMP', - 'LAST-MODIFIED', 
'ORGANIZER', 'RECURRENCE-ID', 'SEQUENCE', 'STATUS', - 'SUMMARY', 'UID', 'URL', - ) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RELATED', 'RDATE', 'RRULE', 'RSTATUS', - ) - - -class FreeBusy(Component): - - name = 'VFREEBUSY' - - required = ('UID',) - singletons = ( - 'CONTACT', 'DTSTART', 'DTEND', 'DURATION', 'DTSTAMP', 'ORGANIZER', - 'UID', 'URL', - ) - multiple = ('ATTENDEE', 'COMMENT', 'FREEBUSY', 'RSTATUS',) - - -class Timezone(Component): - name = 'VTIMEZONE' - canonical_order = ('TZID', 'STANDARD', 'DAYLIGHT',) - required = ('TZID', 'STANDARD', 'DAYLIGHT',) - singletons = ('TZID', 'LAST-MODIFIED', 'TZURL',) - - -class TimezoneStandard(Component): - name = 'STANDARD' - required = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM') - singletons = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM', 'RRULE') - multiple = ('COMMENT', 'RDATE', 'TZNAME') - - -class TimezoneDaylight(Component): - name = 'DAYLIGHT' - required = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM') - singletons = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM', 'RRULE') - multiple = ('COMMENT', 'RDATE', 'TZNAME') - - -class Alarm(Component): - - name = 'VALARM' - # not quite sure about these ... - required = ('ACTION', 'TRIGGER',) - singletons = ('ATTACH', 'ACTION', 'TRIGGER', 'DURATION', 'REPEAT',) - inclusive = (('DURATION', 'REPEAT',),) - - -class Calendar(Component): - """This is the base object for an iCalendar file. 
- """ - name = 'VCALENDAR' - canonical_order = ('VERSION', 'PRODID', 'CALSCALE', 'METHOD',) - required = ('prodid', 'version', ) - singletons = ('prodid', 'version', ) - multiple = ('calscale', 'method', ) - -# These are read only singleton, so one instance is enough for the module -types_factory = TypesFactory() -component_factory = ComponentFactory() diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/caselessdict.py b/libs/icalendar-3.6.1/build/lib/icalendar/caselessdict.py deleted file mode 100644 index 08ec8a4..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/caselessdict.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import to_unicode -from icalendar.parser_tools import data_encode - - -def canonsort_keys(keys, canonical_order=None): - """Sorts leading keys according to canonical_order. Keys not specified in - canonical_order will appear alphabetically at the end. - """ - canonical_map = dict((k, i) for i, k in enumerate(canonical_order or [])) - head = [k for k in keys if k in canonical_map] - tail = [k for k in keys if k not in canonical_map] - return sorted(head, key=lambda k: canonical_map[k]) + sorted(tail) - - -def canonsort_items(dict1, canonical_order=None): - """Returns a list of items from dict1, sorted by canonical_order. - """ - return [(k, dict1[k]) for \ - k in canonsort_keys(dict1.keys(), canonical_order)] - - -class CaselessDict(dict): - """A dictionary that isn't case sensitive, and only uses strings as keys. - Values retain their case. - """ - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. 
- """ - dict.__init__(self, *args, **kwargs) - for key, value in self.items(): - key_upper = to_unicode(key).upper() - if key != key_upper: - dict.__delitem__(self, key) - self[key_upper] = value - - def __getitem__(self, key): - key = to_unicode(key) - return dict.__getitem__(self, key.upper()) - - def __setitem__(self, key, value): - key = to_unicode(key) - dict.__setitem__(self, key.upper(), value) - - def __delitem__(self, key): - key = to_unicode(key) - dict.__delitem__(self, key.upper()) - - def __contains__(self, key): - key = to_unicode(key) - return dict.__contains__(self, key.upper()) - - def get(self, key, default=None): - key = to_unicode(key) - return dict.get(self, key.upper(), default) - - def setdefault(self, key, value=None): - key = to_unicode(key) - return dict.setdefault(self, key.upper(), value) - - def pop(self, key, default=None): - key = to_unicode(key) - return dict.pop(self, key.upper(), default) - - def popitem(self): - return dict.popitem(self) - - def has_key(self, key): - key = to_unicode(key) - return dict.__contains__(self, key.upper()) - - def update(self, indict): - # Multiple keys where key1.upper() == key2.upper() will be lost. - for key, value in indict.items(): # TODO optimize in python 2 - self[key] = value - - def copy(self): - return CaselessDict(dict.copy(self)) - - def __repr__(self): - return 'CaselessDict(%s)' % data_encode(self) - - # A list of keys that must appear first in sorted_keys and sorted_items; - # must be uppercase. - canonical_order = None - - def sorted_keys(self): - """Sorts keys according to the canonical_order for the derived class. - Keys not specified in canonical_order will appear at the end. - """ - return canonsort_keys(self.keys(), self.canonical_order) - - def sorted_items(self): - """Sorts items according to the canonical_order for the derived class. - Items not specified in canonical_order will appear at the end. 
- """ - return canonsort_items(self, self.canonical_order) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/compat.py b/libs/icalendar-3.6.1/build/lib/icalendar/compat.py deleted file mode 100644 index 4704a14..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/compat.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys - - -if sys.version_info[0] == 2: # pragma: no cover - unicode_type = unicode - bytes_type = str - iteritems = lambda d, *args, **kwargs: iter(d.iteritems(*args, **kwargs)) -else: # pragma: no cover - unicode_type = str - bytes_type = bytes - iteritems = lambda d, *args, **kwargs: iter(d.items(*args, **kwargs)) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/parser.py b/libs/icalendar-3.6.1/build/lib/icalendar/parser.py deleted file mode 100644 index 7f57db2..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/parser.py +++ /dev/null @@ -1,371 +0,0 @@ -# -*- coding: utf-8 -*- -"""This module parses and generates contentlines as defined in RFC 2445 -(iCalendar), but will probably work for other MIME types with similar syntax. -Eg. RFC 2426 (vCard) - -It is stupid in the sense that it treats the content purely as strings. No type -conversion is attempted. -""" -from icalendar import compat -from icalendar.caselessdict import CaselessDict -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import SEQUENCE_TYPES -from icalendar.parser_tools import data_encode -from icalendar.parser_tools import to_unicode - -import re - - -def escape_char(text): - """Format value according to iCalendar TEXT escaping rules. - """ - assert isinstance(text, (compat.unicode_type, compat.bytes_type)) - # NOTE: ORDER MATTERS! - return text.replace(r'\N', '\n')\ - .replace('\\', '\\\\')\ - .replace(';', r'\;')\ - .replace(',', r'\,')\ - .replace('\r\n', r'\n')\ - .replace('\n', r'\n') - - -def unescape_char(text): - assert isinstance(text, (compat.unicode_type, compat.bytes_type)) - # NOTE: ORDER MATTERS! 
- if isinstance(text, compat.unicode_type): - return text.replace(u'\\N', u'\\n')\ - .replace(u'\r\n', u'\n')\ - .replace(u'\\n', u'\n')\ - .replace(u'\\,', u',')\ - .replace(u'\\;', u';')\ - .replace(u'\\\\', u'\\') - elif isinstance(text, compat.bytes_type): - return text.replace(b'\N', b'\n')\ - .replace(b'\r\n', b'\n')\ - .replace(b'\n', b'\n')\ - .replace(b'\,', b',')\ - .replace(b'\;', b';')\ - .replace(b'\\\\', b'\\') - - -def tzid_from_dt(dt): - tzid = None - if hasattr(dt.tzinfo, 'zone'): - tzid = dt.tzinfo.zone # pytz implementation - elif hasattr(dt.tzinfo, 'tzname'): - try: - tzid = dt.tzinfo.tzname(dt) # dateutil implementation - except AttributeError: - # No tzid available - pass - return tzid - - -def foldline(line, limit=75, fold_sep=u'\r\n '): - """Make a string folded as defined in RFC5545 - Lines of text SHOULD NOT be longer than 75 octets, excluding the line - break. Long content lines SHOULD be split into a multiple line - representations using a line "folding" technique. That is, a long - line can be split between any two characters by inserting a CRLF - immediately followed by a single linear white-space character (i.e., - SPACE or HTAB). - """ - assert isinstance(line, compat.unicode_type) - assert u'\n' not in line - - ret_line = u'' - byte_count = 0 - for char in line: - char_byte_len = len(char.encode(DEFAULT_ENCODING)) - byte_count += char_byte_len - if byte_count >= limit: - ret_line += fold_sep - byte_count = char_byte_len - ret_line += char - - return ret_line - - -################################################################# -# Property parameter stuff - -def param_value(value): - """Returns a parameter value. 
- """ - if isinstance(value, SEQUENCE_TYPES): - return q_join(value) - return dquote(value) - - -# Could be improved -NAME = re.compile('[\w-]+') -UNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F",:;]') -QUNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F"]') -FOLD = re.compile(b'(\r?\n)+[ \t]') -uFOLD = re.compile(u'(\r?\n)+[ \t]') -NEWLINE = re.compile(r'\r?\n') - - -def validate_token(name): - match = NAME.findall(name) - if len(match) == 1 and name == match[0]: - return - raise ValueError(name) - - -def validate_param_value(value, quoted=True): - validator = QUNSAFE_CHAR if quoted else UNSAFE_CHAR - if validator.findall(value): - raise ValueError(value) - - -# chars presence of which in parameter value will be cause the value -# to be enclosed in double-quotes -QUOTABLE = re.compile("[,;: ’']") - - -def dquote(val): - """Enclose parameter values containing [,;:] in double quotes. - """ - # a double-quote character is forbidden to appear in a parameter value - # so replace it with a single-quote character - val = val.replace('"', "'") - if QUOTABLE.search(val): - return '"%s"' % val - return val - - -# parsing helper -def q_split(st, sep=','): - """Splits a string on char, taking double (q)uotes into considderation. - """ - result = [] - cursor = 0 - length = len(st) - inquote = 0 - for i in range(length): - ch = st[i] - if ch == '"': - inquote = not inquote - if not inquote and ch == sep: - result.append(st[cursor:i]) - cursor = i + 1 - if i + 1 == length: - result.append(st[cursor:]) - return result - - -def q_join(lst, sep=','): - """Joins a list on sep, quoting strings with QUOTABLE chars. - """ - return sep.join(dquote(itm) for itm in lst) - - -class Parameters(CaselessDict): - """Parser and generator of Property parameter strings. It knows nothing of - datatypes. Its main concern is textual structure. - """ - - def params(self): - """In rfc2445 keys are called parameters, so this is to be consitent - with the naming conventions. 
- """ - return self.keys() - -# TODO? -# Later, when I get more time... need to finish this off now. The last major -# thing missing. -# def _encode(self, name, value, cond=1): -# # internal, for conditional convertion of values. -# if cond: -# klass = types_factory.for_property(name) -# return klass(value) -# return value -# -# def add(self, name, value, encode=0): -# "Add a parameter value and optionally encode it." -# if encode: -# value = self._encode(name, value, encode) -# self[name] = value -# -# def decoded(self, name): -# "returns a decoded value, or list of same" - - def __repr__(self): - return 'Parameters(%s)' % data_encode(self) - - def to_ical(self): - result = [] - items = self.items() - for key, value in sorted(items): - value = param_value(value) - if isinstance(value, compat.unicode_type): - value = value.encode(DEFAULT_ENCODING) - # CaselessDict keys are always unicode - key = key.upper().encode(DEFAULT_ENCODING) - result.append(key + b'=' + value) - return b';'.join(result) - - @classmethod - def from_ical(cls, st, strict=False): - """Parses the parameter format from ical text format.""" - - # parse into strings - result = cls() - for param in q_split(st, ';'): - try: - key, val = q_split(param, '=') - validate_token(key) - # Property parameter values that are not in quoted - # strings are case insensitive. 
- vals = [] - for v in q_split(val, ','): - if v.startswith('"') and v.endswith('"'): - v = v.strip('"') - validate_param_value(v, quoted=True) - vals.append(v) - else: - validate_param_value(v, quoted=False) - if strict: - vals.append(v.upper()) - else: - vals.append(v) - if not vals: - result[key] = val - else: - if len(vals) == 1: - result[key] = vals[0] - else: - result[key] = vals - except ValueError as exc: - raise ValueError('%r is not a valid parameter string: %s' - % (param, exc)) - return result - - -def escape_string(val): - # '%{:02X}'.format(i) - return val.replace(r'\,', '%2C').replace(r'\:', '%3A')\ - .replace(r'\;', '%3B').replace(r'\\', '%5C') - - -def unsescape_string(val): - return val.replace('%2C', ',').replace('%3A', ':')\ - .replace('%3B', ';').replace('%5C', '\\') - - -######################################### -# parsing and generation of content lines - -class Contentline(compat.unicode_type): - """A content line is basically a string that can be folded and parsed into - parts. - """ - def __new__(cls, value, strict=False, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - assert u'\n' not in value, ('Content line can not contain unescaped ' - 'new line characters.') - self = super(Contentline, cls).__new__(cls, value) - self.strict = strict - return self - - @classmethod - def from_parts(cls, name, params, values): - """Turn a parts into a content line. - """ - assert isinstance(params, Parameters) - if hasattr(values, 'to_ical'): - values = values.to_ical() - else: - values = vText(values).to_ical() - # elif isinstance(values, basestring): - # values = escape_char(values) - - # TODO: after unicode only, remove this - # Convert back to unicode, after to_ical encoded it. 
- name = to_unicode(name) - values = to_unicode(values) - if params: - params = to_unicode(params.to_ical()) - return cls(u'%s;%s:%s' % (name, params, values)) - return cls(u'%s:%s' % (name, values)) - - def parts(self): - """Split the content line up into (name, parameters, values) parts. - """ - try: - st = escape_string(self) - name_split = None - value_split = None - in_quotes = False - for i, ch in enumerate(st): - if not in_quotes: - if ch in ':;' and not name_split: - name_split = i - if ch == ':' and not value_split: - value_split = i - if ch == '"': - in_quotes = not in_quotes - name = unsescape_string(st[:name_split]) - if not name: - raise ValueError('Key name is required') - validate_token(name) - if not name_split or name_split + 1 == value_split: - raise ValueError('Invalid content line') - params = Parameters.from_ical(st[name_split + 1: value_split], - strict=self.strict) - params = Parameters( - (unsescape_string(key), unsescape_string(value)) - for key, value in compat.iteritems(params) - ) - values = unsescape_string(st[value_split + 1:]) - return (name, params, values) - except ValueError as exc: - raise ValueError( - u"Content line could not be parsed into parts: %r: %s" - % (self, exc) - ) - - @classmethod - def from_ical(cls, ical, strict=False): - """Unfold the content lines in an iCalendar into long content lines. - """ - ical = to_unicode(ical) - # a fold is carriage return followed by either a space or a tab - return cls(uFOLD.sub('', ical), strict=strict) - - def to_ical(self): - """Long content lines are folded so they are less than 75 characters - wide. - """ - return foldline(self).encode(DEFAULT_ENCODING) - - -class Contentlines(list): - """I assume that iCalendar files generally are a few kilobytes in size. - Then this should be efficient. for Huge files, an iterator should probably - be used instead. - """ - def to_ical(self): - """Simply join self. 
- """ - return b'\r\n'.join(line.to_ical() for line in self if line) + b'\r\n' - - @classmethod - def from_ical(cls, st): - """Parses a string into content lines. - """ - st = to_unicode(st) - try: - # a fold is carriage return followed by either a space or a tab - unfolded = uFOLD.sub('', st) - lines = cls(Contentline(line) for - line in unfolded.splitlines() if line) - lines.append('') # '\r\n' at the end of every content line - return lines - except: - raise ValueError('Expected StringType with content lines') - - -# XXX: what kind of hack is this? import depends to be at end -from icalendar.prop import vText diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/parser_tools.py b/libs/icalendar-3.6.1/build/lib/icalendar/parser_tools.py deleted file mode 100644 index 1e0dade..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/parser_tools.py +++ /dev/null @@ -1,33 +0,0 @@ -from icalendar import compat - - -SEQUENCE_TYPES = (list, tuple) -DEFAULT_ENCODING = 'utf-8' - - -def to_unicode(value, encoding='utf-8'): - """Converts a value to unicode, even if it is already a unicode string. - """ - if isinstance(value, compat.unicode_type): - return value - elif isinstance(value, compat.bytes_type): - try: - value = value.decode(encoding) - except UnicodeDecodeError: - value = value.decode('utf-8', 'replace') - return value - - -def data_encode(data, encoding=DEFAULT_ENCODING): - """Encode all datastructures to the given encoding. - Currently unicode strings, dicts and lists are supported. 
- """ - # http://stackoverflow.com/questions/1254454/fastest-way-to-convert-a-dicts-keys-values-from-unicode-to-str - if isinstance(data, compat.unicode_type): - return data.encode(encoding) - elif isinstance(data, dict): - return dict(map(data_encode, compat.iteritems(data))) - elif isinstance(data, list) or isinstance(data, tuple): - return list(map(data_encode, data)) - else: - return data diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/prop.py b/libs/icalendar-3.6.1/build/lib/icalendar/prop.py deleted file mode 100644 index 82c9df8..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/prop.py +++ /dev/null @@ -1,993 +0,0 @@ -# -*- coding: utf-8 -*- -"""This module contains the parser/generators (or coders/encoders if you -prefer) for the classes/datatypes that are used in iCalendar: - -########################################################################### -# This module defines these property value data types and property parameters - -4.2 Defined property parameters are: - - ALTREP, CN, CUTYPE, DELEGATED-FROM, DELEGATED-TO, DIR, ENCODING, FMTTYPE, - FBTYPE, LANGUAGE, MEMBER, PARTSTAT, RANGE, RELATED, RELTYPE, ROLE, RSVP, - SENT-BY, TZID, VALUE - -4.3 Defined value data types are: - - BINARY, BOOLEAN, CAL-ADDRESS, DATE, DATE-TIME, DURATION, FLOAT, INTEGER, - PERIOD, RECUR, TEXT, TIME, URI, UTC-OFFSET - -########################################################################### - -iCalendar properties has values. The values are strongly typed. This module -defines these types, calling val.to_ical() on them, Will render them as defined -in rfc2445. - -If you pass any of these classes a Python primitive, you will have an object -that can render itself as iCalendar formatted date. - -Property Value Data Types starts with a 'v'. they all have an to_ical() and -from_ical() method. The to_ical() method generates a text string in the -iCalendar format. The from_ical() method can parse this format and return a -primitive Python datatype. 
So it should allways be true that: - - x == vDataType.from_ical(VDataType(x).to_ical()) - -These types are mainly used for parsing and file generation. But you can set -them directly. -""" -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from datetime import tzinfo -from dateutil.tz import tzutc -from icalendar import compat -from icalendar.caselessdict import CaselessDict -from icalendar.parser import Parameters -from icalendar.parser import escape_char -from icalendar.parser import tzid_from_dt -from icalendar.parser import unescape_char -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import SEQUENCE_TYPES -from icalendar.parser_tools import to_unicode - -import base64 -import binascii -import pytz -import re -import time as _time - - -DATE_PART = r'(\d+)D' -TIME_PART = r'T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?' -DATETIME_PART = '(?:%s)?(?:%s)?' % (DATE_PART, TIME_PART) -WEEKS_PART = r'(\d+)W' -DURATION_REGEX = re.compile(r'([-+]?)P(?:%s|%s)$' - % (WEEKS_PART, DATETIME_PART)) -WEEKDAY_RULE = re.compile('(?P[+-]?)(?P[\d]?)' - '(?P[\w]{2})$') - - -#################################################### -# handy tzinfo classes you can use. -# - -ZERO = timedelta(0) -HOUR = timedelta(hours=1) -STDOFFSET = timedelta(seconds=-_time.timezone) -if _time.daylight: - DSTOFFSET = timedelta(seconds=-_time.altzone) -else: - DSTOFFSET = STDOFFSET -DSTDIFF = DSTOFFSET - STDOFFSET - - -class FixedOffset(tzinfo): - """Fixed offset in minutes east from UTC. - """ - def __init__(self, offset, name): - self.__offset = timedelta(minutes=offset) - self.__name = name - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return self.__name - - def dst(self, dt): - return ZERO - - -class LocalTimezone(tzinfo): - """Timezone of the machine where the code is running. 
- """ - def utcoffset(self, dt): - if self._isdst(dt): - return DSTOFFSET - else: - return STDOFFSET - - def dst(self, dt): - if self._isdst(dt): - return DSTDIFF - else: - return ZERO - - def tzname(self, dt): - return _time.tzname[self._isdst(dt)] - - def _isdst(self, dt): - tt = (dt.year, dt.month, dt.day, - dt.hour, dt.minute, dt.second, - dt.weekday(), 0, -1) - stamp = _time.mktime(tt) - tt = _time.localtime(stamp) - return tt.tm_isdst > 0 - - -class vBinary(object): - """Binary property values are base 64 encoded. - """ - - def __init__(self, obj): - self.obj = to_unicode(obj) - self.params = Parameters(encoding='BASE64', value="BINARY") - - def __repr__(self): - return "vBinary('%s')" % self.to_ical() - - def to_ical(self): - return binascii.b2a_base64(self.obj.encode('utf-8'))[:-1] - - @staticmethod - def from_ical(ical): - try: - return base64.b64decode(ical) - except UnicodeError: - raise ValueError('Not valid base 64 encoding.') - - -class vBoolean(int): - """Returns specific string according to state. - """ - BOOL_MAP = CaselessDict(true=True, false=False) - - def __new__(cls, *args, **kwargs): - self = super(vBoolean, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - if self: - return b'TRUE' - return b'FALSE' - - @classmethod - def from_ical(cls, ical): - try: - return cls.BOOL_MAP[ical] - except: - raise ValueError("Expected 'TRUE' or 'FALSE'. Got %s" % ical) - - -class vCalAddress(compat.unicode_type): - """This just returns an unquoted string. - """ - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vCalAddress, cls).__new__(cls, value) - self.params = Parameters() - return self - - def __repr__(self): - return "vCalAddress('%s')" % self.to_ical() - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - return cls(ical) - - -class vFloat(float): - """Just a float. 
- """ - def __new__(cls, *args, **kwargs): - self = super(vFloat, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - return compat.unicode_type(self).encode('utf-8') - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected float value, got: %s' % ical) - - -class vInt(int): - """Just an int. - """ - def __new__(cls, *args, **kwargs): - self = super(vInt, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - return compat.unicode_type(self).encode('utf-8') - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected int, got: %s' % ical) - - -class vDDDLists(object): - """A list of vDDDTypes values. - """ - def __init__(self, dt_list): - if not hasattr(dt_list, '__iter__'): - dt_list = [dt_list] - vDDD = [] - tzid = None - for dt in dt_list: - dt = vDDDTypes(dt) - vDDD.append(dt) - if 'TZID' in dt.params: - tzid = dt.params['TZID'] - - if tzid: - # NOTE: no support for multiple timezones here! - self.params = Parameters({'TZID': tzid}) - self.dts = vDDD - - def to_ical(self): - dts_ical = (dt.to_ical() for dt in self.dts) - return b",".join(dts_ical) - - @staticmethod - def from_ical(ical, timezone=None): - out = [] - ical_dates = ical.split(",") - for ical_dt in ical_dates: - out.append(vDDDTypes.from_ical(ical_dt, timezone=timezone)) - return out - - -class vDDDTypes(object): - """A combined Datetime, Date or Duration parser/generator. Their format - cannot be confused, and often values can be of either types. - So this is practical. 
- """ - def __init__(self, dt): - if not isinstance(dt, (datetime, date, timedelta, time)): - raise ValueError('You must use datetime, date, timedelta or time') - if isinstance(dt, datetime): - self.params = Parameters(dict(value='DATE-TIME')) - elif isinstance(dt, date): - self.params = Parameters(dict(value='DATE')) - elif isinstance(dt, time): - self.params = Parameters(dict(value='TIME')) - - if (isinstance(dt, datetime) or isinstance(dt, time))\ - and getattr(dt, 'tzinfo', False): - tzinfo = dt.tzinfo - if tzinfo is not pytz.utc and not isinstance(tzinfo, tzutc): - # set the timezone as a parameter to the property - tzid = tzid_from_dt(dt) - if tzid: - self.params.update({'TZID': tzid}) - self.dt = dt - - def to_ical(self): - dt = self.dt - if isinstance(dt, datetime): - return vDatetime(dt).to_ical() - elif isinstance(dt, date): - return vDate(dt).to_ical() - elif isinstance(dt, timedelta): - return vDuration(dt).to_ical() - elif isinstance(dt, time): - return vTime(dt).to_ical() - else: - raise ValueError('Unknown date type') - - @classmethod - def from_ical(cls, ical, timezone=None): - if isinstance(ical, cls): - return ical.dt - u = ical.upper() - if u.startswith('-P') or u.startswith('P'): - return vDuration.from_ical(ical) - try: - return vDatetime.from_ical(ical, timezone=timezone) - except ValueError: - try: - return vDate.from_ical(ical) - except ValueError: - return vTime.from_ical(ical) - - -class vDate(object): - """Render and generates iCalendar date format. 
- """ - def __init__(self, dt): - if not isinstance(dt, date): - raise ValueError('Value MUST be a date instance') - self.dt = dt - self.params = Parameters(dict(value='DATE')) - - def to_ical(self): - s = "%04d%02d%02d" % (self.dt.year, self.dt.month, self.dt.day) - return s.encode('utf-8') - - @staticmethod - def from_ical(ical): - try: - timetuple = ( - int(ical[:4]), # year - int(ical[4:6]), # month - int(ical[6:8]), # day - ) - return date(*timetuple) - except: - raise ValueError('Wrong date format %s' % ical) - - -class vDatetime(object): - """Render and generates icalendar datetime format. - - vDatetime is timezone aware and uses the pytz library, an implementation of - the Olson database in Python. When a vDatetime object is created from an - ical string, you can pass a valid pytz timezone identifier. When a - vDatetime object is created from a python datetime object, it uses the - tzinfo component, if present. Otherwise an timezone-naive object is - created. Be aware that there are certain limitations with timezone naive - DATE-TIME components in the icalendar standard. 
- """ - def __init__(self, dt): - self.dt = dt - self.params = Parameters() - - def to_ical(self): - dt = self.dt - tzid = tzid_from_dt(dt) - - s = "%04d%02d%02dT%02d%02d%02d" % ( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second - ) - if tzid == 'UTC': - s += "Z" - elif tzid: - self.params.update({'TZID': tzid}) - return s.encode('utf-8') - - @staticmethod - def from_ical(ical, timezone=None): - tzinfo = None - if timezone: - try: - tzinfo = pytz.timezone(timezone) - except pytz.UnknownTimeZoneError: - pass - - try: - timetuple = ( - int(ical[:4]), # year - int(ical[4:6]), # month - int(ical[6:8]), # day - int(ical[9:11]), # hour - int(ical[11:13]), # minute - int(ical[13:15]), # second - ) - if tzinfo: - return tzinfo.localize(datetime(*timetuple)) - elif not ical[15:]: - return datetime(*timetuple) - elif ical[15:16] == 'Z': - return datetime(tzinfo=pytz.utc, *timetuple) - else: - raise ValueError(ical) - except: - raise ValueError('Wrong datetime format: %s' % ical) - - -class vDuration(object): - """Subclass of timedelta that renders itself in the iCalendar DURATION - format. 
- """ - - def __init__(self, td): - if not isinstance(td, timedelta): - raise ValueError('Value MUST be a timedelta instance') - self.td = td - self.params = Parameters() - - def to_ical(self): - sign = "" - if self.td.days < 0: - sign = "-" - self.td = -self.td - timepart = "" - if self.td.seconds: - timepart = "T" - hours = self.td.seconds // 3600 - minutes = self.td.seconds % 3600 // 60 - seconds = self.td.seconds % 60 - if hours: - timepart += "%dH" % hours - if minutes or (hours and seconds): - timepart += "%dM" % minutes - if seconds: - timepart += "%dS" % seconds - if self.td.days == 0 and timepart: - return (compat.unicode_type(sign).encode('utf-8') + b'P' + - compat.unicode_type(timepart).encode('utf-8')) - else: - return (compat.unicode_type(sign).encode('utf-8') + b'P' + - compat.unicode_type(abs(self.td.days)).encode('utf-8') + - b'D' + compat.unicode_type(timepart).encode('utf-8')) - - @staticmethod - def from_ical(ical): - try: - match = DURATION_REGEX.match(ical) - sign, weeks, days, hours, minutes, seconds = match.groups() - if weeks: - value = timedelta(weeks=int(weeks)) - else: - value = timedelta(days=int(days or 0), - hours=int(hours or 0), - minutes=int(minutes or 0), - seconds=int(seconds or 0)) - if sign == '-': - value = -value - return value - except: - raise ValueError('Invalid iCalendar duration: %s' % ical) - - -class vPeriod(object): - """A precise period of time. 
- """ - def __init__(self, per): - start, end_or_duration = per - if not (isinstance(start, datetime) or isinstance(start, date)): - raise ValueError('Start value MUST be a datetime or date instance') - if not (isinstance(end_or_duration, datetime) or - isinstance(end_or_duration, date) or - isinstance(end_or_duration, timedelta)): - raise ValueError('end_or_duration MUST be a datetime, ' - 'date or timedelta instance') - by_duration = 0 - if isinstance(end_or_duration, timedelta): - by_duration = 1 - duration = end_or_duration - end = start + duration - else: - end = end_or_duration - duration = end - start - if start > end: - raise ValueError("Start time is greater than end time") - - self.params = Parameters() - # set the timezone identifier - # does not support different timezones for start and end - tzid = tzid_from_dt(start) - if tzid: - self.params['TZID'] = tzid - - self.start = start - self.end = end - self.by_duration = by_duration - self.duration = duration - - def __cmp__(self, other): - if not isinstance(other, vPeriod): - raise NotImplementedError('Cannot compare vPeriod with %r' % other) - return cmp((self.start, self.end), (other.start, other.end)) - - def overlaps(self, other): - if self.start > other.start: - return other.overlaps(self) - if self.start <= other.start < self.end: - return True - return False - - def to_ical(self): - if self.by_duration: - return (vDatetime(self.start).to_ical() + b'/' + - vDuration(self.duration).to_ical()) - return (vDatetime(self.start).to_ical() + b'/' + - vDatetime(self.end).to_ical()) - - @staticmethod - def from_ical(ical): - try: - start, end_or_duration = ical.split('/') - start = vDDDTypes.from_ical(start) - end_or_duration = vDDDTypes.from_ical(end_or_duration) - return (start, end_or_duration) - except: - raise ValueError('Expected period format, got: %s' % ical) - - def __repr__(self): - if self.by_duration: - p = (self.start, self.duration) - else: - p = (self.start, self.end) - return 'vPeriod(%r)' % 
p - - -class vWeekday(compat.unicode_type): - """This returns an unquoted weekday abbrevation. - """ - week_days = CaselessDict({ - "SU": 0, "MO": 1, "TU": 2, "WE": 3, "TH": 4, "FR": 5, "SA": 6, - }) - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vWeekday, cls).__new__(cls, value) - match = WEEKDAY_RULE.match(self) - if match is None: - raise ValueError('Expected weekday abbrevation, got: %s' % self) - match = match.groupdict() - sign = match['signal'] - weekday = match['weekday'] - relative = match['relative'] - if not weekday in vWeekday.week_days or sign not in '+-': - raise ValueError('Expected weekday abbrevation, got: %s' % self) - self.relative = relative and int(relative) or None - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING).upper() - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical.upper()) - except: - raise ValueError('Expected weekday abbrevation, got: %s' % ical) - - -class vFrequency(compat.unicode_type): - """A simple class that catches illegal values. - """ - - frequencies = CaselessDict({ - "SECONDLY": "SECONDLY", - "MINUTELY": "MINUTELY", - "HOURLY": "HOURLY", - "DAILY": "DAILY", - "WEEKLY": "WEEKLY", - "MONTHLY": "MONTHLY", - "YEARLY": "YEARLY", - }) - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vFrequency, cls).__new__(cls, value) - if not self in vFrequency.frequencies: - raise ValueError('Expected frequency, got: %s' % self) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING).upper() - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical.upper()) - except: - raise ValueError('Expected frequency, got: %s' % ical) - - -class vRecur(CaselessDict): - """Recurrence definition. 
- """ - - frequencies = ["SECONDLY", "MINUTELY", "HOURLY", "DAILY", "WEEKLY", - "MONTHLY", "YEARLY"] - - # Mac iCal ignores RRULEs where FREQ is not the first rule part. - # Sorts parts according to the order listed in RFC 5545, section 3.3.10. - canonical_order = ("FREQ", "UNTIL", "COUNT", "INTERVAL", - "BYSECOND", "BYMINUTE", "BYHOUR", "BYDAY", - "BYMONTHDAY", "BYYEARDAY", "BYWEEKNO", "BYMONTH", - "BYSETPOS", "WKST") - - types = CaselessDict({ - 'COUNT': vInt, - 'INTERVAL': vInt, - 'BYSECOND': vInt, - 'BYMINUTE': vInt, - 'BYHOUR': vInt, - 'BYMONTHDAY': vInt, - 'BYYEARDAY': vInt, - 'BYMONTH': vInt, - 'UNTIL': vDDDTypes, - 'BYSETPOS': vInt, - 'WKST': vWeekday, - 'BYDAY': vWeekday, - 'FREQ': vFrequency, - }) - - def __init__(self, *args, **kwargs): - CaselessDict.__init__(self, *args, **kwargs) - self.params = Parameters() - - def to_ical(self): - result = [] - for key, vals in self.sorted_items(): - typ = self.types[key] - if not isinstance(vals, SEQUENCE_TYPES): - vals = [vals] - vals = b','.join(typ(val).to_ical() for val in vals) - - # CaselessDict keys are always unicode - key = key.encode(DEFAULT_ENCODING) - result.append(key + b'=' + vals) - - return b';'.join(result) - - @classmethod - def parse_type(cls, key, values): - # integers - parser = cls.types.get(key, vText) - return [parser.from_ical(v) for v in values.split(',')] - - @classmethod - def from_ical(cls, ical): - if isinstance(ical, cls): - return ical - try: - recur = cls() - for pairs in ical.split(';'): - key, vals = pairs.split('=') - recur[key] = cls.parse_type(key, vals) - return dict(recur) - except: - raise ValueError('Error in recurrence rule: %s' % ical) - - -class vText(compat.unicode_type): - """Simple text. 
- """ - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vText, cls).__new__(cls, value) - self.encoding = encoding - self.params = Parameters() - return self - - def __repr__(self): - return "vText('%s')" % self.to_ical() - - def to_ical(self): - return escape_char(self).encode(self.encoding) - - @classmethod - def from_ical(cls, ical): - ical_unesc = unescape_char(ical) - return cls(ical_unesc) - - -class vTime(object): - """Render and generates iCalendar time format. - """ - - def __init__(self, *args): - if len(args) == 1: - if not isinstance(args[0], (time, datetime)): - raise ValueError('Expected a datetime.time, got: %s' % args[0]) - self.dt = args[0] - else: - self.dt = time(*args) - self.params = Parameters(dict(value='TIME')) - - def to_ical(self): - return self.dt.strftime("%H%M%S") - - @staticmethod - def from_ical(ical): - # TODO: timezone support - try: - timetuple = (int(ical[:2]), int(ical[2:4]), int(ical[4:6])) - return time(*timetuple) - except: - raise ValueError('Expected time, got: %s' % ical) - - -class vUri(compat.unicode_type): - """Uniform resource identifier is basically just an unquoted string. - """ - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vUri, cls).__new__(cls, value) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected , got: %s' % ical) - - -class vGeo(object): - """A special type that is only indirectly defined in the rfc. 
- """ - - def __init__(self, geo): - try: - latitude, longitude = (geo[0], geo[1]) - latitude = float(latitude) - longitude = float(longitude) - except: - raise ValueError('Input must be (float, float) for ' - 'latitude and longitude') - self.latitude = latitude - self.longitude = longitude - self.params = Parameters() - - def to_ical(self): - return '%s;%s' % (self.latitude, self.longitude) - - @staticmethod - def from_ical(ical): - try: - latitude, longitude = ical.split(';') - return (float(latitude), float(longitude)) - except: - raise ValueError("Expected 'float;float' , got: %s" % ical) - - -class vUTCOffset(object): - """Renders itself as a utc offset. - """ - - def __init__(self, td): - if not isinstance(td, timedelta): - raise ValueError('Offset value MUST be a timedelta instance') - self.td = td - self.params = Parameters() - - def to_ical(self): - - if self.td < timedelta(0): - sign = '-%s' - td = timedelta(0)-self.td # get timedelta relative to 0 - else: - # Google Calendar rejects '0000' but accepts '+0000' - sign = '+%s' - td = self.td - - days, seconds = td.days, td.seconds - - hours = abs(days * 24 + seconds // 3600) - minutes = abs((seconds % 3600) // 60) - seconds = abs(seconds % 60) - if seconds: - duration = '%02i%02i%02i' % (hours, minutes, seconds) - else: - duration = '%02i%02i' % (hours, minutes) - return sign % duration - - @classmethod - def from_ical(cls, ical): - if isinstance(ical, cls): - return ical.td - try: - sign, hours, minutes, seconds = (ical[0:1], - int(ical[1:3]), - int(ical[3:5]), - int(ical[5:7] or 0)) - offset = timedelta(hours=hours, minutes=minutes, seconds=seconds) - except: - raise ValueError('Expected utc offset, got: %s' % ical) - if offset >= timedelta(hours=24): - raise ValueError( - 'Offset must be less than 24 hours, was %s' % ical) - if sign == '-': - return -offset - return offset - - -class vInline(compat.unicode_type): - """This is an especially dumb class that just holds raw unparsed text and - has 
parameters. Conversion of inline values are handled by the Component - class, so no further processing is needed. - """ - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vInline, cls).__new__(cls, value) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - return cls(ical) - - -class TypesFactory(CaselessDict): - """All Value types defined in rfc 2445 are registered in this factory - class. - - The value and parameter names don't overlap. So one factory is enough for - both kinds. - """ - - def __init__(self, *args, **kwargs): - "Set keys to upper for initial dict" - CaselessDict.__init__(self, *args, **kwargs) - self.all_types = ( - vBinary, - vBoolean, - vCalAddress, - vDDDLists, - vDDDTypes, - vDate, - vDatetime, - vDuration, - vFloat, - vFrequency, - vGeo, - vInline, - vInt, - vPeriod, - vRecur, - vText, - vTime, - vUTCOffset, - vUri, - vWeekday - ) - self['binary'] = vBinary - self['boolean'] = vBoolean - self['cal-address'] = vCalAddress - self['date'] = vDDDTypes - self['date-time'] = vDDDTypes - self['duration'] = vDDDTypes - self['float'] = vFloat - self['integer'] = vInt - self['period'] = vPeriod - self['recur'] = vRecur - self['text'] = vText - self['time'] = vTime - self['uri'] = vUri - self['utc-offset'] = vUTCOffset - self['geo'] = vGeo - self['inline'] = vInline - self['date-time-list'] = vDDDLists - - ################################################# - # Property types - - # These are the default types - types_map = CaselessDict({ - #################################### - # Property value types - # Calendar Properties - 'calscale': 'text', - 'method': 'text', - 'prodid': 'text', - 'version': 'text', - # Descriptive Component Properties - 'attach': 'uri', - 'categories': 'text', - 'class': 'text', - 'comment': 'text', - 'description': 'text', - 'geo': 'geo', - 'location': 'text', - 
'percent-complete': 'integer', - 'priority': 'integer', - 'resources': 'text', - 'status': 'text', - 'summary': 'text', - # Date and Time Component Properties - 'completed': 'date-time', - 'dtend': 'date-time', - 'due': 'date-time', - 'dtstart': 'date-time', - 'duration': 'duration', - 'freebusy': 'period', - 'transp': 'text', - # Time Zone Component Properties - 'tzid': 'text', - 'tzname': 'text', - 'tzoffsetfrom': 'utc-offset', - 'tzoffsetto': 'utc-offset', - 'tzurl': 'uri', - # Relationship Component Properties - 'attendee': 'cal-address', - 'contact': 'text', - 'organizer': 'cal-address', - 'recurrence-id': 'date-time', - 'related-to': 'text', - 'url': 'uri', - 'uid': 'text', - # Recurrence Component Properties - 'exdate': 'date-time-list', - 'exrule': 'recur', - 'rdate': 'date-time-list', - 'rrule': 'recur', - # Alarm Component Properties - 'action': 'text', - 'repeat': 'integer', - 'trigger': 'duration', - # Change Management Component Properties - 'created': 'date-time', - 'dtstamp': 'date-time', - 'last-modified': 'date-time', - 'sequence': 'integer', - # Miscellaneous Component Properties - 'request-status': 'text', - #################################### - # parameter types (luckily there is no name overlap) - 'altrep': 'uri', - 'cn': 'text', - 'cutype': 'text', - 'delegated-from': 'cal-address', - 'delegated-to': 'cal-address', - 'dir': 'uri', - 'encoding': 'text', - 'fmttype': 'text', - 'fbtype': 'text', - 'language': 'text', - 'member': 'cal-address', - 'partstat': 'text', - 'range': 'text', - 'related': 'text', - 'reltype': 'text', - 'role': 'text', - 'rsvp': 'boolean', - 'sent-by': 'cal-address', - 'tzid': 'text', - 'value': 'text', - }) - - def for_property(self, name): - """Returns a the default type for a property or parameter - """ - return self[self.types_map.get(name, 'text')] - - def to_ical(self, name, value): - """Encodes a named value from a primitive python type to an icalendar - encoded string. 
- """ - type_class = self.for_property(name) - return type_class(value).to_ical() - - def from_ical(self, name, value): - """Decodes a named property or parameter value from an icalendar - encoded string to a primitive python type. - """ - type_class = self.for_property(name) - decoded = type_class.from_ical(value) - return decoded diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/__init__.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/__init__.py deleted file mode 100644 index aba627b..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# unittest/unittest2 importer -import unittest -if not hasattr(unittest.TestCase, 'assertIsNotNone'): - import unittest2 as unittest -unittest # pep 8 diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/encoding.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/encoding.ics deleted file mode 100644 index 5a0047e..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/encoding.ics +++ /dev/null @@ -1,16 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Plönë.org//NONSGML plone.app.event//EN -VERSION:2.0 -X-WR-CALNAME:äöü ÄÖÜ € -X-WR-CALDESC:test non ascii: äöü ÄÖÜ € -X-WR-RELCALID:12345 -BEGIN:VEVENT -DTSTART:20101010T100000Z -DTEND:20101010T120000Z -CREATED:20101010T100000Z -UID:123456 -SUMMARY:Non-ASCII Test: ÄÖÜ äöü € -DESCRIPTION:icalendar should be able to handle non-ascii: €äüöÄÜÖ. 
-LOCATION:Tribstrül -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics deleted file mode 100644 index 2356cdc..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics +++ /dev/null @@ -1,48 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Google Inc//Google Calendar 70.9054//EN -VERSION:2.0 -CALSCALE:GREGORIAN -METHOD:PUBLISH -X-WR-CALNAME:Market East -X-WR-TIMEZONE:America/New_York -X-WR-CALDESC: -BEGIN:VTIMEZONE -TZID:America/New_York -X-LIC-LOCATION:America/New_York -BEGIN:DAYLIGHT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -TZNAME:EDT -DTSTART:19700308T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -TZNAME:EST -DTSTART:19701101T020000 -RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU -END:STANDARD -END:VTIMEZONE - -BEGIN:VEVENT -DTSTART;TZID=America/New_York:20130907T120000 -DTEND;TZID=America/New_York:20130907T170000 -RRULE:FREQ=WEEKLY;BYDAY=FR,SA;UNTIL=20131025T035959Z -EXDATE;TZID=America/New_York:20131012T120000 -EXDATE;TZID=America/New_York:20131011T120000 -DTSTAMP:20131021T025552Z -UID:ak30b02u7858q1oo6ji9dm4mgg@google.com -CREATED:20130903T181453Z -DESCRIPTION:The Fieldhouse and Hard Rock Cafe are working with PhillyRising - to provide live entertainment on Friday and Saturday afternoons throughout - the Summer. -LAST-MODIFIED:20131015T210927Z -LOCATION:12th and Market Streets (weather permitting) -SEQUENCE:0 -STATUS:CONFIRMED -SUMMARY:Market East Live! 
-TRANSP:OPAQUE -END:VEVENT - -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_114_invalid_line.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_114_invalid_line.ics deleted file mode 100644 index 53965f0..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_114_invalid_line.ics +++ /dev/null @@ -1,41 +0,0 @@ -BEGIN:VEVENT -DTSTART:20130927T130000Z -DTEND:20130927T140000Z -DTSTAMP:20131107T004757Z -ORGANIZER;CN=gxxxxxxxn@nxx.fr:mailto:gxxxxxn@nxx.fr -UID:040000008200E00074C5B7101A82E00800000000A0F3321606B6CE01000000000000000 - 010000000F09F33F0E8ED4C44B99F6027ACF588D0 -ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;CN=St - eve Bxxxxxx;X-NUM-GUESTS=0:mailto:sxxxxxt@nxx.fr -ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;CN=Boris - Hxxxxx;X-NUM-GUESTS=0:mailto:bxxxxxxk@vxxxxxxxx.com -CREATED:20130920T113409Z -DESCRIPTION:Quand : vendredi 27 septembre 2013 15:00-16:00 (UTC+01:00) Brux - elles\, Copenhague\, Madrid\, Paris.\nEmplacement : Conf-Call - 01 xx xx xx - xx\n\nRemarque : le décalage GMT ci-dessus ne tient pas compte des réglage - s de l'heure d'été.\n\n*~*~*~*~*~*~*~*~*~*\n\nComme convenu à l’instant par - e-mail\n -LAST-MODIFIED:20130920T115104Z -LOCATION:Conf-Call - 01 xx xx xx xx -SEQUENCE:0 -STATUS:CONFIRMED -SUMMARY:Nxx - Réunion lancement PxxxxxxT -TRANSP:OPAQUE -X-ALT-DESC;FMTTYPE=text/html:\n\n\n\n\n\n\n\n\n

Qu - and : vendredi 27 septembre 2013 15:00-16:00 (UTC+01:00) Bruxelles\, Copenh - ague\, Madrid\, Paris.

\n\n

Emplacement : Conf-Call - 01 xx xx xx xx

- \n\n

Remarque : le décalage - GMT ci-dessus ne tient pas compte des réglages de l'heure d'été.

\n\n

*~*~*~*~*~*~*~*~ - *~*

\n\n

Co - mme convenu à l’instant par e-mail

- \n\n\n -X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE -X-MICROSOFT-CDO-IMPORTANCE:1 -X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY -X -END:VEVENT diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_53_parsing_failure.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_53_parsing_failure.ics deleted file mode 100644 index 67f42cf..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/issue_53_parsing_failure.ics +++ /dev/null @@ -1,78 +0,0 @@ -BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Meetup//RemoteApi//EN -CALSCALE:GREGORIAN -METHOD:PUBLISH -X-ORIGINAL-URL:http://www.meetup.com/DevOpsDC/events/ical/DevOpsDC/ -X-WR-CALNAME:Events - DevOpsDC -BEGIN:VTIMEZONE -TZID:America/New_York -TZURL:http://tzurl.org/zoneinfo-outlook/America/New_York -X-LIC-LOCATION:America/New_York -BEGIN:DAYLIGHT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -TZNAME:EDT -DTSTART:19700308T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -TZNAME:EST -DTSTART:19701101T020000 -RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20120712T183000 -DTEND;TZID=America/New_York:20120712T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nThursday\, July 12 at 6:30 PM\n\nThis will be a joi - nt meetup / hack night with the DC jQuery Users Group. The idea behind - the hack night: Small teams consisting of at least 1 member...\n\nDeta - ils: http://www.meetup.com/DevOpsDC/events/47635522/ -CLASS:PUBLIC -CREATED:20120111T120339Z -GEO:38.90;-77.01 -LOCATION:Fathom Creative\, Inc. 
(1333 14th Street Northwest\, Washington - D.C.\, DC 20005) -URL:http://www.meetup.com/DevOpsDC/events/47635522/ -LAST-MODIFIED:20120522T174406Z -UID:event_qtkfrcyqkbnb@meetup.com -END:VEVENT -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20120911T183000 -DTEND;TZID=America/New_York:20120911T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nTuesday\, September 11 at 6:30 PM\n\n \n\nDetails: - http://www.meetup.com/DevOpsDC/events/47635532/ -CLASS:PUBLIC -CREATED:20120111T120352Z -GEO:38.90;-77.01 -LOCATION:CustomInk\, LLC (7902 Westpark Drive\, McLean\, VA 22102) -URL:http://www.meetup.com/DevOpsDC/events/47635532/ -LAST-MODIFIED:20120316T202210Z -UID:event_qtkfrcyqmbpb@meetup.com -END:VEVENT -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20121113T183000 -DTEND;TZID=America/New_York:20121113T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nTuesday\, November 13 at 6:30 PM\n\n \n\nDetails: h - ttp://www.meetup.com/DevOpsDC/events/47635552/ -CLASS:PUBLIC -CREATED:20120111T120402Z -GEO:38.90;-77.01 -LOCATION:CustomInk\, LLC (7902 Westpark Drive\, McLean\, VA 22102) -URL:http://www.meetup.com/DevOpsDC/events/47635552/ -LAST-MODIFIED:20120316T202210Z -UID:event_qtkfrcyqpbrb@meetup.com -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/multiple.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/multiple.ics deleted file mode 100644 index dbbde27..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/multiple.ics +++ /dev/null @@ -1,80 +0,0 @@ -BEGIN:VCALENDAR -VERSION - - :2.0 -PRODID - - :-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN -METHOD - - :PUBLISH -BEGIN:VEVENT -UID - - :956630271 -SUMMARY - - :Christmas Day -CLASS - - :PUBLIC -X-MOZILLA-ALARM-DEFAULT-UNITS - - :minutes -X-MOZILLA-ALARM-DEFAULT-LENGTH - - :15 -X-MOZILLA-RECUR-DEFAULT-UNITS - - :weeks -X-MOZILLA-RECUR-DEFAULT-INTERVAL - - :1 -DTSTART - - ;VALUE=DATE - 
:20031225 -DTEND - - ;VALUE=DATE - :20031226 -DTSTAMP - - :20020430T114937Z -END:VEVENT -END:VCALENDAR -BEGIN:VCALENDAR -VERSION - :2.0 -PRODID - :-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN -METHOD - :PUBLISH -BEGIN:VEVENT -UID - :911737808 -SUMMARY - :Boxing Day -CLASS - :PUBLIC -X-MOZILLA-ALARM-DEFAULT-UNITS - :minutes -X-MOZILLA-ALARM-DEFAULT-LENGTH - :15 -X-MOZILLA-RECUR-DEFAULT-UNITS - :weeks -X-MOZILLA-RECUR-DEFAULT-INTERVAL - :1 -DTSTART - ;VALUE=DATE - :20030501 -DTSTAMP - :20020430T114937Z -END:VEVENT -BEGIN:VEVENT -UID - :wh4t3v3r -DTSTART;VALUE=DATE:20031225 -SUMMARY:Christmas again! -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/recurrence.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/recurrence.ics deleted file mode 100644 index a5596a5..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/recurrence.ics +++ /dev/null @@ -1,24 +0,0 @@ -BEGIN:VCALENDAR -METHOD:Request -PRODID:-//My product//mxm.dk/ -VERSION:2.0 -BEGIN:VEVENT -DTSTART:19960401T010000 -DTEND:19960401T020000 -RRULE:FREQ=DAILY;COUNT=100 -EXDATE:19960402T010000Z,19960403T010000Z,19960404T010000Z -SUMMARY:A recurring event with exdates -END:VEVENT -BEGIN:VEVENT -DTSTART;TZID=Europe/Vienna:20120327T100000 -DTEND;TZID=Europe/Vienna:20120327T180000 -RRULE:FREQ=WEEKLY;UNTIL=20120703T080000Z;BYDAY=TU -EXDATE;TZID=Europe/Vienna:20120529T100000 -EXDATE;TZID=Europe/Vienna:20120403T100000 -EXDATE;TZID=Europe/Vienna:20120410T100000 -EXDATE;TZID=Europe/Vienna:20120501T100000 -EXDATE;TZID=Europe/Vienna:20120417T100000 -DTSTAMP:20130716T120638Z -SUMMARY:A Recurring event with multiple exdates, one per line. 
-END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_encoding.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_encoding.py deleted file mode 100644 index 196b4a9..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_encoding.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestEncoding(unittest.TestCase): - - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - data = open(os.path.join(directory, 'encoding.ics'), 'rb').read() - cal = icalendar.Calendar.from_ical(data) - - self.assertEqual(cal['prodid'].to_ical().decode('utf-8'), - u"-//Plönë.org//NONSGML plone.app.event//EN") - self.assertEqual(cal['X-WR-CALDESC'].to_ical().decode('utf-8'), - u"test non ascii: äöü ÄÖÜ €") - - event = cal.walk('VEVENT')[0] - self.assertEqual(event['SUMMARY'].to_ical().decode('utf-8'), - u'Non-ASCII Test: ÄÖÜ äöü €') - self.assertEqual( - event['DESCRIPTION'].to_ical().decode('utf-8'), - u'icalendar should be able to handle non-ascii: €äüöÄÜÖ.' - ) - self.assertEqual(event['LOCATION'].to_ical().decode('utf-8'), - u'Tribstrül') - - def test_create_to_ical(self): - cal = icalendar.Calendar() - - cal.add('prodid', u"-//Plönë.org//NONSGML plone.app.event//EN") - cal.add('version', u"2.0") - cal.add('x-wr-calname', u"äöü ÄÖÜ €") - cal.add('x-wr-caldesc', u"test non ascii: äöü ÄÖÜ €") - cal.add('x-wr-relcalid', u"12345") - - event = icalendar.Event() - event.add( - 'dtstart', - datetime.datetime(2010, 10, 10, 10, 00, 00, tzinfo=pytz.utc) - ) - event.add( - 'dtend', - datetime.datetime(2010, 10, 10, 12, 00, 00, tzinfo=pytz.utc) - ) - event.add( - 'created', - datetime.datetime(2010, 10, 10, 0, 0, 0, tzinfo=pytz.utc) - ) - event.add('uid', u'123456') - event.add('summary', u'Non-ASCII Test: ÄÖÜ äöü €') - event.add( - 'description', - u'icalendar should be able to de/serialize non-ascii.' 
- ) - event.add('location', u'Tribstrül') - cal.add_component(event) - - ical_lines = cal.to_ical().splitlines() - cmp = b'PRODID:-//Pl\xc3\xb6n\xc3\xab.org//NONSGML plone.app.event//EN' - self.assertTrue(cmp in ical_lines) - - def test_create_event_simple(self): - event = icalendar.Event() - event.add( - "dtstart", - datetime.datetime(2010, 10, 10, 0, 0, 0, tzinfo=pytz.utc) - ) - event.add("summary", u"åäö") - out = event.to_ical() - summary = b'SUMMARY:\xc3\xa5\xc3\xa4\xc3\xb6' - self.assertTrue(summary in out.splitlines()) - - def test_unicode_parameter_name(self): - # Test for issue #80 - cal = icalendar.Calendar() - event = icalendar.Event() - event.add(u'DESCRIPTION', u'äöüßÄÖÜ') - cal.add_component(event) - c = cal.to_ical() - self.assertEqual( - c, - b'BEGIN:VCALENDAR\r\nBEGIN:VEVENT\r\nDESCRIPTION:' - + b'\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\xc3\x84\xc3\x96\xc3\x9c\r\n' - + b'END:VEVENT\r\nEND:VCALENDAR\r\n' - ) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_fixed_issues.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_fixed_issues.py deleted file mode 100644 index c860d71..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_fixed_issues.py +++ /dev/null @@ -1,247 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import to_unicode -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestIssues(unittest.TestCase): - - def test_issue_53(self): - """Issue #53 - Parsing failure on some descriptions? 
- https://github.com/collective/icalendar/issues/53 - """ - - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'issue_53_parsing_failure.ics'), - 'rb') - cal = icalendar.Calendar.from_ical(ics.read()) - ics.close() - - event = cal.walk('VEVENT')[0] - desc = event.get('DESCRIPTION') - self.assertTrue(b'July 12 at 6:30 PM' in desc.to_ical()) - - timezones = cal.walk('VTIMEZONE') - self.assertEqual(len(timezones), 1) - tz = timezones[0] - self.assertEqual(tz['tzid'].to_ical(), b"America/New_York") - - def test_issue_55(self): - """Issue #55 - Parse error on utc-offset with seconds value - https://github.com/collective/icalendar/issues/55 - """ - ical_str = """BEGIN:VTIMEZONE -TZID:America/Los Angeles -BEGIN:STANDARD -DTSTART:18831118T120702 -RDATE:18831118T120702 -TZNAME:PST -TZOFFSETFROM:-075258 -TZOFFSETTO:-0800 -END:STANDARD -END:VTIMEZONE""" - - tz = icalendar.Timezone.from_ical(ical_str) - self.assertEqual( - tz.to_ical(), - b'BEGIN:VTIMEZONE\r\nTZID:America/Los Angeles\r\n' - b'BEGIN:STANDARD\r\n' - b'DTSTART:18831118T120702\r\nRDATE:18831118T120702\r\nTZNAME:PST' - b'\r\nTZOFFSETFROM:-075258\r\nTZOFFSETTO:-0800\r\n' - b'END:STANDARD\r\n' - b'END:VTIMEZONE\r\n') - - def test_issue_58(self): - """Issue #58 - TZID on UTC DATE-TIMEs - https://github.com/collective/icalendar/issues/58 - """ - - # According to RFC 2445: "The TZID property parameter MUST NOT be - # applied to DATE-TIME or TIME properties whose time values are - # specified in UTC." 
- - event = icalendar.Event() - dt = pytz.utc.localize(datetime.datetime(2012, 7, 16, 0, 0, 0)) - event.add('dtstart', dt) - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\n" - b"DTSTART;VALUE=DATE-TIME:20120716T000000Z\r\n" - b"END:VEVENT\r\n" - ) - - def test_issue_64(self): - """Issue #64 - Event.to_ical() fails for unicode strings - https://github.com/collective/icalendar/issues/64 - """ - - # Non-unicode characters - event = icalendar.Event() - event.add("dtstart", datetime.datetime(2012, 9, 3, 0, 0, 0)) - event.add("summary", u"abcdef") - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\nSUMMARY:abcdef\r\nDTSTART;VALUE=DATE-TIME:" - b"20120903T000000\r\nEND:VEVENT\r\n" - ) - - # Unicode characters - event = icalendar.Event() - event.add("dtstart", datetime.datetime(2012, 9, 3, 0, 0, 0)) - event.add("summary", u"åäö") - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\nSUMMARY:\xc3\xa5\xc3\xa4\xc3\xb6\r\n" - b"DTSTART;VALUE=DATE-TIME:20120903T000000\r\nEND:VEVENT\r\n" - ) - - def test_issue_70(self): - """Issue #70 - e.decode("RRULE") causes Attribute Error - https://github.com/collective/icalendar/issues/70 - """ - - ical_str = """BEGIN:VEVENT -CREATED:20081114T072804Z -UID:D449CA84-00A3-4E55-83E1-34B58268853B -DTEND:20070220T180000 -RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20070619T225959 -TRANSP:OPAQUE -SUMMARY:Esb mellon phone conf -DTSTART:20070220T170000 -DTSTAMP:20070221T095412Z -SEQUENCE:0 -END:VEVENT""" - - cal = icalendar.Calendar.from_ical(ical_str) - recur = cal.decoded("RRULE") - self.assertIsInstance(recur, icalendar.vRecur) - self.assertEqual( - recur.to_ical(), - b'FREQ=WEEKLY;UNTIL=20070619T225959;INTERVAL=1' - ) - - def test_issue_82(self): - """Issue #82 - vBinary __repr__ called rather than to_ical from - container types - https://github.com/collective/icalendar/issues/82 - """ - - b = icalendar.vBinary('text') - b.params['FMTTYPE'] = 'text/plain' - self.assertEqual(b.to_ical(), b'dGV4dA==') - e = icalendar.Event() - 
e.add('ATTACH', b) - self.assertEqual( - e.to_ical(), - b"BEGIN:VEVENT\r\nATTACH;ENCODING=BASE64;FMTTYPE=text/plain;" - b"VALUE=BINARY:dGV4dA==\r\nEND:VEVENT\r\n" - ) - - def test_issue_100(self): - """Issue #100 - Transformed doctests into unittests, Test fixes and - cleanup. - https://github.com/collective/icalendar/pull/100 - """ - - ical_content = "BEGIN:VEVENT\r\nSUMMARY;LANGUAGE=ru:te\r\nEND:VEVENT" - icalendar.Event.from_ical(ical_content).to_ical() - - def test_issue_101(self): - """Issue #101 - icalender is choking on umlauts in ORGANIZER - - https://github.com/collective/icalendar/issues/101 - """ - ical_str = """BEGIN:VCALENDAR -VERSION:2.0 -X-WR-CALNAME:Kalender von acme\, admin -PRODID:-//The Horde Project//Horde_iCalendar Library\, Horde 3.3.5//EN -METHOD:PUBLISH -BEGIN:VEVENT -DTSTART:20130416T100000Z -DTEND:20130416T110000Z -DTSTAMP:20130416T092616Z -UID:20130416112341.10064jz0k4j7uem8@acmenet.de -CREATED:20130416T092341Z -LAST-MODIFIED:20130416T092341Z -SUMMARY:wichtiger termin 1 -ORGANIZER;CN="acme, ädmin":mailto:adm-acme@mydomain.de -LOCATION:im büro -CLASS:PUBLIC -STATUS:CONFIRMED -TRANSP:OPAQUE -END:VEVENT -END:VCALENDAR""" - - cal = icalendar.Calendar.from_ical(ical_str) - org_cn = cal.walk('VEVENT')[0]['ORGANIZER'].params['CN'] - self.assertEqual(org_cn, u'acme, ädmin') - - def test_issue_112(self): - """Issue #112 - No timezone info on EXDATE - https://github.com/collective/icalendar/issues/112 - """ - directory = os.path.dirname(__file__) - path = os.path.join(directory, - 'issue_112_missing_tzinfo_on_exdate.ics') - with open(path, 'rb') as ics: - cal = icalendar.Calendar.from_ical(ics.read()) - event = cal.walk('VEVENT')[0] - - event_ical = to_unicode(event.to_ical()) # Py3 str type doesn't - # support buffer API - # General timezone aware dates in ical string - self.assertTrue('DTSTART;TZID=America/New_York:20130907T120000' - in event_ical) - self.assertTrue('DTEND;TZID=America/New_York:20130907T170000' - in event_ical) - # Specific 
timezone aware exdates in ical string - self.assertTrue('EXDATE;TZID=America/New_York:20131012T120000' - in event_ical) - self.assertTrue('EXDATE;TZID=America/New_York:20131011T120000' - in event_ical) - - self.assertEqual(event['exdate'][0].dts[0].dt.tzname(), 'EDT') - - def test_issue_114(self): - """Issue #114/#115 - invalid line in event breaks the parser - https://github.com/collective/icalendar/issues/114 - """ - - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'issue_114_invalid_line.ics'), 'rb') - with self.assertRaises(ValueError): - cal = icalendar.Calendar.from_ical(ics.read()) - cal # pep 8 - ics.close() - - def test_issue_116(self): - """Issue #116/#117 - How to add 'X-APPLE-STRUCTURED-LOCATION' - """ - event = icalendar.Event() - event.add( - "X-APPLE-STRUCTURED-LOCATION", - "geo:-33.868900,151.207000", - parameters={ - "VALUE": "URI", - "X-ADDRESS": "367 George Street Sydney CBD NSW 2000", - "X-APPLE-RADIUS": "72", - "X-TITLE": "367 George Street" - } - ) - self.assertEqual( - event.to_ical(), - b'BEGIN:VEVENT\r\nX-APPLE-STRUCTURED-LOCATION;VALUE=URI;' - b'X-ADDRESS="367 George Street Sydney \r\n CBD NSW 2000";' - b'X-APPLE-RADIUS=72;X-TITLE="367 George Street":' - b'geo:-33.868900\r\n \\,151.207000\r\nEND:VEVENT\r\n' - ) - - # roundtrip - self.assertEqual( - event.to_ical(), - icalendar.Event.from_ical(event.to_ical()).to_ical() - ) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_icalendar.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_icalendar.py deleted file mode 100644 index 22ebd80..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_icalendar.py +++ /dev/null @@ -1,253 +0,0 @@ -# coding: utf-8 -from icalendar.tests import unittest - - -class IcalendarTestCase (unittest.TestCase): - - def test_long_lines(self): - from ..parser import Contentlines, Contentline - c = Contentlines([Contentline('BEGIN:VEVENT')]) - c.append(Contentline(''.join('123456789 ' * 10))) - 
self.assertEqual( - c.to_ical(), - b'BEGIN:VEVENT\r\n123456789 123456789 123456789 123456789 ' - b'123456789 123456789 123456789 1234\r\n 56789 123456789 ' - b'123456789 \r\n' - ) - - # from doctests - # Notice that there is an extra empty string in the end of the content - # lines. That is so they can be easily joined with: - # '\r\n'.join(contentlines)) - self.assertEqual(Contentlines.from_ical('A short line\r\n'), - ['A short line', '']) - self.assertEqual(Contentlines.from_ical('A faked\r\n long line\r\n'), - ['A faked long line', '']) - self.assertEqual( - Contentlines.from_ical('A faked\r\n long line\r\nAnd another ' - 'lin\r\n\te that is folded\r\n'), - ['A faked long line', 'And another line that is folded', ''] - ) - - def test_contentline_class(self): - from ..parser import Contentline, Parameters - from ..prop import vText - - self.assertEqual( - Contentline('Si meliora dies, ut vina, poemata reddit').to_ical(), - b'Si meliora dies, ut vina, poemata reddit' - ) - - # A long line gets folded - c = Contentline(''.join(['123456789 '] * 10)).to_ical() - self.assertEqual( - c, - (b'123456789 123456789 123456789 123456789 123456789 123456789 ' - b'123456789 1234\r\n 56789 123456789 123456789 ') - ) - - # A folded line gets unfolded - self.assertEqual( - Contentline.from_ical(c), - ('123456789 123456789 123456789 123456789 123456789 123456789 ' - '123456789 123456789 123456789 123456789 ') - ) - - # http://tools.ietf.org/html/rfc5545#section-3.3.11 - # An intentional formatted text line break MUST only be included in - # a "TEXT" property value by representing the line break with the - # character sequence of BACKSLASH, followed by a LATIN SMALL LETTER - # N or a LATIN CAPITAL LETTER N, that is "\n" or "\N". 
- - # Newlines are not allwoed in content lines - self.assertRaises(AssertionError, Contentline, b'1234\r\n\r\n1234') - - self.assertEqual( - Contentline('1234\\n\\n1234').to_ical(), - b'1234\\n\\n1234' - ) - - # We do not fold within a UTF-8 character - c = Contentline(b'This line has a UTF-8 character where it should be ' - b'folded. Make sure it g\xc3\xabts folded before that ' - b'character.') - - self.assertIn(b'\xc3\xab', c.to_ical()) - - # Another test of the above - c = Contentline(b'x' * 73 + b'\xc3\xab' + b'\\n ' + b'y' * 10) - - self.assertEqual(c.to_ical().count(b'\xc3'), 1) - - # Don't fail if we fold a line that is exactly X times 74 characters - # long - c = Contentline(''.join(['x'] * 148)).to_ical() - - # It can parse itself into parts, - # which is a tuple of (name, params, vals) - self.assertEqual( - Contentline('dtstart:20050101T120000').parts(), - ('dtstart', Parameters({}), '20050101T120000') - ) - - self.assertEqual( - Contentline('dtstart;value=datetime:20050101T120000').parts(), - ('dtstart', Parameters({'VALUE': 'datetime'}), '20050101T120000') - ) - - c = Contentline('ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com') - self.assertEqual( - c.parts(), - ('ATTENDEE', - Parameters({'ROLE': 'REQ-PARTICIPANT', 'CN': 'Max Rasmussen'}), - 'MAILTO:maxm@example.com') - ) - self.assertEqual( - c.to_ical().decode('utf-8'), - 'ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com' - ) - - # and back again - # NOTE: we are quoting property values with spaces in it. 
- parts = ('ATTENDEE', - Parameters({'ROLE': 'REQ-PARTICIPANT', - 'CN': 'Max Rasmussen'}), - 'MAILTO:maxm@example.com') - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE;CN="Max Rasmussen";ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com' - ) - - # and again - parts = ('ATTENDEE', Parameters(), 'MAILTO:maxm@example.com') - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE:MAILTO:maxm@example.com' - ) - - # A value can also be any of the types defined in PropertyValues - parts = ('ATTENDEE', Parameters(), vText('MAILTO:test@example.com')) - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE:MAILTO:test@example.com' - ) - - # A value in UTF-8 - parts = ('SUMMARY', Parameters(), vText('INternational char æ ø å')) - self.assertEqual( - Contentline.from_parts(*parts), - u'SUMMARY:INternational char æ ø å' - ) - - # A value can also be unicode - parts = ('SUMMARY', Parameters(), vText(u'INternational char æ ø å')) - self.assertEqual( - Contentline.from_parts(*parts), - u'SUMMARY:INternational char æ ø å' - ) - - # Traversing could look like this. 
- name, params, vals = c.parts() - self.assertEqual(name, 'ATTENDEE') - self.assertEqual(vals, 'MAILTO:maxm@example.com') - self.assertEqual( - sorted(params.items()), - sorted([('ROLE', 'REQ-PARTICIPANT'), ('CN', 'Max Rasmussen')]) - ) - - # And the traditional failure - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline('ATTENDEE;maxm@example.com').parts() - - # Another failure: - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline(':maxm@example.com').parts() - - self.assertEqual( - Contentline('key;param=:value').parts(), - ('key', Parameters({'PARAM': ''}), 'value') - ) - - self.assertEqual( - Contentline('key;param="pvalue":value').parts(), - ('key', Parameters({'PARAM': 'pvalue'}), 'value') - ) - - # Should bomb on missing param: - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline.from_ical("k;:no param").parts() - - self.assertEqual( - Contentline('key;param=pvalue:value', strict=False).parts(), - ('key', Parameters({'PARAM': 'pvalue'}), 'value') - ) - - # If strict is set to True, uppercase param values that are not - # double-quoted, this is because the spec says non-quoted params are - # case-insensitive. - self.assertEqual( - Contentline('key;param=pvalue:value', strict=True).parts(), - ('key', Parameters({'PARAM': 'PVALUE'}), 'value') - ) - - self.assertEqual( - Contentline('key;param="pValue":value', strict=True).parts(), - ('key', Parameters({'PARAM': 'pValue'}), 'value') - ) - - def test_fold_line(self): - from ..parser import foldline - - self.assertEqual(foldline(u'foo'), u'foo') - self.assertEqual( - foldline(u"Lorem ipsum dolor sit amet, consectetur adipiscing " - u"elit. Vestibulum convallis imperdiet dui posuere."), - (u'Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
' - u'Vestibulum conval\r\n lis imperdiet dui posuere.') - ) - - # I don't really get this test - # at least just but bytes in there - # porting it to "run" under python 2 & 3 makes it not much better - with self.assertRaises(AssertionError): - foldline(u'привет'.encode('utf-8'), limit=3) - - self.assertEqual(foldline(u'foobar', limit=4), u'foo\r\n bar') - self.assertEqual( - foldline(u'Lorem ipsum dolor sit amet, consectetur adipiscing elit' - u'. Vestibulum convallis imperdiet dui posuere.'), - (u'Lorem ipsum dolor sit amet, consectetur adipiscing elit.' - u' Vestibulum conval\r\n lis imperdiet dui posuere.') - ) - self.assertEqual( - foldline(u'DESCRIPTION:АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ'), - u'DESCRIPTION:АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭ\r\n ЮЯ' - ) - - def test_value_double_quoting(self): - from ..parser import dquote - self.assertEqual(dquote('Max'), 'Max') - self.assertEqual(dquote('Rasmussen, Max'), '"Rasmussen, Max"') - self.assertEqual(dquote('name:value'), '"name:value"') - - def test_q_split(self): - from ..parser import q_split - self.assertEqual(q_split('Max,Moller,"Rasmussen, Max"'), - ['Max', 'Moller', '"Rasmussen, Max"']) - - def test_q_join(self): - from ..parser import q_join - self.assertEqual(q_join(['Max', 'Moller', 'Rasmussen, Max']), - 'Max,Moller,"Rasmussen, Max"') diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_multiple.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_multiple.py deleted file mode 100644 index c0d9dfe..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_multiple.py +++ /dev/null @@ -1,28 +0,0 @@ -from icalendar import Calendar -from icalendar.prop import vText -from icalendar.tests import unittest - -import os - - -class TestMultiple(unittest.TestCase): - """A example with multiple VCALENDAR components""" - - def test_multiple(self): - - directory = os.path.dirname(__file__) - cals = Calendar.from_ical( - open(os.path.join(directory, 'multiple.ics'), 'rb').read(), - multiple=True - ) 
- - self.assertEqual(len(cals), 2) - self.assertSequenceEqual([comp.name for comp in cals[0].walk()], - ['VCALENDAR', 'VEVENT']) - self.assertSequenceEqual([comp.name for comp in cals[1].walk()], - ['VCALENDAR', 'VEVENT', 'VEVENT']) - - self.assertEqual( - cals[0]['prodid'], - vText('-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN') - ) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_property_params.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_property_params.py deleted file mode 100644 index 2dfd49b..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_property_params.py +++ /dev/null @@ -1,207 +0,0 @@ -# coding: utf-8 -from icalendar import Calendar -from icalendar import Event -from icalendar import Parameters -from icalendar import vCalAddress -from icalendar.tests import unittest - -import icalendar - - -class TestPropertyParams(unittest.TestCase): - - def test_property_params(self): - # Property parameters with values containing a COLON character, a - # SEMICOLON character or a COMMA character MUST be placed in quoted - # text. - cal_address = vCalAddress('mailto:john.doe@example.org') - cal_address.params["CN"] = "Doe, John" - ical = Calendar() - ical.add('organizer', cal_address) - - ical_str = Calendar.to_ical(ical) - exp_str = b"""BEGIN:VCALENDAR\r\nORGANIZER;CN="Doe, John":"""\ - b"""mailto:john.doe@example.org\r\nEND:VCALENDAR\r\n""" - - self.assertEqual(ical_str, exp_str) - - # other way around: ensure the property parameters can be restored from - # an icalendar string. 
- ical2 = Calendar.from_ical(ical_str) - self.assertEqual(ical2.get('ORGANIZER').params.get('CN'), 'Doe, John') - - def test_unicode_param(self): - cal_address = vCalAddress('mailto:john.doe@example.org') - cal_address.params["CN"] = "Джон Доу" - vevent = Event() - vevent['ORGANIZER'] = cal_address - self.assertEqual( - vevent.to_ical().decode('utf-8'), - u'BEGIN:VEVENT\r\n' - u'ORGANIZER;CN="Джон Доу":mailto:john.doe@example.org\r\n' - u'END:VEVENT\r\n' - ) - - self.assertEqual(vevent['ORGANIZER'].params['CN'], - 'Джон Доу') - - def test_quoting(self): - # not double-quoted - self._test_quoting(u"Aramis", u'Aramis') - # if a space is present - enclose in double quotes - self._test_quoting(u"Aramis Alameda", u'"Aramis Alameda"') - # a single quote in parameter value - double quote the value - self._test_quoting(u"Aramis d'Alameda", u'"Aramis d\'Alameda"') - # double quote is replaced with single quote - self._test_quoting(u"Aramis d\"Alameda", u'"Aramis d\'Alameda"') - self._test_quoting(u"Арамис д'Аламеда", u'"Арамис д\'Аламеда"') - - def _test_quoting(self, cn_param, cn_quoted): - """ - @param cn_param: CN parameter value to test for quoting - @param cn_quoted: expected quoted parameter in icalendar format - """ - vevent = Event() - attendee = vCalAddress('test@mail.com') - attendee.params['CN'] = cn_param - vevent.add('ATTENDEE', attendee) - self.assertEqual( - vevent.to_ical(), - b'BEGIN:VEVENT\r\nATTENDEE;CN=' + cn_quoted.encode('utf-8') + - b':test@mail.com\r\nEND:VEVENT\r\n' - ) - - def test_escaping(self): - # verify that escaped non safe chars are decoded correctly - NON_SAFE_CHARS = u',\\;:' - for char in NON_SAFE_CHARS: - cn_escaped = u"Society\\%s 2014" % char - cn_decoded = u"Society%s 2014" % char - vevent = Event.from_ical( - u'BEGIN:VEVENT\r\n' - u'ORGANIZER;CN=%s:that\r\n' - u'END:VEVENT\r\n' % cn_escaped - ) - self.assertEqual(vevent['ORGANIZER'].params['CN'], cn_decoded) - - vevent = Event.from_ical( - 'BEGIN:VEVENT\r\n' - 'ORGANIZER;CN=that\\, 
that\\; %th%%at%\\\\ that\\:' - ':это\\, то\\; that\\\\ %th%%at%\\:\r\n' - 'END:VEVENT\r\n' - ) - self.assertEqual( - vevent['ORGANIZER'].params['CN'], - r'that, that; %th%%at%\ that:' - ) - self.assertEqual( - vevent['ORGANIZER'].to_ical().decode('utf-8'), - u'это, то; that\\ %th%%at%:' - ) - - def test_parameters_class(self): - - # Simple parameter:value pair - p = Parameters(parameter1='Value1') - self.assertEqual(p.to_ical(), b'PARAMETER1=Value1') - - # keys are converted to upper - self.assertEqual(list(p.keys()), ['PARAMETER1']) - - # Parameters are case insensitive - self.assertEqual(p['parameter1'], 'Value1') - self.assertEqual(p['PARAMETER1'], 'Value1') - - # Parameter with list of values must be seperated by comma - p = Parameters({'parameter1': ['Value1', 'Value2']}) - self.assertEqual(p.to_ical(), b'PARAMETER1=Value1,Value2') - - # Multiple parameters must be seperated by a semicolon - p = Parameters({'RSVP': 'TRUE', 'ROLE': 'REQ-PARTICIPANT'}) - self.assertEqual(p.to_ical(), b'ROLE=REQ-PARTICIPANT;RSVP=TRUE') - - # Parameter values containing ',;:' must be double quoted - p = Parameters({'ALTREP': 'http://www.wiz.org'}) - self.assertEqual(p.to_ical(), b'ALTREP="http://www.wiz.org"') - - # list items must be quoted seperately - p = Parameters({'MEMBER': ['MAILTO:projectA@host.com', - 'MAILTO:projectB@host.com']}) - self.assertEqual( - p.to_ical(), - b'MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"' - ) - - # Now the whole sheebang - p = Parameters({'parameter1': 'Value1', - 'parameter2': ['Value2', 'Value3'], - 'ALTREP': ['http://www.wiz.org', 'value4']}) - self.assertEqual( - p.to_ical(), - (b'ALTREP="http://www.wiz.org",value4;PARAMETER1=Value1;' - b'PARAMETER2=Value2,Value3') - ) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical('PARAMETER1=Value 1;param2=Value 2'), - Parameters({'PARAMETER1': 'Value 1', 'PARAM2': 'Value 2'}) - ) - - # Including empty strings - 
self.assertEqual(Parameters.from_ical('param='), - Parameters({'PARAM': ''})) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical( - 'MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"' - ), - Parameters({'MEMBER': ['MAILTO:projectA@host.com', - 'MAILTO:projectB@host.com']}) - ) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical('ALTREP="http://www.wiz.org",value4;' - 'PARAMETER1=Value1;PARAMETER2=Value2,Value3'), - Parameters({'PARAMETER1': 'Value1', - 'ALTREP': ['http://www.wiz.org', 'value4'], - 'PARAMETER2': ['Value2', 'Value3']}) - ) - - def test_parse_and_access_property_params(self): - """Parse an ics string and access some property parameters then. - This is a follow-up of a question recieved per email. - - """ - ics = """BEGIN:VCALENDAR -VERSION:2.0 -PRODID://RESEARCH IN MOTION//BIS 3.0 -METHOD:REQUEST -BEGIN:VEVENT -SEQUENCE:2 -X-RIM-REVISION:0 -SUMMARY:Test meeting from BB -X-MICROSOFT-CDO-ALLDAYEVENT:TRUE -CLASS:PUBLIC -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandXS":MAILTO:rembrand@xs4all.nl -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandDX":MAILTO:rembrand@daxlab.com -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandSB":MAILTO:rembspam@xs4all.nl -UID:XRIMCAL-628059586-522954492-9750559 -DTSTART;VALUE=DATE:20120814 -DTEND;VALUE=DATE:20120815 -DESCRIPTION:Test meeting from BB -DTSTAMP:20120813T151458Z -ORGANIZER:mailto:rembrand@daxlab.com -END:VEVENT -END:VCALENDAR""" - - cal = icalendar.Calendar.from_ical(ics) - event = cal.walk("VEVENT")[0] - event['attendee'][0] - self.assertEqual(event['attendee'][0].to_ical(), - b'MAILTO:rembrand@xs4all.nl') - self.assertEqual(event['attendee'][0].params.to_ical(), - b'CN=RembrandXS;PARTSTAT=NEEDS-ACTION;RSVP=TRUE') - self.assertEqual(event['attendee'][0].params['cn'], u'RembrandXS') diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_recurrence.py 
b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_recurrence.py deleted file mode 100644 index d7ba4e3..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_recurrence.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.caselessdict import CaselessDict -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestRecurrence(unittest.TestCase): - - def setUp(self): - directory = os.path.dirname(__file__) - self.cal = icalendar.Calendar.from_ical( - open(os.path.join(directory, 'recurrence.ics'), 'rb').read() - ) - - def test_recurrence_exdates_one_line(self): - first_event = self.cal.walk('vevent')[0] - - self.assertIsInstance(first_event, CaselessDict) - self.assertEqual( - first_event['rrule'], {'COUNT': [100], 'FREQ': ['DAILY']} - ) - - self.assertEqual( - first_event['exdate'].to_ical(), - b'19960402T010000Z,19960403T010000Z,19960404T010000Z' - ) - - self.assertEqual( - first_event['exdate'].dts[0].dt, - datetime.datetime(1996, 4, 2, 1, 0, tzinfo=pytz.utc) - ) - - self.assertEqual( - first_event['exdate'].dts[1].dt, - datetime.datetime(1996, 4, 3, 1, 0, tzinfo=pytz.utc) - ) - - self.assertEqual( - first_event['exdate'].dts[2].dt, - datetime.datetime(1996, 4, 4, 1, 0, tzinfo=pytz.utc) - ) - - def test_recurrence_exdates_multiple_lines(self): - event = self.cal.walk('vevent')[1] - - exdate = event['exdate'] - - # TODO: DOCUMENT BETTER! - # In this case we have multiple EXDATE definitions, one per line. - # Icalendar makes a list out of this instead of zipping it into one - # vDDDLists object. 
Actually, this feels correct for me, as it also - # allows to define different timezones per exdate line - but client - # code has to handle this as list and not blindly expecting to be able - # to call event['EXDATE'].to_ical() on it: - self.assertEqual(isinstance(exdate, list), True) # multiple EXDATE - self.assertEqual(exdate[0].to_ical(), b'20120529T100000') - - # TODO: test for embedded timezone information! diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_time.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_time.py deleted file mode 100644 index 51312e5..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_time.py +++ /dev/null @@ -1,29 +0,0 @@ -from icalendar.tests import unittest - -import datetime -import icalendar -import os - - -class TestTime(unittest.TestCase): - - def setUp(self): - icalendar.cal.types_factory.types_map['X-SOMETIME'] = 'time' - - def tearDown(self): - icalendar.cal.types_factory.types_map.pop('X-SOMETIME') - - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'time.ics'), 'rb') - cal = icalendar.Calendar.from_ical(ics.read()) - ics.close() - - self.assertEqual(cal['X-SOMETIME'].dt, datetime.time(17, 20, 10)) - self.assertEqual(cal['X-SOMETIME'].to_ical(), '172010') - - def test_create_to_ical(self): - cal = icalendar.Calendar() - cal.add('X-SOMETIME', datetime.time(17, 20, 10)) - self.assertTrue(b'X-SOMETIME;VALUE=TIME:172010' in - cal.to_ical().splitlines()) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_timezoned.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_timezoned.py deleted file mode 100644 index 8c42cc7..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_timezoned.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.tests import unittest - -import datetime -import dateutil.parser -import icalendar -import os -import pytz - - -class TestTimezoned(unittest.TestCase): 
- - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - cal = icalendar.Calendar.from_ical( - open(os.path.join(directory, 'timezoned.ics'), 'rb').read() - ) - - self.assertEqual( - cal['prodid'].to_ical(), - b"-//Plone.org//NONSGML plone.app.event//EN" - ) - - timezones = cal.walk('VTIMEZONE') - self.assertEqual(len(timezones), 1) - - tz = timezones[0] - self.assertEqual(tz['tzid'].to_ical(), b"Europe/Vienna") - - std = tz.walk('STANDARD')[0] - self.assertEqual( - std.decoded('TZOFFSETFROM'), - datetime.timedelta(0, 7200) - ) - - ev1 = cal.walk('VEVENT')[0] - self.assertEqual( - ev1.decoded('DTSTART'), - datetime.datetime(2012, 2, 13, 10, 0, 0, - tzinfo=pytz.timezone('Europe/Vienna'))) - self.assertEqual( - ev1.decoded('DTSTAMP'), - datetime.datetime(2010, 10, 10, 9, 10, 10, tzinfo=pytz.utc)) - - def test_create_to_ical(self): - cal = icalendar.Calendar() - - cal.add('prodid', u"-//Plone.org//NONSGML plone.app.event//EN") - cal.add('version', u"2.0") - cal.add('x-wr-calname', u"test create calendar") - cal.add('x-wr-caldesc', u"icalendar tests") - cal.add('x-wr-relcalid', u"12345") - cal.add('x-wr-timezone', u"Europe/Vienna") - - tzc = icalendar.Timezone() - tzc.add('tzid', 'Europe/Vienna') - tzc.add('x-lic-location', 'Europe/Vienna') - - tzs = icalendar.TimezoneStandard() - tzs.add('tzname', 'CET') - tzs.add('dtstart', datetime.datetime(1970, 10, 25, 3, 0, 0)) - tzs.add('rrule', {'freq': 'yearly', 'bymonth': 10, 'byday': '-1su'}) - tzs.add('TZOFFSETFROM', datetime.timedelta(hours=2)) - tzs.add('TZOFFSETTO', datetime.timedelta(hours=1)) - - tzd = icalendar.TimezoneDaylight() - tzd.add('tzname', 'CEST') - tzd.add('dtstart', datetime.datetime(1970, 3, 29, 2, 0, 0)) - tzs.add('rrule', {'freq': 'yearly', 'bymonth': 3, 'byday': '-1su'}) - tzd.add('TZOFFSETFROM', datetime.timedelta(hours=1)) - tzd.add('TZOFFSETTO', datetime.timedelta(hours=2)) - - tzc.add_component(tzs) - tzc.add_component(tzd) - cal.add_component(tzc) - - event = 
icalendar.Event() - tz = pytz.timezone("Europe/Vienna") - event.add( - 'dtstart', - datetime.datetime(2012, 2, 13, 10, 00, 00, tzinfo=tz)) - event.add( - 'dtend', - datetime.datetime(2012, 2, 17, 18, 00, 00, tzinfo=tz)) - event.add( - 'dtstamp', - datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add( - 'created', - datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add('uid', u'123456') - event.add( - 'last-modified', - datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add('summary', u'artsprint 2012') - # event.add('rrule', u'FREQ=YEARLY;INTERVAL=1;COUNT=10') - event.add('description', u'sprinting at the artsprint') - event.add('location', u'aka bild, wien') - event.add('categories', u'first subject') - event.add('categories', u'second subject') - event.add('attendee', u'häns') - event.add('attendee', u'franz') - event.add('attendee', u'sepp') - event.add('contact', u'Max Mustermann, 1010 Wien') - event.add('url', u'http://plone.org') - cal.add_component(event) - - test_out = b'|'.join(cal.to_ical().splitlines()) - test_out = test_out.decode('utf-8') - - vtimezone_lines = "BEGIN:VTIMEZONE|TZID:Europe/Vienna|X-LIC-LOCATION:" - "Europe/Vienna|BEGIN:STANDARD|DTSTART;VALUE=DATE-TIME:19701025T03" - "0000|RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10|RRULE:FREQ=YEARLY;B" - "YDAY=-1SU;BYMONTH=3|TZNAME:CET|TZOFFSETFROM:+0200|TZOFFSETTO:+01" - "00|END:STANDARD|BEGIN:DAYLIGHT|DTSTART;VALUE=DATE-TIME:19700329T" - "020000|TZNAME:CEST|TZOFFSETFROM:+0100|TZOFFSETTO:+0200|END:DAYLI" - "GHT|END:VTIMEZONE" - self.assertTrue(vtimezone_lines in test_out) - - test_str = "DTSTART;TZID=Europe/Vienna;VALUE=DATE-TIME:20120213T100000" - self.assertTrue(test_str in test_out) - self.assertTrue("ATTENDEE:sepp" in test_out) - - # ical standard expects DTSTAMP and CREATED in UTC - self.assertTrue("DTSTAMP;VALUE=DATE-TIME:20101010T091010Z" in test_out) - self.assertTrue("CREATED;VALUE=DATE-TIME:20101010T091010Z" in test_out) - - def 
test_tzinfo_dateutil(self): - # Test for issues #77, #63 - # references: #73,7430b66862346fe3a6a100ab25e35a8711446717 - - date = dateutil.parser.parse('2012-08-30T22:41:00Z') - date2 = dateutil.parser.parse('2012-08-30T22:41:00 +02:00') - self.assertTrue(date.tzinfo.__module__ == 'dateutil.tz') - self.assertTrue(date2.tzinfo.__module__ == 'dateutil.tz') - - # make sure, it's parsed properly and doesn't throw an error - self.assertTrue(icalendar.vDDDTypes(date).to_ical() - == b'20120830T224100Z') - self.assertTrue(icalendar.vDDDTypes(date2).to_ical() - == b'20120830T224100') diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_cal.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_cal.py deleted file mode 100644 index 4419f21..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_cal.py +++ /dev/null @@ -1,349 +0,0 @@ -from datetime import datetime -from datetime import timedelta -from icalendar.tests import unittest - -import icalendar -import pytz - - -class TestCalComponent(unittest.TestCase): - - def test_cal_Component(self): - from icalendar.cal import Component, Calendar, Event - from icalendar import prop - - # A component is like a dictionary with extra methods and attributes. - c = Component() - c.name = 'VCALENDAR' - - # Every key defines a property.A property can consist of either a - # single item. This can be set with a single value... - c['prodid'] = '-//max m//icalendar.mxm.dk/' - self.assertEqual( - c, - Calendar({'PRODID': '-//max m//icalendar.mxm.dk/'}) - ) - - # or with a list - c['ATTENDEE'] = ['Max M', 'Rasmussen'] - self.assertEqual( - c, - Calendar({'ATTENDEE': ['Max M', 'Rasmussen'], - 'PRODID': '-//max m//icalendar.mxm.dk/'}) - ) - - ### ADD MULTIPLE VALUES TO A PROPERTY - - # if you use the add method you don't have to considder if a value is - # a list or not. 
- c = Component() - c.name = 'VEVENT' - - # add multiple values at once - c.add('attendee', - ['test@test.com', 'test2@test.com']) - - # or add one per line - c.add('attendee', 'maxm@mxm.dk') - c.add('attendee', 'test@example.dk') - - # add again multiple values at once to very concatenaton of lists - c.add('attendee', - ['test3@test.com', 'test4@test.com']) - - self.assertEqual( - c, - Event({'ATTENDEE': [ - prop.vCalAddress('test@test.com'), - prop.vCalAddress('test2@test.com'), - prop.vCalAddress('maxm@mxm.dk'), - prop.vCalAddress('test@example.dk'), - prop.vCalAddress('test3@test.com'), - prop.vCalAddress('test4@test.com') - ]}) - ) - - ### - - # You can get the values back directly ... - c.add('prodid', '-//my product//') - self.assertEqual(c['prodid'], prop.vText(u'-//my product//')) - - # ... or decoded to a python type - self.assertEqual(c.decoded('prodid'), b'-//my product//') - - # With default values for non existing properties - self.assertEqual(c.decoded('version', 'No Version'), 'No Version') - - c.add('rdate', [datetime(2013, 3, 28), datetime(2013, 3, 27)]) - self.assertTrue(isinstance(c.decoded('rdate'), prop.vDDDLists)) - - # The component can render itself in the RFC 2445 format. - c = Component() - c.name = 'VCALENDAR' - c.add('attendee', 'Max M') - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nATTENDEE:Max M\r\nEND:VCALENDAR\r\n' - ) - - # Components can be nested, so You can add a subcompont. Eg a calendar - # holds events. 
- e = Component(summary='A brief history of time') - e.name = 'VEVENT' - e.add('dtend', '20000102T000000', encode=0) - e.add('dtstart', '20000101T000000', encode=0) - self.assertEqual( - e.to_ical(), - b'BEGIN:VEVENT\r\nDTEND:20000102T000000\r\n' - + b'DTSTART:20000101T000000\r\nSUMMARY:A brief history of time\r' - + b'\nEND:VEVENT\r\n' - ) - - c.add_component(e) - self.assertEqual( - c.subcomponents, - [Event({'DTEND': '20000102T000000', 'DTSTART': '20000101T000000', - 'SUMMARY': 'A brief history of time'})] - ) - - # We can walk over nested componentes with the walk method. - self.assertEqual([i.name for i in c.walk()], ['VCALENDAR', 'VEVENT']) - - # We can also just walk over specific component types, by filtering - # them on their name. - self.assertEqual([i.name for i in c.walk('VEVENT')], ['VEVENT']) - - self.assertEqual( - [i['dtstart'] for i in c.walk('VEVENT')], - ['20000101T000000'] - ) - - # We can enumerate property items recursively with the property_items - # method. - self.assertEqual( - c.property_items(), - [('BEGIN', b'VCALENDAR'), ('ATTENDEE', prop.vCalAddress('Max M')), - ('BEGIN', b'VEVENT'), ('DTEND', '20000102T000000'), - ('DTSTART', '20000101T000000'), - ('SUMMARY', 'A brief history of time'), ('END', b'VEVENT'), - ('END', b'VCALENDAR')] - ) - - # We can also enumerate property items just under the component. 
- self.assertEqual( - c.property_items(recursive=False), - [('BEGIN', b'VCALENDAR'), - ('ATTENDEE', prop.vCalAddress('Max M')), - ('END', b'VCALENDAR')] - ) - - sc = c.subcomponents[0] - self.assertEqual( - sc.property_items(recursive=False), - [('BEGIN', b'VEVENT'), ('DTEND', '20000102T000000'), - ('DTSTART', '20000101T000000'), - ('SUMMARY', 'A brief history of time'), ('END', b'VEVENT')] - ) - - # Text fields which span multiple mulitple lines require proper - # indenting - c = Calendar() - c['description'] = u'Paragraph one\n\nParagraph two' - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nDESCRIPTION:Paragraph one\\n\\nParagraph two' - + b'\r\nEND:VCALENDAR\r\n' - ) - - # INLINE properties have their values on one property line. Note the - # double quoting of the value with a colon in it. - c = Calendar() - c['resources'] = 'Chair, Table, "Room: 42"' - self.assertEqual( - c, - Calendar({'RESOURCES': 'Chair, Table, "Room: 42"'}) - ) - - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nRESOURCES:Chair\\, Table\\, "Room: 42"\r\n' - + b'END:VCALENDAR\r\n' - ) - - # The inline values must be handled by the get_inline() and - # set_inline() methods. - self.assertEqual( - c.get_inline('resources', decode=0), - [u'Chair', u'Table', u'Room: 42'] - ) - - # These can also be decoded - self.assertEqual( - c.get_inline('resources', decode=1), - [b'Chair', b'Table', b'Room: 42'] - ) - - # You can set them directly ... - c.set_inline('resources', ['A', 'List', 'of', 'some, recources'], - encode=1) - self.assertEqual(c['resources'], 'A,List,of,"some, recources"') - - # ... 
and back again - self.assertEqual( - c.get_inline('resources', decode=0), - ['A', 'List', 'of', 'some, recources'] - ) - - c['freebusy'] = '19970308T160000Z/PT3H,19970308T200000Z/PT1H,'\ - + '19970308T230000Z/19970309T000000Z' - self.assertEqual( - c.get_inline('freebusy', decode=0), - ['19970308T160000Z/PT3H', '19970308T200000Z/PT1H', - '19970308T230000Z/19970309T000000Z'] - ) - - freebusy = c.get_inline('freebusy', decode=1) - self.assertTrue(isinstance(freebusy[0][0], datetime)) - self.assertTrue(isinstance(freebusy[0][1], timedelta)) - - def test_cal_Component_add(self): - # Test the for timezone correctness: dtstart should preserve it's - # timezone, crated, dtstamp and last-modified must be in UTC. - Component = icalendar.cal.Component - comp = Component() - comp.add('dtstart', datetime(2010, 10, 10, 10, 0, 0, - tzinfo=pytz.timezone("Europe/Vienna"))) - comp.add('created', datetime(2010, 10, 10, 12, 0, 0)) - comp.add('dtstamp', datetime(2010, 10, 10, 14, 0, 0, - tzinfo=pytz.timezone("Europe/Vienna"))) - comp.add('last-modified', datetime(2010, 10, 10, 16, 0, 0, - tzinfo=pytz.utc)) - - lines = comp.to_ical().splitlines() - self.assertTrue( - b"DTSTART;TZID=Europe/Vienna;VALUE=DATE-TIME:20101010T100000" - in lines) - self.assertTrue(b"CREATED;VALUE=DATE-TIME:20101010T120000Z" in lines) - self.assertTrue(b"DTSTAMP;VALUE=DATE-TIME:20101010T130000Z" in lines) - self.assertTrue( - b"LAST-MODIFIED;VALUE=DATE-TIME:20101010T160000Z" in lines - ) - - def test_cal_Component_add_no_reencode(self): - """Already encoded values should not be re-encoded. 
- """ - from icalendar import cal, prop - comp = cal.Component() - comp.add('ATTACH', 'me') - - comp.add('ATTACH', 'you', encode=False) - binary = prop.vBinary('us') - comp.add('ATTACH', binary) - - self.assertEqual(comp['ATTACH'], [u'me', 'you', binary]) - - def test_cal_Component_add_property_parameter(self): - # Test the for timezone correctness: dtstart should preserve it's - # timezone, crated, dtstamp and last-modified must be in UTC. - Component = icalendar.cal.Component - comp = Component() - comp.add('X-TEST-PROP', 'tryout.', - parameters={'prop1': 'val1', 'prop2': 'val2'}) - lines = comp.to_ical().splitlines() - self.assertTrue(b"X-TEST-PROP;PROP1=val1;PROP2=val2:tryout." in lines) - - def test_cal_Component_from_ical(self): - # Check for proper handling of TZID parameter of datetime properties - Component = icalendar.cal.Component - for component_name, property_name in ( - ('VEVENT', 'DTSTART'), - ('VEVENT', 'DTEND'), - ('VEVENT', 'RECURRENCE-ID'), - ('VTODO', 'DUE') - ): - component_str = 'BEGIN:' + component_name + '\n' - component_str += property_name + ';TZID=America/Denver:' - component_str += '20120404T073000\nEND:' + component_name - component = Component.from_ical(component_str) - self.assertEqual(str(component[property_name].dt.tzinfo.zone), - "America/Denver") - - component_str = 'BEGIN:' + component_name + '\n' - component_str += property_name + ':' - component_str += '20120404T073000\nEND:' + component_name - component = Component.from_ical(component_str) - self.assertEqual(component[property_name].dt.tzinfo, - None) - - -class TestCal(unittest.TestCase): - - def test_cal_ComponentFactory(self): - ComponentFactory = icalendar.cal.ComponentFactory - factory = ComponentFactory() - component = factory['VEVENT'] - event = component(dtstart='19700101') - self.assertEqual( - event.to_ical(), - b'BEGIN:VEVENT\r\nDTSTART:19700101\r\nEND:VEVENT\r\n' - ) - - self.assertEqual( - factory.get('VCALENDAR', icalendar.cal.Component), - 
icalendar.cal.Calendar) - - def test_cal_Calendar(self): - # Setting up a minimal calendar component looks like this - cal = icalendar.cal.Calendar() - - # Some properties are required to be compliant - cal['prodid'] = '-//My calendar product//mxm.dk//' - cal['version'] = '2.0' - - # We also need at least one subcomponent for a calendar to be compliant - event = icalendar.cal.Event() - event['summary'] = 'Python meeting about calendaring' - event['uid'] = '42' - event.add('dtstart', datetime(2005, 4, 4, 8, 0, 0)) - cal.add_component(event) - self.assertEqual( - cal.subcomponents[0].to_ical(), - b'BEGIN:VEVENT\r\nSUMMARY:Python meeting about calendaring\r\n' - + b'DTSTART;VALUE=DATE-TIME:20050404T080000\r\nUID:42\r\n' - + b'END:VEVENT\r\n') - - # Write to disc - import tempfile - import os - directory = tempfile.mkdtemp() - open(os.path.join(directory, 'test.ics'), 'wb').write(cal.to_ical()) - - # Parsing a complete calendar from a string will silently ignore bogus - # events. The bogosity in the following is the third EXDATE: it has an - # empty DATE. 
- s = '\r\n'.join(('BEGIN:VCALENDAR', - 'PRODID:-//Google Inc//Google Calendar 70.9054//EN', - 'VERSION:2.0', - 'CALSCALE:GREGORIAN', - 'METHOD:PUBLISH', - 'BEGIN:VEVENT', - 'DESCRIPTION:Perfectly OK event', - 'DTSTART;VALUE=DATE:20080303', - 'DTEND;VALUE=DATE:20080304', - 'RRULE:FREQ=DAILY;UNTIL=20080323T235959Z', - 'EXDATE;VALUE=DATE:20080311', - 'END:VEVENT', - 'BEGIN:VEVENT', - 'DESCRIPTION:Bogus event', - 'DTSTART;VALUE=DATE:20080303', - 'DTEND;VALUE=DATE:20080304', - 'RRULE:FREQ=DAILY;UNTIL=20080323T235959Z', - 'EXDATE;VALUE=DATE:20080311', - 'EXDATE;VALUE=DATE:', - 'END:VEVENT', - 'END:VCALENDAR')) - self.assertEqual( - [e['DESCRIPTION'].to_ical() - for e in icalendar.cal.Calendar.from_ical(s).walk('VEVENT')], - [b'Perfectly OK event']) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_caselessdict.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_caselessdict.py deleted file mode 100644 index 929fa6c..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_caselessdict.py +++ /dev/null @@ -1,90 +0,0 @@ -from icalendar.tests import unittest - -import icalendar - - -class TestCaselessdict(unittest.TestCase): - - def test_caselessdict_canonsort_keys(self): - canonsort_keys = icalendar.caselessdict.canonsort_keys - - keys = ['DTEND', 'DTSTAMP', 'DTSTART', 'UID', 'SUMMARY', 'LOCATION'] - - out = canonsort_keys(keys) - self.assertEqual( - out, - ['DTEND', 'DTSTAMP', 'DTSTART', 'LOCATION', 'SUMMARY', 'UID'] - ) - - out = canonsort_keys(keys, ('SUMMARY', 'DTSTART', 'DTEND', )) - self.assertEqual( - out, - ['SUMMARY', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'UID'] - ) - - out = canonsort_keys(keys, ('UID', 'DTSTART', 'DTEND', )) - self.assertEqual( - out, - ['UID', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'SUMMARY'] - ) - - out = canonsort_keys( - keys, - ('UID', 'DTSTART', 'DTEND', 'RRULE', 'EXDATE') - ) - self.assertEqual( - out, - ['UID', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'SUMMARY'] - ) - - def 
test_caselessdict_canonsort_items(self): - canonsort_items = icalendar.caselessdict.canonsort_items - - d = dict(i=7, c='at', a=3.5, l=(2, 3), e=[4, 5], n=13, d={'x': 'y'}, - r=1.0) - - out = canonsort_items(d) - self.assertEqual( - out, - [('a', 3.5), ('c', 'at'), ('d', {'x': 'y'}), ('e', [4, 5]), - ('i', 7), ('l', (2, 3)), ('n', 13), ('r', 1.0)] - ) - - out = canonsort_items(d, ('i', 'c', 'a')) - self.assertTrue( - out, - [('i', 7), ('c', 'at'), ('a', 3.5), ('d', {'x': 'y'}), - ('e', [4, 5]), ('l', (2, 3)), ('n', 13), ('r', 1.0)] - ) - - def test_CaselessDict(self): - CaselessDict = icalendar.caselessdict.CaselessDict - - ncd = CaselessDict(key1='val1', key2='val2') - self.assertEqual( - ncd, - CaselessDict({'KEY2': 'val2', 'KEY1': 'val1'}) - ) - - self.assertEqual(ncd['key1'], 'val1') - self.assertEqual(ncd['KEY1'], 'val1') - - ncd['KEY3'] = 'val3' - self.assertEqual(ncd['key3'], 'val3') - - self.assertEqual(ncd.setdefault('key3', 'FOUND'), 'val3') - self.assertEqual(ncd.setdefault('key4', 'NOT FOUND'), 'NOT FOUND') - self.assertEqual(ncd['key4'], 'NOT FOUND') - self.assertEqual(ncd.get('key1'), 'val1') - self.assertEqual(ncd.get('key3', 'NOT FOUND'), 'val3') - self.assertEqual(ncd.get('key4', 'NOT FOUND'), 'NOT FOUND') - self.assertTrue('key4' in ncd) - - del ncd['key4'] - self.assertFalse('key4' in ncd) - - ncd.update({'key5': 'val5', 'KEY6': 'val6', 'KEY5': 'val7'}) - self.assertEqual(ncd['key6'], 'val6') - - keys = sorted(ncd.keys()) - self.assertEqual(keys, ['KEY1', 'KEY2', 'KEY3', 'KEY5', 'KEY6']) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_parser_tools.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_parser_tools.py deleted file mode 100644 index ff9de9c..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_parser_tools.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import data_encode -from icalendar.parser_tools import to_unicode -from icalendar.tests import 
unittest - - -class TestParserTools(unittest.TestCase): - - def test_parser_tools_to_unicode(self): - - self.assertEqual(to_unicode('spam'), u'spam') - self.assertEqual(to_unicode(u'spam'), u'spam') - self.assertEqual(to_unicode(u'spam'.encode('utf-8')), u'spam') - self.assertEqual(to_unicode(b'\xc6\xb5'), u'\u01b5') - self.assertEqual(to_unicode(u'\xc6\xb5'.encode('iso-8859-1')), - u'\u01b5') - self.assertEqual(to_unicode(b'\xc6\xb5', encoding='ascii'), u'\u01b5') - self.assertEqual(to_unicode(1), 1) - self.assertEqual(to_unicode(None), None) - - def test_parser_tools_data_encode(self): - - data1 = { - u'k1': u'v1', 'k2': 'v2', u'k3': u'v3', - 'li1': ['it1', u'it2', {'k4': u'v4', u'k5': 'v5'}, 123] - } - res = {b'k3': b'v3', b'k2': b'v2', b'k1': b'v1', - b'li1': [b'it1', b'it2', {b'k5': b'v5', b'k4': b'v4'}, 123]} - self.assertEqual(data_encode(data1), res) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_prop.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_prop.py deleted file mode 100644 index 30dd84c..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_prop.py +++ /dev/null @@ -1,497 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from icalendar.parser import Parameters -from icalendar.tests import unittest - -import pytz - - -class TestProp(unittest.TestCase): - - def test_prop_vBinary(self): - from ..prop import vBinary - - txt = b'This is gibberish' - txt_ical = b'VGhpcyBpcyBnaWJiZXJpc2g=' - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - # The roundtrip test - txt = b'Binary data \x13 \x56' - txt_ical = b'QmluYXJ5IGRhdGEgEyBW' - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - self.assertIsInstance(vBinary('txt').params, Parameters) - self.assertEqual( - vBinary('txt').params, {'VALUE': 
'BINARY', 'ENCODING': 'BASE64'} - ) - - # Long data should not have line breaks, as that would interfere - txt = b'a' * 99 - txt_ical = b'YWFh' * 33 - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - def test_prop_vBoolean(self): - from ..prop import vBoolean - - self.assertEqual(vBoolean(True).to_ical(), b'TRUE') - self.assertEqual(vBoolean(0).to_ical(), b'FALSE') - - # The roundtrip test - self.assertEqual(vBoolean.from_ical(vBoolean(True).to_ical()), True) - self.assertEqual(vBoolean.from_ical('true'), True) - - def test_prop_vCalAddress(self): - from ..prop import vCalAddress - txt = b'MAILTO:maxm@mxm.dk' - a = vCalAddress(txt) - a.params['cn'] = 'Max M' - - self.assertEqual(a.to_ical(), txt) - self.assertIsInstance(a.params, Parameters) - self.assertEqual(a.params, {'CN': 'Max M'}) - self.assertEqual(vCalAddress.from_ical(txt), 'MAILTO:maxm@mxm.dk') - - def test_prop_vFloat(self): - from ..prop import vFloat - self.assertEqual(vFloat(1.0).to_ical(), b'1.0') - self.assertEqual(vFloat.from_ical('42'), 42.0) - self.assertEqual(vFloat(42).to_ical(), b'42.0') - - def test_prop_vInt(self): - from ..prop import vInt - self.assertEqual(vInt(42).to_ical(), b'42') - self.assertEqual(vInt.from_ical('13'), 13) - self.assertRaises(ValueError, vInt.from_ical, '1s3') - - def test_prop_vDDDLists(self): - from ..prop import vDDDLists - - dt_list = vDDDLists.from_ical('19960402T010000Z') - self.assertTrue(isinstance(dt_list, list)) - self.assertEqual(len(dt_list), 1) - self.assertTrue(isinstance(dt_list[0], datetime)) - self.assertEqual(str(dt_list[0]), '1996-04-02 01:00:00+00:00') - - p = '19960402T010000Z,19960403T010000Z,19960404T010000Z' - dt_list = vDDDLists.from_ical(p) - self.assertEqual(len(dt_list), 3) - self.assertEqual(str(dt_list[0]), '1996-04-02 01:00:00+00:00') - self.assertEqual(str(dt_list[2]), '1996-04-04 01:00:00+00:00') - - dt_list = vDDDLists([]) - self.assertEqual(dt_list.to_ical(), b'') - - dt_list 
= vDDDLists([datetime(2000, 1, 1)]) - self.assertEqual(dt_list.to_ical(), b'20000101T000000') - - dt_list = vDDDLists([datetime(2000, 1, 1), datetime(2000, 11, 11)]) - self.assertEqual(dt_list.to_ical(), b'20000101T000000,20001111T000000') - - def test_prop_vDDDTypes(self): - from ..prop import vDDDTypes - - self.assertTrue(isinstance(vDDDTypes.from_ical('20010101T123000'), - datetime)) - - self.assertEqual(vDDDTypes.from_ical('20010101T123000Z'), - datetime(2001, 1, 1, 12, 30, tzinfo=pytz.utc)) - - self.assertTrue(isinstance(vDDDTypes.from_ical('20010101'), date)) - - self.assertEqual(vDDDTypes.from_ical('P31D'), timedelta(31)) - - self.assertEqual(vDDDTypes.from_ical('-P31D'), timedelta(-31)) - - # Bad input - self.assertRaises(ValueError, vDDDTypes, 42) - - def test_prop_vDate(self): - from ..prop import vDate - - self.assertEqual(vDate(date(2001, 1, 1)).to_ical(), b'20010101') - self.assertEqual(vDate(date(1899, 1, 1)).to_ical(), b'18990101') - - self.assertEqual(vDate.from_ical('20010102'), date(2001, 1, 2)) - - self.assertRaises(ValueError, vDate, 'd') - - def test_prop_vDatetime(self): - from ..prop import vDatetime - - dt = datetime(2001, 1, 1, 12, 30, 0) - self.assertEqual(vDatetime(dt).to_ical(), b'20010101T123000') - - self.assertEqual(vDatetime.from_ical('20000101T120000'), - datetime(2000, 1, 1, 12, 0)) - - dutc = datetime(2001, 1, 1, 12, 30, 0, tzinfo=pytz.utc) - self.assertEqual(vDatetime(dutc).to_ical(), b'20010101T123000Z') - - dutc = datetime(1899, 1, 1, 12, 30, 0, tzinfo=pytz.utc) - self.assertEqual(vDatetime(dutc).to_ical(), b'18990101T123000Z') - - self.assertEqual(vDatetime.from_ical('20010101T000000'), - datetime(2001, 1, 1, 0, 0)) - - self.assertRaises(ValueError, vDatetime.from_ical, '20010101T000000A') - - utc = vDatetime.from_ical('20010101T000000Z') - self.assertEqual(vDatetime(utc).to_ical(), b'20010101T000000Z') - - # 1 minute before transition to DST - dat = vDatetime.from_ical('20120311T015959', 'America/Denver') - 
self.assertEqual(dat.strftime('%Y%m%d%H%M%S %z'), - '20120311015959 -0700') - - # After transition to DST - dat = vDatetime.from_ical('20120311T030000', 'America/Denver') - self.assertEqual(dat.strftime('%Y%m%d%H%M%S %z'), - '20120311030000 -0600') - - dat = vDatetime.from_ical('20101010T000000', 'Europe/Vienna') - self.assertEqual(vDatetime(dat).to_ical(), b'20101010T000000') - - def test_prop_vDuration(self): - from ..prop import vDuration - - self.assertEqual(vDuration(timedelta(11)).to_ical(), b'P11D') - self.assertEqual(vDuration(timedelta(-14)).to_ical(), b'-P14D') - self.assertEqual( - vDuration(timedelta(1, 7384)).to_ical(), - b'P1DT2H3M4S' - ) - self.assertEqual(vDuration(timedelta(1, 7380)).to_ical(), b'P1DT2H3M') - self.assertEqual(vDuration(timedelta(1, 7200)).to_ical(), b'P1DT2H') - self.assertEqual(vDuration(timedelta(0, 7200)).to_ical(), b'PT2H') - self.assertEqual(vDuration(timedelta(0, 7384)).to_ical(), b'PT2H3M4S') - self.assertEqual(vDuration(timedelta(0, 184)).to_ical(), b'PT3M4S') - self.assertEqual(vDuration(timedelta(0, 22)).to_ical(), b'PT22S') - self.assertEqual(vDuration(timedelta(0, 3622)).to_ical(), b'PT1H0M22S') - self.assertEqual(vDuration(timedelta(days=1, hours=5)).to_ical(), - b'P1DT5H') - self.assertEqual(vDuration(timedelta(hours=-5)).to_ical(), b'-PT5H') - self.assertEqual(vDuration(timedelta(days=-1, hours=-5)).to_ical(), - b'-P1DT5H') - - # How does the parsing work? 
- self.assertEqual(vDuration.from_ical('PT1H0M22S'), timedelta(0, 3622)) - - self.assertRaises(ValueError, vDuration.from_ical, 'kox') - - self.assertEqual(vDuration.from_ical('-P14D'), timedelta(-14)) - - self.assertRaises(ValueError, vDuration, 11) - - def test_prop_vPeriod(self): - from ..prop import vPeriod - - # One day in exact datetimes - per = (datetime(2000, 1, 1), datetime(2000, 1, 2)) - self.assertEqual(vPeriod(per).to_ical(), - b'20000101T000000/20000102T000000') - - per = (datetime(2000, 1, 1), timedelta(days=31)) - self.assertEqual(vPeriod(per).to_ical(), b'20000101T000000/P31D') - - # Roundtrip - p = vPeriod.from_ical('20000101T000000/20000102T000000') - self.assertEqual( - p, - (datetime(2000, 1, 1, 0, 0), datetime(2000, 1, 2, 0, 0)) - ) - self.assertEqual(vPeriod(p).to_ical(), - b'20000101T000000/20000102T000000') - - self.assertEqual(vPeriod.from_ical('20000101T000000/P31D'), - (datetime(2000, 1, 1, 0, 0), timedelta(31))) - - # Roundtrip with absolute time - p = vPeriod.from_ical('20000101T000000Z/20000102T000000Z') - self.assertEqual(vPeriod(p).to_ical(), - b'20000101T000000Z/20000102T000000Z') - - # And an error - self.assertRaises(ValueError, - vPeriod.from_ical, '20000101T000000/Psd31D') - - # Timezoned - dk = pytz.timezone('Europe/Copenhagen') - start = datetime(2000, 1, 1, tzinfo=dk) - end = datetime(2000, 1, 2, tzinfo=dk) - per = (start, end) - self.assertEqual(vPeriod(per).to_ical(), - b'20000101T000000/20000102T000000') - self.assertEqual(vPeriod(per).params['TZID'], - 'Europe/Copenhagen') - - p = vPeriod((datetime(2000, 1, 1, tzinfo=dk), timedelta(days=31))) - self.assertEqual(p.to_ical(), b'20000101T000000/P31D') - - def test_prop_vWeekday(self): - from ..prop import vWeekday - - self.assertEqual(vWeekday('mo').to_ical(), b'MO') - self.assertRaises(ValueError, vWeekday, 'erwer') - self.assertEqual(vWeekday.from_ical('mo'), 'MO') - self.assertEqual(vWeekday.from_ical('+3mo'), '+3MO') - self.assertRaises(ValueError, vWeekday.from_ical, 
'Saturday') - self.assertEqual(vWeekday('+mo').to_ical(), b'+MO') - self.assertEqual(vWeekday('+3mo').to_ical(), b'+3MO') - self.assertEqual(vWeekday('-tu').to_ical(), b'-TU') - - def test_prop_vFrequency(self): - from ..prop import vFrequency - - self.assertRaises(ValueError, vFrequency, 'bad test') - self.assertEqual(vFrequency('daily').to_ical(), b'DAILY') - self.assertEqual(vFrequency('daily').from_ical('MONTHLY'), 'MONTHLY') - - def test_prop_vRecur(self): - from ..prop import vRecur - - # Let's see how close we can get to one from the rfc: - # FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30 - - r = dict(freq='yearly', interval=2) - r.update({ - 'bymonth': 1, - 'byday': 'su', - 'byhour': [8, 9], - 'byminute': 30 - }) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - r = vRecur(FREQ='yearly', INTERVAL=2) - r.update({ - 'BYMONTH': 1, - 'BYDAY': 'su', - 'BYHOUR': [8, 9], - 'BYMINUTE': 30, - }) - self.assertEqual( - r.to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - r = vRecur(freq='DAILY', count=10) - r['bysecond'] = [0, 15, 30, 45] - self.assertEqual(r.to_ical(), - b'FREQ=DAILY;COUNT=10;BYSECOND=0,15,30,45') - - r = vRecur(freq='DAILY', until=datetime(2005, 1, 1, 12, 0, 0)) - self.assertEqual(r.to_ical(), b'FREQ=DAILY;UNTIL=20050101T120000') - - # How do we fare with regards to parsing? 
- r = vRecur.from_ical('FREQ=DAILY;INTERVAL=2;COUNT=10') - self.assertEqual(r, - {'COUNT': [10], 'FREQ': ['DAILY'], 'INTERVAL': [2]}) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=DAILY;COUNT=10;INTERVAL=2' - ) - - r = vRecur.from_ical('FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=-SU;' - 'BYHOUR=8,9;BYMINUTE=30') - self.assertEqual( - r, - {'BYHOUR': [8, 9], 'BYDAY': ['-SU'], 'BYMINUTE': [30], - 'BYMONTH': [1], 'FREQ': ['YEARLY'], 'INTERVAL': [2]} - ) - - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=-SU;' - b'BYMONTH=1' - ) - - # Some examples from the spec - r = vRecur.from_ical('FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1') - self.assertEqual(vRecur(r).to_ical(), - b'FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1') - - p = 'FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30' - r = vRecur.from_ical(p) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - # and some errors - self.assertRaises(ValueError, vRecur.from_ical, 'BYDAY=12') - - def test_prop_vText(self): - from ..prop import vText - - self.assertEqual(vText(u'Simple text').to_ical(), b'Simple text') - - # Escaped text - t = vText('Text ; with escaped, chars') - self.assertEqual(t.to_ical(), b'Text \\; with escaped\\, chars') - - # Escaped newlines - self.assertEqual(vText('Text with escaped\\N chars').to_ical(), - b'Text with escaped\\n chars') - - # If you pass a unicode object, it will be utf-8 encoded. As this is - # the (only) standard that RFC 2445 support. - t = vText(u'international chars \xe4\xf6\xfc') - self.assertEqual(t.to_ical(), - b'international chars \xc3\xa4\xc3\xb6\xc3\xbc') - - # and parsing? 
- self.assertEqual(vText.from_ical('Text \\; with escaped\\, chars'), - u'Text ; with escaped, chars') - - t = vText.from_ical('A string with\\; some\\\\ characters in\\it') - self.assertEqual(t, "A string with; some\\ characters in\it") - - # We are forgiving to utf-8 encoding errors: - # We intentionally use a string with unexpected encoding - # - self.assertEqual(vText.from_ical(b'Ol\xe9'), u'Ol\ufffd') - - # Notice how accented E character, encoded with latin-1, got replaced - # with the official U+FFFD REPLACEMENT CHARACTER. - - def test_prop_vTime(self): - from ..prop import vTime - - self.assertEqual(vTime(12, 30, 0).to_ical(), '123000') - self.assertEqual(vTime.from_ical('123000'), time(12, 30)) - - # We should also fail, right? - self.assertRaises(ValueError, vTime.from_ical, '263000') - - def test_prop_vUri(self): - from ..prop import vUri - - self.assertEqual(vUri('http://www.example.com/').to_ical(), - b'http://www.example.com/') - self.assertEqual(vUri.from_ical('http://www.example.com/'), - 'http://www.example.com/') - - def test_prop_vGeo(self): - from ..prop import vGeo - - # Pass a list - self.assertEqual(vGeo([1.2, 3.0]).to_ical(), '1.2;3.0') - - # Pass a tuple - self.assertEqual(vGeo((1.2, 3.0)).to_ical(), '1.2;3.0') - - g = vGeo.from_ical('37.386013;-122.082932') - self.assertEqual(g, (float('37.386013'), float('-122.082932'))) - - self.assertEqual(vGeo(g).to_ical(), '37.386013;-122.082932') - - self.assertRaises(ValueError, vGeo, 'g') - - def test_prop_vUTCOffset(self): - from ..prop import vUTCOffset - - self.assertEqual(vUTCOffset(timedelta(hours=2)).to_ical(), '+0200') - - self.assertEqual(vUTCOffset(timedelta(hours=-5)).to_ical(), '-0500') - - self.assertEqual(vUTCOffset(timedelta()).to_ical(), '+0000') - - self.assertEqual(vUTCOffset(timedelta(minutes=-30)).to_ical(), - '-0030') - - self.assertEqual( - vUTCOffset(timedelta(hours=2, minutes=-30)).to_ical(), - '+0130' - ) - - self.assertEqual(vUTCOffset(timedelta(hours=1, 
minutes=30)).to_ical(), - '+0130') - - # Support seconds - self.assertEqual(vUTCOffset(timedelta(hours=1, - minutes=30, - seconds=7)).to_ical(), '+013007') - - # Parsing - - self.assertEqual(vUTCOffset.from_ical('0000'), timedelta(0)) - self.assertEqual(vUTCOffset.from_ical('-0030'), timedelta(-1, 84600)) - self.assertEqual(vUTCOffset.from_ical('+0200'), timedelta(0, 7200)) - self.assertEqual(vUTCOffset.from_ical('+023040'), timedelta(0, 9040)) - - self.assertEqual(vUTCOffset(vUTCOffset.from_ical('+0230')).to_ical(), - '+0230') - - # And a few failures - self.assertRaises(ValueError, vUTCOffset.from_ical, '+323k') - - self.assertRaises(ValueError, vUTCOffset.from_ical, '+2400') - - def test_prop_vInline(self): - from ..prop import vInline - - self.assertEqual(vInline('Some text'), 'Some text') - self.assertEqual(vInline.from_ical('Some text'), 'Some text') - - t2 = vInline('other text') - t2.params['cn'] = 'Test Osterone' - self.assertIsInstance(t2.params, Parameters) - self.assertEqual(t2.params, {'CN': 'Test Osterone'}) - - def test_prop_TypesFactory(self): - from ..prop import TypesFactory - - # To get a type you can use it like this. 
- factory = TypesFactory() - datetime_parser = factory['date-time'] - self.assertEqual(datetime_parser(datetime(2001, 1, 1)).to_ical(), - b'20010101T000000') - - # A typical use is when the parser tries to find a content type and use - # text as the default - value = '20050101T123000' - value_type = 'date-time' - self.assertEqual(factory.get(value_type, 'text').from_ical(value), - datetime(2005, 1, 1, 12, 30)) - - # It can also be used to directly encode property and parameter values - self.assertEqual( - factory.to_ical('comment', u'by Rasmussen, Max M\xfcller'), - b'by Rasmussen\\, Max M\xc3\xbcller' - ) - self.assertEqual(factory.to_ical('priority', 1), b'1') - self.assertEqual(factory.to_ical('cn', u'Rasmussen, Max M\xfcller'), - b'Rasmussen\\, Max M\xc3\xbcller') - self.assertEqual( - factory.from_ical('cn', b'Rasmussen\\, Max M\xc3\xb8ller'), - u'Rasmussen, Max M\xf8ller' - ) - - -class TestPropertyValues(unittest.TestCase): - - def test_vDDDLists_timezone(self): - """Test vDDDLists with timezone information. - """ - from .. 
import Event - vevent = Event() - at = pytz.timezone('Europe/Vienna') - dt1 = at.localize(datetime(2013, 1, 1)) - dt2 = at.localize(datetime(2013, 1, 2)) - dt3 = at.localize(datetime(2013, 1, 3)) - vevent.add('rdate', [dt1, dt2]) - vevent.add('exdate', dt3) - ical = vevent.to_ical() - - self.assertTrue( - b'RDATE;TZID=Europe/Vienna:20130101T000000,20130102T000000' in ical - ) - self.assertTrue(b'EXDATE;TZID=Europe/Vienna:20130103T000000' in ical) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_tools.py b/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_tools.py deleted file mode 100644 index be2e41e..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/test_unit_tools.py +++ /dev/null @@ -1,28 +0,0 @@ -from icalendar.tests import unittest -from icalendar.tools import UIDGenerator - - -class TestTools(unittest.TestCase): - - def test_tools_UIDGenerator(self): - - # Automatic semi-random uid - g = UIDGenerator() - uid = g.uid() - - txt = uid.to_ical() - length = 15 + 1 + 16 + 1 + 11 - self.assertTrue(len(txt) == length) - self.assertTrue(b'@example.com' in txt) - - # You should at least insert your own hostname to be more compliant - uid = g.uid('Example.ORG') - txt = uid.to_ical() - self.assertTrue(len(txt) == length) - self.assertTrue(b'@Example.ORG' in txt) - - # You can also insert a path or similar - uid = g.uid('Example.ORG', '/path/to/content') - txt = uid.to_ical() - self.assertTrue(len(txt) == length) - self.assertTrue(b'-/path/to/content@Example.ORG' in txt) diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/time.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/time.ics deleted file mode 100644 index d730a4c..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/time.ics +++ /dev/null @@ -1,3 +0,0 @@ -BEGIN:VCALENDAR -X-SOMETIME;VALUE=TIME:172010 -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tests/timezoned.ics b/libs/icalendar-3.6.1/build/lib/icalendar/tests/timezoned.ics 
deleted file mode 100644 index 5878b72..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tests/timezoned.ics +++ /dev/null @@ -1,36 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Plone.org//NONSGML plone.app.event//EN -VERSION:2.0 -X-WR-CALNAME:test create calendar -X-WR-CALDESC:icalendar test -X-WR-RELCALID:12345 -X-WR-TIMEZONE:Europe/Vienna -BEGIN:VTIMEZONE -TZID:Europe/Vienna -X-LIC-LOCATION:Europe/Vienna -BEGIN:DAYLIGHT -TZOFFSETFROM:+0100 -TZOFFSETTO:+0200 -TZNAME:CEST -DTSTART:19700329T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:+0200 -TZOFFSETTO:+0100 -TZNAME:CET -DTSTART:19701025T030000 -RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTART;TZID=Europe/Vienna:20120213T100000 -DTEND;TZID=Europe/Vienna:20120217T180000 -DTSTAMP:20101010T091010Z -CREATED:20101010T091010Z -UID:123456 -SUMMARY:artsprint 2012 -DESCRIPTION:sprinting at the artsprint -LOCATION:aka bild, wien -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/build/lib/icalendar/tools.py b/libs/icalendar-3.6.1/build/lib/icalendar/tools.py deleted file mode 100644 index 387c43a..0000000 --- a/libs/icalendar-3.6.1/build/lib/icalendar/tools.py +++ /dev/null @@ -1,33 +0,0 @@ -from datetime import datetime -from icalendar.parser_tools import to_unicode -from icalendar.prop import vDatetime -from icalendar.prop import vText -from string import ascii_letters -from string import digits - -import random - - -class UIDGenerator(object): - """If you are too lazy to create real uid's. - - """ - chars = list(ascii_letters + digits) - - def rnd_string(self, length=16): - """Generates a string with random characters of length. - """ - return ''.join([random.choice(self.chars) for _ in range(length)]) - - def uid(self, host_name='example.com', unique=''): - """Generates a unique id consisting of: - datetime-uniquevalue@host. 
- Like: - 20050105T225746Z-HKtJMqUgdO0jDUwm@example.com - """ - host_name = to_unicode(host_name) - unique = unique or self.rnd_string() - today = to_unicode(vDatetime(datetime.today()).to_ical()) - return vText('%s-%s@%s' % (today, - unique, - host_name)) diff --git a/libs/icalendar-3.6.1/dist/icalendar-3.6.1-py3.3.egg b/libs/icalendar-3.6.1/dist/icalendar-3.6.1-py3.3.egg deleted file mode 100644 index 1eaac3e..0000000 Binary files a/libs/icalendar-3.6.1/dist/icalendar-3.6.1-py3.3.egg and /dev/null differ diff --git a/libs/icalendar-3.6.1/docs/Makefile b/libs/icalendar-3.6.1/docs/Makefile deleted file mode 100644 index c123868..0000000 --- a/libs/icalendar-3.6.1/docs/Makefile +++ /dev/null @@ -1,130 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = _build - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
- -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - -rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." - -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." 
- -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/icalendar.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/icalendar.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/icalendar" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/icalendar" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - make -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." 
- -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." diff --git a/libs/icalendar-3.6.1/docs/_themes/icalendar/layout.html b/libs/icalendar-3.6.1/docs/_themes/icalendar/layout.html deleted file mode 100644 index c931918..0000000 --- a/libs/icalendar-3.6.1/docs/_themes/icalendar/layout.html +++ /dev/null @@ -1,14 +0,0 @@ -{# - sphinxdoc/layout.html - ~~~~~~~~~~~~~~~~~~~~~ - - Sphinx layout template for the sphinxdoc theme. - - :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - :license: BSD, see LICENSE for details. -#} -{% extends "basic/layout.html" %} - -{# put the sidebar before the body #} -{% block sidebar1 %}{{ sidebar() }}{% endblock %} -{% block sidebar2 %}{% endblock %} diff --git a/libs/icalendar-3.6.1/docs/_themes/icalendar/static/icalendar.css b/libs/icalendar-3.6.1/docs/_themes/icalendar/static/icalendar.css deleted file mode 100644 index e20b32e..0000000 --- a/libs/icalendar-3.6.1/docs/_themes/icalendar/static/icalendar.css +++ /dev/null @@ -1,428 +0,0 @@ -/* - * sphinxdoc.css_t - * ~~~~~~~~~~~~~~~ - * - * Sphinx stylesheet -- sphinxdoc theme. Originally created by - * Armin Ronacher for Werkzeug. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. 
- * - */ - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: Arial, sans-serif, serif; - font-size: 15px; - letter-spacing: -0.05em; - line-height: 1.3em; - background-color: #F5F4EF; - padding: 0; - margin: 0 auto; - width: 950px; - -moz-box-shadow: 0px 0px 15px #bbb; - -webkit-box-shadow: 0px 0px 15px #bbb; - box-shadow: 0px 0px 15px #bbb; -} - -div.document { - background-color: white; - text-align: left; -} - -div.bodywrapper { - margin: 0 240px 0 0; - border-right: 1px solid #ccc; -} - -div.body { - margin: 0; - padding: 0.5em 20px 20px 20px; -} - - -/*Banner*/ -#banner { - height: 100px; - background: #fff; -} - -#vlinux-logo { - margin-top: 10px; - margin-left: 10px; -} - -#nav { - background: #48443d url('menu_bg.png') repeat-x; - height: 30px; -} - -#nav ul { - list-style: none; - padding: 0; - margin: 0; -} - -#nav li { - display: inline; - float: left; - margin: 0; -} - -#nav a:link, #nav a:visited { - color: #fff; - display: inline-block; - padding: 5px 20px; - height: 20px; - text-decoration: none; - font-size: 0.8em; - border-bottom: none; -} - -#nav a:hover, #nav a:active, #nav .active a:link, #nav .active a:visited { - background: #89c912; - color: #fff; - text-shadow: #666 2px 2px 2px; -} - - -div.related { - font-size: 0.8em; -} - -div.related ul { - /*background-image: url(navigation.png);*/ - background: #fafafa; - height: 2em; - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; -} - -div.related ul li { - margin: 0; - padding: 0; - height: 2em; - float: left; -} - -div.related ul li.right { - float: right; - margin-right: 5px; -} - -div.related ul li a { - margin: 0; - padding: 0 5px 0 5px; - line-height: 1.75em; - color: #48443d; - border-bottom: #48443d 1px dotted; -} - -div.related ul li a:hover { - color: #89c912; - text-decoration: none; - border-bottom: #89c912 1px dotted; - -} - -div.sphinxsidebarwrapper { - padding: 0; -} - 
-div.sphinxsidebar { - margin: 0; - padding: 0.5em 15px 15px 0; - width: 210px; - float: right; - font-size: 0.8em; - text-align: left; -} - -div.sphinxsidebar h3, div.sphinxsidebar h4 { - margin: 1em 0; - font-size: 1.8em; - line-height: 1.3em; - padding: 0.2em 0.5em; - color: white; - /*border: 1px solid #86989B;*/ - background-color: #000000; -} - -div.sphinxsidebar h3 a { - color: white; -} - -div.sphinxsidebar ul { - padding-left: 1.5em; - margin-top: 7px; - padding: 0; - line-height: 130%; - font-size: 14px; - margin-left: 1em; -} - -div.sphinxsidebar ul ul { - margin-left: 20px; -} - -div.footer { - display: none; - background-color: #FAFAFA; - border-bottom: 1px solid #DDDDDD; - border-top: 1px solid #DDDDDD; - clear: both; - height: 2em; - line-height: 1.75em; - padding: 0; - text-align: center; -} - -div.footer a, -div.footer a:hover { - border-bottom: 1px dotted #48443D; - text-decoration: none; -} - -/* -- body styles ----------------------------------------------------------- */ - -p { - margin: 0.8em 0 0.5em 0; -} - -a { - color: #48443d; - text-decoration: none; - border-bottom: #48443d 1px dotted; - font-weight: bold; -} - -a:hover { - color: #89c912; - text-decoration: none; - border-bottom: #89c912 1px dotted; -} - -/*div.body a { - text-decoration: underline; -}*/ - -h1 { - margin: 0; - padding: 0.7em 0 0.3em 0; - font-size: 2.5em; - line-height: 1em; - color: #48443d; -} - -h2 { - margin: 1.3em 0 0.2em 0; - font-size: 2em; - line-height: 0.8em; - padding: 0; - color: #48443d; -} - -h3 { - margin: 1em 0 -0.3em 0; - font-size: 1.5em; - color: #48443d; -} - -div.body h1 a, div.body h2 a, div.body h3 a, div.body h4 a, div.body h5 a, div.body h6 a { - color: #48443d; -} - -h1 a.anchor, h2 a.anchor, h3 a.anchor, h4 a.anchor, h5 a.anchor, h6 a.anchor { - display: none; - margin: 0 0 0 0.3em; - padding: 0 0.2em 0 0.2em; - color: #aaa!important; -} - -h1:hover a.anchor, h2:hover a.anchor, h3:hover a.anchor, h4:hover a.anchor, -h5:hover a.anchor, h6:hover 
a.anchor { - display: inline; -} - -h1 a.anchor:hover, h2 a.anchor:hover, h3 a.anchor:hover, h4 a.anchor:hover, -h5 a.anchor:hover, h6 a.anchor:hover { - color: #777; - background-color: #eee; -} - -a.headerlink { - color: #c60f0f!important; - font-size: 1em; - margin-left: 6px; - padding: 0 4px 0 4px; - text-decoration: none!important; -} - -a.headerlink:hover { - background-color: #ccc; - color: white!important; -} - -cite, code, tt { - font-family: 'Consolas', 'Deja Vu Sans Mono', - 'Bitstream Vera Sans Mono', monospace; - font-size: 0.95em; - letter-spacing: 0.01em; -} - -tt { - background-color: #f2f2f2; - border-bottom: 1px solid #ddd; - color: #333; -} - -tt.descname, tt.descclassname, tt.xref { - border: 0; -} - -hr { - display: none; -} -.body hr { - border: 1px solid #abc; - margin: 2em; -} - -a tt { - border: 0; - color: #CA7900; -} - -a tt:hover { - color: #2491CF; -} - -pre { - font-family: 'Consolas', 'Deja Vu Sans Mono', - 'Bitstream Vera Sans Mono', monospace; - font-size: 0.95em; - letter-spacing: 0.015em; - line-height: 120%; - padding: 0.5em; - border: 1px solid #ccc; - background-color: #f8f8f8; -} - -pre a { - color: inherit; - text-decoration: underline; -} - -td.linenos pre { - padding: 0.5em 0; -} - -div.quotebar { - background-color: #f8f8f8; - max-width: 250px; - float: right; - padding: 2px 7px; - border: 1px solid #ccc; -} - -div.topic { - background-color: #f8f8f8; -} - -table { - border-collapse: collapse; - margin: 0 -0.5em 0 -0.5em; -} - -table td, table th { - padding: 0.2em 0.5em 0.2em 0.5em; -} - -div.admonition, div.warning { - font-size: 0.9em; - margin: 1em 0 1em 0; - border: 1px solid #86989B; - background-color: #f7f7f7; - padding: 0; -} - -div.admonition p, div.warning p { - margin: 0.5em 1em 0.5em 1em; - padding: 0; -} - -div.admonition pre, div.warning pre { - margin: 0.4em 1em 0.4em 1em; -} - -div.admonition p.admonition-title, -div.warning p.admonition-title { - margin: 0; - padding: 0.1em 0 0.1em 0.5em; - color: white; 
- border-bottom: 1px solid #86989B; - font-weight: bold; - background-color: #AFC1C4; -} - -div.warning { - border: 1px solid #940000; -} - -div.warning p.admonition-title { - background-color: #CF0000; - border-bottom-color: #940000; -} - -div.admonition ul, div.admonition ol, -div.warning ul, div.warning ol { - margin: 0.1em 0.5em 0.5em 3em; - padding: 0; -} - -div.versioninfo { - margin: 1em 0 0 0; - border: 1px solid #ccc; - background-color: #DDEAF0; - padding: 8px; - line-height: 1.3em; - font-size: 0.9em; -} - -.viewcode-back { - font-family: 'Lucida Grande', 'Lucida Sans Unicode', 'Geneva', - 'Verdana', sans-serif; -} - -div.viewcode-block:target { - background-color: #f4debf; - border-top: 1px solid #ac9; - border-bottom: 1px solid #ac9; -} - - -/* custom */ -#twitter a { - border-bottom: none; -} - -#twitter a:hover { - border-bottom: none; -} - -.twtr-widget { - font-family: font-family: 'Droid Sans', serif !important; -} - -#disqus_thread { - padding: 0.5em 20px 20px 20px; - width: 670px; - float: left; -} - -p#comments { - padding: 0.5em 20px 20px 20px; -} diff --git a/libs/icalendar-3.6.1/docs/_themes/icalendar/theme.conf b/libs/icalendar-3.6.1/docs/_themes/icalendar/theme.conf deleted file mode 100644 index 3c3ed23..0000000 --- a/libs/icalendar-3.6.1/docs/_themes/icalendar/theme.conf +++ /dev/null @@ -1,4 +0,0 @@ -[theme] -inherit = basic -stylesheet = icalendar.css -pygments_style = friendly diff --git a/libs/icalendar-3.6.1/docs/about.rst b/libs/icalendar-3.6.1/docs/about.rst deleted file mode 100644 index 61d374a..0000000 --- a/libs/icalendar-3.6.1/docs/about.rst +++ /dev/null @@ -1,14 +0,0 @@ -About -===== - -`Max M`_ had often needed to parse and generate iCalendar files. Finally he got -tired of writing ad-hoc tools. This package is his attempt at making an -iCalendar package for Python. The inspiration has come from the email package -in the standard lib, which he thinks is pretty simple, yet efficient and -powerful. 
- -At the time of writing this, last version was released more then 2 years ago. -Since then many things have changes. For one, `RFC 2445`_ was updated by `RFC -5545`_ which makes this package. So in some sense this package became outdated. - -.. _`Max M`: http://www.mxm.dk diff --git a/libs/icalendar-3.6.1/docs/changelog.rst b/libs/icalendar-3.6.1/docs/changelog.rst deleted file mode 100644 index d9e113e..0000000 --- a/libs/icalendar-3.6.1/docs/changelog.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../CHANGES.rst diff --git a/libs/icalendar-3.6.1/docs/conf.py b/libs/icalendar-3.6.1/docs/conf.py deleted file mode 100644 index a3bcc35..0000000 --- a/libs/icalendar-3.6.1/docs/conf.py +++ /dev/null @@ -1,221 +0,0 @@ -# -*- coding: utf-8 -*- -# -# icalendar documentation build configuration file, created by -# sphinx-quickstart on Wed Aug 17 00:40:41 2011. -# -# This file is execfile()d with the current directory set to its containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys, os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.insert(0, os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode'] - -# Add any paths that contain templates here, relative to this directory. 
-# templates_path = [] - -# The suffix of source filenames. -source_suffix = '.rst' - -# The encoding of source files. -#source_encoding = 'utf-8-sig' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = u'icalendar' -copyright = u'2011, MaxM' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = '3.0' -# The full version, including alpha/beta/rc tags. -release = '3.0dev' - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -exclude_patterns = ['_build'] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. 
-html_theme = 'icalendar' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_themes'] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -#html_title = None - -# A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = [] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -#html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -html_sidebars = { - '**': [ - 'globaltoc.html', - 'searchbox.html', - ], -} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -#html_domain_indices = True - -# If false, no index is generated. -#html_use_index = True - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. 
-#html_show_sourcelink = True - -# If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True - -# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None - -# Output file base name for HTML help builder. -htmlhelp_basename = 'icalendardoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'icalendar.tex', u'icalendar Documentation', - u'MaxM', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# If true, show page references after internal links. -#latex_show_pagerefs = False - -# If true, show URL addresses after external links. -#latex_show_urls = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_domain_indices = True - - -# -- Options for manual page output -------------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). 
-man_pages = [ - ('index', 'icalendar', u'icalendar Documentation', - [u'MaxM'], 1) -] diff --git a/libs/icalendar-3.6.1/docs/credits.rst b/libs/icalendar-3.6.1/docs/credits.rst deleted file mode 100644 index 82e689d..0000000 --- a/libs/icalendar-3.6.1/docs/credits.rst +++ /dev/null @@ -1,37 +0,0 @@ -icalendar contributors -====================== - -- Johannes Raggam (Maintainer) -- Max M (Original author) -- Andreas Zeidler -- Andrey Nikolaev -- Barak Michener -- Christophe de Vienne -- Christian Geier -- Dai MIKURUBE -- Dan Stovall -- Eric Hanchrow -- Erik Simmler -- George V. Reilly -- Jannis Leidel -- Jeroen van Meeuwen (Kolab Systems) -- Lennart Regebro -- Marc Egli -- Martijn Faassen -- Martin Melin -- Michael Smith -- Mikael Frykholm -- Olivier Grisel -- Pavel Repin -- Pedro Ferreira -- Rembane -- Robert Niederreiter -- Rok Garbas -- Ronan Dunklau -- Sidnei da Silva -- Stanislav Ochotnicky -- Stefan Schwarzer -- Victor Varvaryuk -- Wichert Akkerman -- spanktar -- tgecho diff --git a/libs/icalendar-3.6.1/docs/index.rst b/libs/icalendar-3.6.1/docs/index.rst deleted file mode 100644 index 6f72476..0000000 --- a/libs/icalendar-3.6.1/docs/index.rst +++ /dev/null @@ -1,16 +0,0 @@ - -.. include:: ../README.rst - -Contents -======== - -.. 
toctree:: - :maxdepth: 2 - - about - install - usage - RFC 5545 - changelog - credits - license diff --git a/libs/icalendar-3.6.1/docs/install.rst b/libs/icalendar-3.6.1/docs/install.rst deleted file mode 100644 index 2bc4a05..0000000 --- a/libs/icalendar-3.6.1/docs/install.rst +++ /dev/null @@ -1,11 +0,0 @@ -Installing iCalendar -==================== - -To install the icalendar package, use:: - - python setup.py install - -If installation is successful, you be able to import the iCalendar -package, like this:: - - >>> import icalendar diff --git a/libs/icalendar-3.6.1/docs/license.rst b/libs/icalendar-3.6.1/docs/license.rst deleted file mode 100644 index 68c5792..0000000 --- a/libs/icalendar-3.6.1/docs/license.rst +++ /dev/null @@ -1 +0,0 @@ -.. include:: ../LICENSE.rst diff --git a/libs/icalendar-3.6.1/docs/usage.rst b/libs/icalendar-3.6.1/docs/usage.rst deleted file mode 100644 index 22fbe4e..0000000 --- a/libs/icalendar-3.6.1/docs/usage.rst +++ /dev/null @@ -1,324 +0,0 @@ -iCalendar package -================= - -This package is used for parsing and generating iCalendar files following the -standard in RFC 2445. - -It should be fully compliant, but it is possible to generate and parse invalid -files if you really want to. - - -File structure --------------- - -An iCalendar file is a text file (utf-8) with a special format. Basically it -consists of content lines. - -Each content line defines a property that has 3 parts (name, parameters, -values). Parameters are optional. - -A simple content line with only name and value could look like this:: - - BEGIN:VCALENDAR - -A content line with parameters can look like this:: - - ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:MAILTO:example@example.com - -And the parts are:: - - Name: ATTENDEE - Params: CN=Max Rasmussen;ROLE=REQ-PARTICIPANT - Value: MAILTO:example@example.com - -Long content lines are usually "folded" to less than 75 character, but the -package takes care of that. 
- - -Overview --------- - -On a higher level iCalendar files consists of components. Components can have -sub components. - -The root component is the VCALENDAR:: - - BEGIN:VCALENDAR - ... vcalendar properties ... - END:VCALENDAR - -The most frequent subcomponent to a VCALENDAR is a VEVENT. They are -nested like this:: - - BEGIN:VCALENDAR - ... vcalendar properties ... - BEGIN:VEVENT - ... vevent properties ... - END:VEVENT - END:VCALENDAR - -Inside the components there are properties with values. The values -have special types. like integer, text, datetime etc. These values are -encoded in a special text format in an iCalendar file. - -There are methods for converting to and from these encodings in the package. - -These are the most important imports:: - - >>> from icalendar import Calendar, Event - - -Components ----------- - -Components are like (Case Insensitive) dicts. So if you want to set a property -you do it like this. The calendar is a component:: - - >>> cal = Calendar() - >>> cal['dtstart'] = '20050404T080000' - >>> cal['summary'] = 'Python meeting about calendaring' - >>> for k,v in cal.items(): - ... k,v - (u'DTSTART', '20050404T080000') - (u'SUMMARY', 'Python meeting about calendaring') - -NOTE: the recommended way to add components to the calendar is to use -create the subcomponent and add it via Calendar.add! The example above adds a -string, but not a vText component. - - -You can generate a string for a file with the to_ical() method:: - - >>> cal.to_ical() - 'BEGIN:VCALENDAR\r\nDTSTART:20050404T080000\r\nSUMMARY:Python meeting about calendaring\r\nEND:VCALENDAR\r\n' - -The rendered view is easier to read:: - - BEGIN:VCALENDAR - DTSTART:20050404T080000 - SUMMARY:Python meeting about calendaring - END:VCALENDAR - -So, let's define a function so we can easily display to_ical() output:: - - >>> def display(cal): - ... 
return cal.to_ical().replace('\r\n', '\n').strip() - -You can set multiple properties like this:: - - >>> cal = Calendar() - >>> cal['attendee'] = ['MAILTO:maxm@mxm.dk','MAILTO:test@example.com'] - >>> print display(cal) - BEGIN:VCALENDAR - ATTENDEE:MAILTO:maxm@mxm.dk - ATTENDEE:MAILTO:test@example.com - END:VCALENDAR - -If you don't want to care about whether a property value is a list or -a single value, just use the add() method. It will automatically -convert the property to a list of values if more than one value is -added. Here is an example:: - - >>> cal = Calendar() - >>> cal.add('attendee', 'MAILTO:maxm@mxm.dk') - >>> cal.add('attendee', 'MAILTO:test@example.com') - >>> print display(cal) - BEGIN:VCALENDAR - ATTENDEE:MAILTO:maxm@mxm.dk - ATTENDEE:MAILTO:test@example.com - END:VCALENDAR - -Note: this version doesn't check for compliance, so you should look in -the RFC 2445 spec for legal properties for each component, or look in -the icalendar/calendar.py file, where it is at least defined for each -component. - - -Subcomponents -------------- - -Any component can have subcomponents. Eg. inside a calendar there can -be events. They can be arbitrarily nested. First by making a new -component:: - - >>> event = Event() - >>> event['uid'] = '42' - >>> event['dtstart'] = '20050404T080000' - -And then appending it to a "parent":: - - >>> cal.add_component(event) - >>> print display(cal) - BEGIN:VCALENDAR - ATTENDEE:MAILTO:maxm@mxm.dk - ATTENDEE:MAILTO:test@example.com - BEGIN:VEVENT - DTSTART:20050404T080000 - UID:42 - END:VEVENT - END:VCALENDAR - -Subcomponents are appended to the subcomponents property on the component:: - - >>> cal.subcomponents - [VEVENT({'DTSTART': '20050404T080000', 'UID': '42'})] - - -Value types ------------ - -Property values are utf-8 encoded strings. - -This is impractical if you want to use the data for further -computation. Eg. the datetime format looks like this: -'20050404T080000'. 
But the package makes it simple to Parse and -generate iCalendar formatted strings. - -Basically you can make the add() method do the thinking, or you can do it -yourself. - -To add a datetime value, you can use Pythons built in datetime types, -and the set the encode parameter to true, and it will convert to the -type defined in the spec:: - - >>> from datetime import datetime - >>> cal.add('dtstart', datetime(2005,4,4,8,0,0)) - >>> cal['dtstart'].to_ical() - '20050404T080000' - -If that doesn't work satisfactorily for some reason, you can also do it -manually. - -In 'icalendar.prop', all the iCalendar data types are defined. Each -type has a class that can parse and encode the type. - -So if you want to do it manually:: - - >>> from icalendar import vDatetime - >>> now = datetime(2005,4,4,8,0,0) - >>> vDatetime(now).to_ical() - '20050404T080000' - -So the drill is to initialise the object with a python built in type, -and then call the "to_ical()" method on the object. That will return an -ical encoded string. - -You can do it the other way around too. To parse an encoded string, just call -the "from_ical()" method, and it will return an instance of the corresponding -Python type:: - - >>> vDatetime.from_ical('20050404T080000') - datetime.datetime(2005, 4, 4, 8, 0) - - >>> dt = vDatetime.from_ical('20050404T080000Z') - >>> repr(dt)[:62] - 'datetime.datetime(2005, 4, 4, 8, 0, tzinfo=)' - -You can also choose to use the decoded() method, which will return a decoded -value directly:: - - >>> cal = Calendar() - >>> cal.add('dtstart', datetime(2005,4,4,8,0,0)) - >>> cal['dtstart'].to_ical() - '20050404T080000' - >>> cal.decoded('dtstart') - datetime.datetime(2005, 4, 4, 8, 0) - - -Property parameters -------------------- - -Property parameters are automatically added, depending on the input value. 
For -example, for date/time related properties, the value type and timezone -identifier (if applicable) are automatically added here:: - - >>> event = Event() - >>> event.add('dtstart', datetime(2010, 10, 10, 10, 0, 0, - ... tzinfo=pytz.timezone("Europe/Vienna"))) - - >>> lines = event.to_ical().splitlines() - >>> self.assertTrue( - ... b"DTSTART;TZID=Europe/Vienna;VALUE=DATE-TIME:20101010T100000" - ... in lines) - - -You can also add arbitrary property parameters by passing a parameters -dictionary to the add method like so:: - - >>> event = Event() - >>> event.add('X-TEST-PROP', 'tryout.', - .... parameters={'prop1': 'val1', 'prop2': 'val2'}) - >>> lines = event.to_ical().splitlines() - >>> self.assertTrue(b"X-TEST-PROP;PROP1=val1;PROP2=val2:tryout." in lines) - - -Example -------- - -Here is an example generating a complete iCal calendar file with a -single event that can be loaded into the Mozilla calendar - -Init the calendar:: - - >>> cal = Calendar() - >>> from datetime import datetime - -Some properties are required to be compliant:: - - >>> cal.add('prodid', '-//My calendar product//mxm.dk//') - >>> cal.add('version', '2.0') - -We need at least one subcomponent for a calendar to be compliant:: - - >>> import pytz - >>> event = Event() - >>> event.add('summary', 'Python meeting about calendaring') - >>> event.add('dtstart', datetime(2005,4,4,8,0,0,tzinfo=pytz.utc)) - >>> event.add('dtend', datetime(2005,4,4,10,0,0,tzinfo=pytz.utc)) - >>> event.add('dtstamp', datetime(2005,4,4,0,10,0,tzinfo=pytz.utc)) - -A property with parameters. 
Notice that they are an attribute on the value:: - - >>> from icalendar import vCalAddress, vText - >>> organizer = vCalAddress('MAILTO:noone@example.com') - -Automatic encoding is not yet implemented for parameter values, so you -must use the 'v*' types you can import from the icalendar package -(they're defined in ``icalendar.prop``):: - - >>> organizer.params['cn'] = vText('Max Rasmussen') - >>> organizer.params['role'] = vText('CHAIR') - >>> event['organizer'] = organizer - >>> event['location'] = vText('Odense, Denmark') - - >>> event['uid'] = '20050115T101010/27346262376@mxm.dk' - >>> event.add('priority', 5) - - >>> attendee = vCalAddress('MAILTO:maxm@example.com') - >>> attendee.params['cn'] = vText('Max Rasmussen') - >>> attendee.params['ROLE'] = vText('REQ-PARTICIPANT') - >>> event.add('attendee', attendee, encode=0) - - >>> attendee = vCalAddress('MAILTO:the-dude@example.com') - >>> attendee.params['cn'] = vText('The Dude') - >>> attendee.params['ROLE'] = vText('REQ-PARTICIPANT') - >>> event.add('attendee', attendee, encode=0) - -Add the event to the calendar:: - - >>> cal.add_component(event) - -Write to disk:: - - >>> import tempfile, os - >>> directory = tempfile.mkdtemp() - >>> f = open(os.path.join(directory, 'example.ics'), 'wb') - >>> f.write(cal.to_ical()) - >>> f.close() - - -More documentation -================== - -Have a look at the tests of this package to get more examples. -All modules and classes docstrings, which document how they work. 
diff --git a/libs/icalendar-3.6.1/setup.cfg b/libs/icalendar-3.6.1/setup.cfg deleted file mode 100644 index 861a9f5..0000000 --- a/libs/icalendar-3.6.1/setup.cfg +++ /dev/null @@ -1,5 +0,0 @@ -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - diff --git a/libs/icalendar-3.6.1/setup.py b/libs/icalendar-3.6.1/setup.py deleted file mode 100644 index 1dd500c..0000000 --- a/libs/icalendar-3.6.1/setup.py +++ /dev/null @@ -1,53 +0,0 @@ -import codecs -import setuptools -import sys - - -version = '3.6.1' -shortdesc = 'iCalendar parser/generator' -longdesc = codecs.open('README.rst', encoding='utf-8').read() -longdesc += codecs.open('CHANGES.rst', encoding='utf-8').read() -longdesc += codecs.open('LICENSE.rst', encoding='utf-8').read() - - -tests_require = [] -if sys.version_info[:2] == (2, 6): - # Python unittest2 only needed for Python 2.6 - tests_require = ['unittest2'] - - -setuptools.setup( - name='icalendar', - version=version, - description=shortdesc, - long_description=longdesc, - classifiers=[ - 'Development Status :: 5 - Production/Stable', - 'Intended Audience :: Developers', - "Programming Language :: Python", - "Programming Language :: Python :: 2.6", - "Programming Language :: Python :: 2.7", - "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.3", - 'License :: OSI Approved :: BSD License', - 'Operating System :: OS Independent', - ], - keywords='calendar calendaring ical icalendar event todo journal ' - 'recurring', - author='Plone Foundation', - author_email='plone-developers@lists.sourceforge.net', - url='https://github.com/collective/icalendar', - license='BSD', - packages=setuptools.find_packages('src'), - package_dir={'': 'src'}, - include_package_data=True, - zip_safe=False, - install_requires=[ - 'setuptools', - 'python-dateutil', - 'pytz', - ], - extras_require={ - 'test': tests_require - } -) diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/PKG-INFO b/libs/icalendar-3.6.1/src/icalendar.egg-info/PKG-INFO 
deleted file mode 100644 index ed7c36d..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/PKG-INFO +++ /dev/null @@ -1,534 +0,0 @@ -Metadata-Version: 1.1 -Name: icalendar -Version: 3.6.1 -Summary: iCalendar parser/generator -Home-page: https://github.com/collective/icalendar -Author: Plone Foundation -Author-email: plone-developers@lists.sourceforge.net -License: BSD -Description: ========================================================== - Internet Calendaring and Scheduling (iCalendar) for Python - ========================================================== - - The `icalendar`_ package is a parser/generator of iCalendar files for use - with Python. - - ---- - - :Homepage: http://icalendar.readthedocs.org - :Code: http://github.com/collective/icalendar - :Mailing list: http://github.com/collective/icalendar/issues - :Dependencies: `setuptools`_ and since version 3.0 we depend on `pytz`_. - :Compatible with: Python 2.6, 2.7 and 3.3+ - :License: `BSD`_ - - ---- - - - Roadmap - ======= - - - 3.6: Python 3 support (current version) - - - 4.0: API refactoring - - - - Changes in version 3.0 - ====================== - - API Change - ---------- - - Since version we unified to icalendar de/serialization API to use only to_ical - (for writing an ical string from the internal representation) and from_ical - (for parsing an ical string into the internal representation). - - to_ical is now used instead of the methods ical, string, as_string and instead - of string casting via __str__ and str. - - from_ical is now used instead of from_string. - - This change is a requirement for future Python 3 compatibility. Please update - your code to reflect to the new API. - - Timezone support - ---------------- - - Timezones are now fully supported in icalendar for serialization and - deserialization. We use the pytz library for timezone components of datetime - instances. The timezone identifiers must be valid pytz respectively Olson - database timezone identifiers. 
This can be a problem for 'GMT' identifiers, - which are not defined in the Olson database. - - Instead of the own UTC tzinfo implementation we use pytz UTC tzinfo object now. - - - About this fork which is not a fork anymore - =========================================== - - Aim of this fork (not fork anymore, read further) was to bring this package up - to date with latest icalendar `RFC`_ specification as part of - `plone.app.event`_ project which goal is to bring recurrent evens to `Plone`_. - - After some thoughts we (Plone developers involved with `plone.app.event`_) send - a suggestion to icalendar-dev@codespeak.net to take over mainaining of - `icalendar`_. Nobody object and since version 2.2 we are back to development. - - .. _`icalendar`: http://pypi.python.org/pypi/icalendar - .. _`plone.app.event`: http://github.com/plone/plone.app.event - .. _`Plone`: http://plone.org - .. _`pytz`: http://pypi.python.org/pypi/pytz - .. _`setuptools`: http://pypi.python.org/pypi/setuptools - .. _`RFC`: http://www.ietf.org/rfc/rfc5545.txt - .. 
_`BSD`: https://github.com/collective/icalendar/issues/2 - - - Test Coverage Report - ==================== - - Output from coverage test:: - - Name Stmts Miss Cover - ---------------------------------------------------------------------------------- - .tox/py27/lib/python2.7/site-packages/icalendar/__init__ 5 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/cal 234 7 97% - .tox/py27/lib/python2.7/site-packages/icalendar/caselessdict 55 5 91% - .tox/py27/lib/python2.7/site-packages/icalendar/compat 1 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/parser 189 6 97% - .tox/py27/lib/python2.7/site-packages/icalendar/parser_tools 20 0 100% - .tox/py27/lib/python2.7/site-packages/icalendar/prop 533 62 88% - .tox/py27/lib/python2.7/site-packages/icalendar/tools 16 0 100% - ---------------------------------------------------------------------------------- - TOTAL 1053 80 92% - - - Changelog - ========= - - 3.6.1 (2014-01-13) - ------------------ - - - Open text files referenced by setup.py as utf-8, no matter what the locale - settings are set to. Fixes #122. - [sochotnicky] - - - Add tox.ini to source tarball, which simplifies testing for in distributions. - [sochotnicky] - - - 3.6 (2014-01-06) - ---------------- - - - Python3 (3.3+) + Python 2 (2.6+) support [geier] - - - Made sure to_ical() always returns bytes [geier] - - - Support adding lists to a component property, which value already was a list - and remove the Component.set method, which was only used by the add method. - [thet] - - - Remove ability to add property parameters via a value's params attribute when - adding via cal.add (that was only possible for custom value objects and makes - up a strange API), but support a parameter attribute on cal.add's method - signature to pass a dictionary with property parameter key/value pairs. - Fixes #116. - [thet] - - - Backport some of Regebro's changes from his regebro-refactor branch. 
- [thet] - - - Raise explicit error on another malformed content line case. - [hajdbo] - - - Correctly parse datetime component property values with timezone information - when parsed from ical strings. - [untitaker] - - - 3.5 (2013-07-03) - ---------------- - - - Let to_unicode be more graceful for non-unicode strings, as like CMFPlone's - safe_unicode does it. - [thet] - - - 3.4 (2013-04-24) - ---------------- - - - Switch to unicode internally. This should fix all en/decoding errors. - [thet] - - - Support for non-ascii parameter values. Fixes #88. - [warvariuc] - - - Added functions to transform chars in string with '\\' + any of r'\,;:' chars - into '%{:02X}' form to avoid splitting on chars escaped with '\\'. - [warvariuc] - - - Allow seconds in vUTCOffset properties. Fixes #55. - [thet] - - - Let ``Component.decode`` better handle vRecur and vDDDLists properties. - Fixes #70. - [thet] - - - Don't let ``Component.add`` re-encode already encoded values. This simplifies - the API, since there is no need explicitly pass ``encode=False``. Fixes #82. - [thet] - - - Rename tzinfo_from_dt to tzid_from_dt, which is what it does. - [thet] - - - More support for dateutil parsed tzinfo objects. Fixes #89. - [leo-naeka] - - - Remove python-dateutil version fix at all. Current python-dateutil has Py3 - and Py2 compatibility. - [thet] - - - Declare the required python-dateutil dependency in setup.py. Fixes #90. - [kleink] - - - Raise test coverage. - [thet] - - - Remove interfaces module, as it is unused. - [thet] - - - Remove ``test_doctests.py``, test suite already created properly in - ``test_icalendar.py``. - [rnix] - - - Transformed doctests into unittests, Test fixes and cleanup. - [warvariuc] - - - 3.3 (2013-02-08) - ---------------- - - - Drop support for Python < 2.6. - [thet] - - - Allow vGeo to be instantiated with list and not only tuples of geo - coordinates. Fixes #83. 
- [thet] - - - Don't force to pass a list to vDDDLists and allow setting individual RDATE - and EXDATE values without having to wrap them in a list. - [thet] - - - Fix encoding function to allow setting RDATE and EXDATE values and not to - have bypass encoding with an icalendar property. - [thet] - - - Allow setting of timezone for vDDDLists and support timezone properties for - RDATE and EXDATE component properties. - [thet] - - - Move setting of TZID properties to vDDDTypes, where it belongs to. - [thet] - - - Use @staticmethod decorator instead of wrapper function. - [warvariuc, thet] - - - Extend quoting of parameter values to all of those characters: ",;: ’'". - This fixes an outlook incompatibility with some characters. Fixes: #79, - Fixes: #81. - [warvariuc] - - - Define VTIMETZONE subcomponents STANDARD and DAYLIGHT for RFC5545 compliance. - [thet] - - - 3.2 (2012-11-27) - ---------------- - - - Documentation file layout restructuring. - [thet] - - - Fix time support. vTime events can be instantiated with a datetime.time - object, and do not inherit from datetime.time itself. - [rdunklau] - - - Correctly handle tzinfo objects parsed with dateutil. Fixes #77. - [warvariuc, thet] - - - Text values are escaped correclty. Fixes #74. - [warvariuc] - - - Returned old folding algorithm, as the current implementation fails in some - cases. Fixes #72, Fixes #73. - [warvariuc] - - - Supports to_ical() on date/time properties for dates prior to 1900. - [cdevienne] - - - 3.1 (2012-09-05) - ---------------- - - - Make sure parameters to certain properties propagate to the ical output. - [kanarip] - - - Re-include doctests. - [rnix] - - - Ensure correct datatype at instance creation time in ``prop.vCalAddress`` - and ``prop.vText``. - [rnix] - - - Apply TZID parameter to datetimes parsed from RECURRENCE-ID - [dbstovall] - - - Localize datetimes for timezones to avoid DST transition errors. 
- [dbstovall] - - - Allow UTC-OFFSET property value data types in seconds, which follows RFC5545 - specification. - [nikolaeff] - - - Remove utctz and normalized_timezone methods to simplify the codebase. The - methods were too tiny to be useful and just used at one place. - [thet] - - - When using Component.add() to add icalendar properties, force a value - conversion to UTC for CREATED, DTSTART and LAST-MODIFIED. The RFC expects UTC - for those properties. - [thet] - - - Removed last occurrences of old API (from_string). - [Rembane] - - - Add 'recursive' argument to property_items() to switch recursive listing. - For example when parsing a text/calendar text including multiple components - (e.g. a VCALENDAR with 5 VEVENTs), the previous situation required us to look - over all properties in VEVENTs even if we just want the properties under the - VCALENDAR component (VERSION, PRODID, CALSCALE, METHOD). - [dmikurube] - - - All unit tests fixed. - [mikaelfrykholm] - - - 3.0.1b2 (2012-03-01) - -------------------- - - - For all TZID parameters in DATE-TIME properties, use timezone identifiers - (e.g. Europe/Vienna) instead of timezone names (e.g. CET), as required by - RFC5545. Timezone names are used together with timezone identifiers in the - Timezone components. - [thet] - - - Timezone parsing, issues and test fixes. - [mikaelfrykholm, garbas, tgecho] - - - Since we use pytz for timezones, also use UTC tzinfo object from the pytz - library instead of own implementation. - [thet] - - - 3.0.1b1 (2012-02-24) - -------------------- - - - Update Release information. - [thet] - - - 3.0 - --- - - - Add API for proper Timezone support. Allow creating ical DATE-TIME strings - with timezone information from Python datetimes with pytz based timezone - information and vice versa. - [thet] - - - Unify API to only use to_ical and from_ical and remove string casting as a - requirement for Python 3 compatibility: - New: to_ical. 
- Old: ical, string, as_string and string casting via __str__ and str. - New: from_ical. - Old: from_string. - [thet] - - - 2.2 (2011-08-24) - ---------------- - - - migration to https://github.com/collective/icalendar using svn2git preserving - tags, branches and authors. - [garbas] - - - using tox for testing on python 2.4, 2.5, 2.6, 2.6. - [garbas] - - - fixed tests so they pass also under python 2.7. - [garbas] - - - running tests on https://jenkins.plone.org/job/icalendar (only 2.6 for now) - with some other metrics (pylint, clonedigger, coverage). - [garbas] - - - review and merge changes from https://github.com/cozi/icalendar fork. - [garbas] - - - created sphinx documentation and started documenting development and goals. - [garbas] - - - hook out github repository to http://readthedocs.org service so sphinx - documentation is generated on each commit (for master). Documentation can be - visible on: http://readthedocs.org/docs/icalendar/en/latest/ - [garbas] - - - 2.1 (2009-12-14) - ---------------- - - - Fix deprecation warnings about ``object.__init__`` taking no parameters. - - - Set the VALUE parameter correctly for date values. - - - Long binary data would be base64 encoded with newlines, which made the - iCalendar files incorrect. (This still needs testing). - - - Correctly handle content lines which include newlines. - - - 2.0.1 (2008-07-11) - ------------------ - - - Made the tests run under Python 2.5+ - - - Renamed the UTC class to Utc, so it would not clash with the UTC object, - since that rendered the UTC object unpicklable. - - - 2.0 (2008-07-11) - ---------------- - - - EXDATE and RDATE now returns a vDDDLists object, which contains a list - of vDDDTypes objects. This is do that EXDATE and RDATE can contain - lists of dates, as per RFC. - - ***Note!***: This change is incompatible with earlier behavior, so if you - handle EXDATE and RDATE you will need to update your code. 
- - - When createing a vDuration of -5 hours (which in itself is nonsensical), - the ical output of that was -P1DT19H, which is correct, but ugly. Now - it's '-PT5H', which is prettier. - - - 1.2 (2006-11-25) - ---------------- - - - Fixed a string index out of range error in the new folding code. - - - 1.1 (2006-11-23) - ---------------- - - - Fixed a bug in caselessdicts popitem. (thanks to Michael Smith - ) - - - The RFC 2445 was a bit unclear on how to handle line folding when it - happened to be in the middle of a UTF-8 character. This has been clarified - in the following discussion: - http://lists.osafoundation.org/pipermail/ietf-calsify/2006-August/001126.html - And this is now implemented in iCalendar. It will not fold in the middle of - a UTF-8 character, but may fold in the middle of a UTF-8 composing character - sequence. - - - 1.0 (2006-08-03) - ---------------- - - - make get_inline and set_inline support non ascii codes. - - - Added support for creating a python egg distribution. - - - 0.11 (2005-11-08) - ----------------- - - - Changed component .from_string to use types_factory instead of hardcoding - entries to 'inline' - - - Changed UTC tzinfo to a singleton so the same one is used everywhere - - - Made the parser more strict by using regular expressions for key name, - param name and quoted/unquoted safe char as per the RFC - - - Added some tests from the schooltool icalendar parser for better coverage - - - Be more forgiving on the regex for folding lines - - - Allow for multiple top-level components on .from_string - - - Fix vWeekdays, wasn't accepting relative param (eg: -3SA vs -SA) - - - vDDDTypes didn't accept negative period (eg: -P30M) - - - 'N' is also acceptable as newline on content lines, per RFC - - - 0.10 (2005-04-28) - ----------------- - - - moved code to codespeak.net subversion. - - - reorganized package structure so that source code is under 'src' directory. - Non-package files remain in distribution root. 
- - - redid doc/.py files as doc/.txt, using more modern doctest. Before they - were .py files with big docstrings. - - - added test.py testrunner, and tests/test_icalendar.py that picks up all - doctests in source code and doc directory, and runs them, when typing:: - - python2.3 test.py - - - renamed iCalendar to lower case package name, lowercased, de-pluralized and - shorted module names, which are mostly implementation detail. - - - changed tests so they generate .ics files in a temp directory, not in the - structure itself. - - License - ======= - - Copyright (c) 2012-2013, Plone Foundation - All rights reserved. - - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions are met: - - - Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - - - Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- -Keywords: calendar calendaring ical icalendar event todo journal recurring -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: Programming Language :: Python -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.3 -Classifier: License :: OSI Approved :: BSD License -Classifier: Operating System :: OS Independent diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/SOURCES.txt b/libs/icalendar-3.6.1/src/icalendar.egg-info/SOURCES.txt deleted file mode 100644 index 3ff0d5d..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/SOURCES.txt +++ /dev/null @@ -1,57 +0,0 @@ -CHANGES.rst -CONTRIBUTING.rst -LICENSE.rst -MANIFEST.in -README.rst -TODO.rst -setup.cfg -setup.py -tox.ini -docs/Makefile -docs/about.rst -docs/changelog.rst -docs/conf.py -docs/credits.rst -docs/index.rst -docs/install.rst -docs/license.rst -docs/usage.rst -docs/_themes/icalendar/layout.html -docs/_themes/icalendar/theme.conf -docs/_themes/icalendar/static/icalendar.css -src/icalendar/__init__.py -src/icalendar/cal.py -src/icalendar/caselessdict.py -src/icalendar/compat.py -src/icalendar/parser.py -src/icalendar/parser_tools.py -src/icalendar/prop.py -src/icalendar/tools.py -src/icalendar.egg-info/PKG-INFO -src/icalendar.egg-info/SOURCES.txt -src/icalendar.egg-info/dependency_links.txt -src/icalendar.egg-info/not-zip-safe -src/icalendar.egg-info/requires.txt -src/icalendar.egg-info/top_level.txt -src/icalendar/tests/__init__.py -src/icalendar/tests/encoding.ics -src/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics -src/icalendar/tests/issue_114_invalid_line.ics -src/icalendar/tests/issue_53_parsing_failure.ics -src/icalendar/tests/multiple.ics -src/icalendar/tests/recurrence.ics -src/icalendar/tests/test_encoding.py -src/icalendar/tests/test_fixed_issues.py 
-src/icalendar/tests/test_icalendar.py -src/icalendar/tests/test_multiple.py -src/icalendar/tests/test_property_params.py -src/icalendar/tests/test_recurrence.py -src/icalendar/tests/test_time.py -src/icalendar/tests/test_timezoned.py -src/icalendar/tests/test_unit_cal.py -src/icalendar/tests/test_unit_caselessdict.py -src/icalendar/tests/test_unit_parser_tools.py -src/icalendar/tests/test_unit_prop.py -src/icalendar/tests/test_unit_tools.py -src/icalendar/tests/time.ics -src/icalendar/tests/timezoned.ics \ No newline at end of file diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/dependency_links.txt b/libs/icalendar-3.6.1/src/icalendar.egg-info/dependency_links.txt deleted file mode 100644 index 8b13789..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/dependency_links.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/not-zip-safe b/libs/icalendar-3.6.1/src/icalendar.egg-info/not-zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/not-zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/requires.txt b/libs/icalendar-3.6.1/src/icalendar.egg-info/requires.txt deleted file mode 100644 index 49da783..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/requires.txt +++ /dev/null @@ -1,5 +0,0 @@ -setuptools -python-dateutil -pytz - -[test] diff --git a/libs/icalendar-3.6.1/src/icalendar.egg-info/top_level.txt b/libs/icalendar-3.6.1/src/icalendar.egg-info/top_level.txt deleted file mode 100644 index 744b7a9..0000000 --- a/libs/icalendar-3.6.1/src/icalendar.egg-info/top_level.txt +++ /dev/null @@ -1 +0,0 @@ -icalendar diff --git a/libs/icalendar-3.6.1/src/icalendar/__init__.py b/libs/icalendar-3.6.1/src/icalendar/__init__.py deleted file mode 100644 index 79bbf4d..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/__init__.py +++ /dev/null @@ -1,59 +0,0 @@ -from icalendar.cal import ( - Calendar, - Event, - Todo, 
- Journal, - Timezone, - TimezoneStandard, - TimezoneDaylight, - FreeBusy, - Alarm, - ComponentFactory, -) -# Property Data Value Types -from icalendar.prop import ( - vBinary, - vBoolean, - vCalAddress, - vDatetime, - vDate, - vDDDTypes, - vDuration, - vFloat, - vInt, - vPeriod, - vWeekday, - vFrequency, - vRecur, - vText, - vTime, - vUri, - vGeo, - vUTCOffset, - TypesFactory, -) -# useful tzinfo subclasses -from icalendar.prop import ( - FixedOffset, - LocalTimezone, -) -# Parameters and helper methods for splitting and joining string with escaped -# chars. -from icalendar.parser import ( - Parameters, - q_split, - q_join, -) - - -__all__ = [ - Calendar, Event, Todo, Journal, - FreeBusy, Alarm, ComponentFactory, - Timezone, TimezoneStandard, TimezoneDaylight, - vBinary, vBoolean, vCalAddress, vDatetime, vDate, - vDDDTypes, vDuration, vFloat, vInt, vPeriod, - vWeekday, vFrequency, vRecur, vText, vTime, vUri, - vGeo, vUTCOffset, TypesFactory, - FixedOffset, LocalTimezone, - Parameters, q_split, q_join, -] diff --git a/libs/icalendar-3.6.1/src/icalendar/cal.py b/libs/icalendar-3.6.1/src/icalendar/cal.py deleted file mode 100644 index 4022e6d..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/cal.py +++ /dev/null @@ -1,493 +0,0 @@ -# -*- coding: utf-8 -*- -"""Calendar is a dictionary like Python object that can render itself as VCAL -files according to rfc2445. - -These are the defined components. 
-""" -from datetime import datetime -from icalendar.caselessdict import CaselessDict -from icalendar.parser import Contentline -from icalendar.parser import Contentlines -from icalendar.parser import Parameters -from icalendar.parser import q_join -from icalendar.parser import q_split -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import data_encode -from icalendar.prop import TypesFactory -from icalendar.prop import vText, vDDDLists - -import pytz - - -###################################### -# The component factory - -class ComponentFactory(CaselessDict): - """All components defined in rfc 2445 are registered in this factory class. - To get a component you can use it like this. - """ - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. - """ - CaselessDict.__init__(self, *args, **kwargs) - self['VEVENT'] = Event - self['VTODO'] = Todo - self['VJOURNAL'] = Journal - self['VFREEBUSY'] = FreeBusy - self['VTIMEZONE'] = Timezone - self['STANDARD'] = TimezoneStandard - self['DAYLIGHT'] = TimezoneDaylight - self['VALARM'] = Alarm - self['VCALENDAR'] = Calendar - - -# These Properties have multiple property values inlined in one propertyline -# seperated by comma. Use CaselessDict as simple caseless set. -INLINE = CaselessDict( - [(cat, 1) for cat in ('CATEGORIES', 'RESOURCES', 'FREEBUSY')] -) - -_marker = [] - - -class Component(CaselessDict): - """Component is the base object for calendar, Event and the other - components defined in RFC 2445. normally you will not use this class - directy, but rather one of the subclasses. 
- """ - - name = '' # must be defined in each component - required = () # These properties are required - singletons = () # These properties must only appear once - multiple = () # may occur more than once - exclusive = () # These properties are mutually exclusive - inclusive = () # if any occurs the other(s) MUST occur - # ('duration', 'repeat') - ignore_exceptions = False # if True, and we cannot parse this - # component, we will silently ignore - # it, rather than let the exception - # propagate upwards - # not_compliant = [''] # List of non-compliant properties. - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. - """ - CaselessDict.__init__(self, *args, **kwargs) - # set parameters here for properties that use non-default values - self.subcomponents = [] # Components can be nested. - self.is_broken = False # True if we ignored an exception while - # parsing a property - - #def is_compliant(self, name): - # """Returns True is the given property name is compliant with the - # icalendar implementation. - # - # If the parser is too strict it might prevent parsing erroneous but - # otherwise compliant properties. So the parser is pretty lax, but it is - # possible to test for non-complience by calling this method. - # """ - # return name in not_compliant - - ############################# - # handling of property values - - def _encode(self, name, value, parameters=None, encode=1): - """Encode values to icalendar property values. - - :param name: Name of the property. - :type name: string - - :param value: Value of the property. Either of a basic Python type of - any of the icalendar's own property types. - :type value: Python native type or icalendar property type. - - :param parameters: Property parameter dictionary for the value. Only - available, if encode is set to True. 
- :type parameters: Dictionary - - :param encode: True, if the value should be encoded to one of - icalendar's own property types (Fallback is "vText") - or False, if not. - :type encode: Boolean - - :returns: icalendar property value - """ - if not encode: - return value - if isinstance(value, types_factory.all_types): - # Don't encode already encoded values. - return value - klass = types_factory.for_property(name) - obj = klass(value) - if parameters: - if isinstance(parameters, dict): - params = Parameters() - for key, item in parameters.items(): - params[key] = item - parameters = params - assert isinstance(parameters, Parameters) - obj.params = parameters - return obj - - def add(self, name, value, parameters=None, encode=1): - """Add a property. - - :param name: Name of the property. - :type name: string - - :param value: Value of the property. Either of a basic Python type of - any of the icalendar's own property types. - :type value: Python native type or icalendar property type. - - :param parameters: Property parameter dictionary for the value. Only - available, if encode is set to True. - :type parameters: Dictionary - - :param encode: True, if the value should be encoded to one of - icalendar's own property types (Fallback is "vText") - or False, if not. - :type encode: Boolean - - :returns: None - """ - if isinstance(value, datetime) and\ - name.lower() in ('dtstamp', 'created', 'last-modified'): - # RFC expects UTC for those... force value conversion. - if getattr(value, 'tzinfo', False) and value.tzinfo is not None: - value = value.astimezone(pytz.utc) - else: - # assume UTC for naive datetime instances - value = pytz.utc.localize(value) - - # encode value - if encode and isinstance(value, list) \ - and name.lower() not in ['rdate', 'exdate']: - # Individually convert each value to an ical type except rdate and - # exdate, where lists of dates might be passed to vDDDLists. 
- value = [self._encode(name, v, parameters, encode) for v in value] - else: - value = self._encode(name, value, parameters, encode) - - # set value - if name in self: - # If property already exists, append it. - #if name == 'attendee': import pdb; pdb.set_trace() - oldval = self[name] - if isinstance(oldval, list): - if isinstance(value, list): - value = oldval + value - else: - oldval.append(value) - value = oldval - else: - value = [oldval, value] - self[name] = value - - def _decode(self, name, value): - """Internal for decoding property values. - """ - - # TODO: Currently the decoded method calls the icalendar.prop instances - # from_ical. We probably want to decode properties into Python native - # types here. But when parsing from an ical string with from_ical, we - # want to encode the string into a real icalendar.prop property. - if isinstance(value, vDDDLists): - # TODO: Workaround unfinished decoding - return value - decoded = types_factory.from_ical(name, value) - # TODO: remove when proper decoded is implemented in every prop.* class - # Workaround to decode vText properly - if isinstance(decoded, vText): - decoded = decoded.encode(DEFAULT_ENCODING) - return decoded - - def decoded(self, name, default=_marker): - """Returns decoded value of property. - """ - # XXX: fail. what's this function supposed to do in the end? - # -rnix - - if name in self: - value = self[name] - if isinstance(value, list): - return [self._decode(name, v) for v in value] - return self._decode(name, value) - else: - if default is _marker: - raise KeyError(name) - else: - return default - - ######################################################################## - # Inline values. A few properties have multiple values inlined in in one - # property line. These methods are used for splitting and joining these. - - def get_inline(self, name, decode=1): - """Returns a list of values (split on comma). 
- """ - vals = [v.strip('" ') for v in q_split(self[name])] - if decode: - return [self._decode(name, val) for val in vals] - return vals - - def set_inline(self, name, values, encode=1): - """Converts a list of values into comma seperated string and sets value - to that. - """ - if encode: - values = [self._encode(name, value, encode=1) for value in values] - self[name] = types_factory['inline'](q_join(values)) - - ######################### - # Handling of components - - def add_component(self, component): - """Add a subcomponent to this component. - """ - self.subcomponents.append(component) - - def _walk(self, name): - """Walk to given component. - """ - result = [] - if name is None or self.name == name: - result.append(self) - for subcomponent in self.subcomponents: - result += subcomponent._walk(name) - return result - - def walk(self, name=None): - """Recursively traverses component and subcomponents. Returns sequence - of same. If name is passed, only components with name will be returned. - """ - if not name is None: - name = name.upper() - return self._walk(name) - - ##################### - # Generation - - def property_items(self, recursive=True): - """Returns properties in this component and subcomponents as: - [(name, value), ...] - """ - vText = types_factory['text'] - properties = [('BEGIN', vText(self.name).to_ical())] - property_names = self.sorted_keys() - for name in property_names: - values = self[name] - if isinstance(values, list): - # normally one property is one line - for value in values: - properties.append((name, value)) - else: - properties.append((name, values)) - if recursive: - # recursion is fun! - for subcomponent in self.subcomponents: - properties += subcomponent.property_items() - properties.append(('END', vText(self.name).to_ical())) - return properties - - @classmethod - def from_ical(cls, st, multiple=False): - """Populates the component recursively from a string. 
- """ - stack = [] # a stack of components - comps = [] - for line in Contentlines.from_ical(st): # raw parsing - if not line: - continue - name, params, vals = line.parts() - uname = name.upper() - # check for start of component - if uname == 'BEGIN': - # try and create one of the components defined in the spec, - # otherwise get a general Components for robustness. - c_name = vals.upper() - c_class = component_factory.get(c_name, cls) - component = c_class() - if not getattr(component, 'name', ''): # undefined components - component.name = c_name - stack.append(component) - # check for end of event - elif uname == 'END': - # we are done adding properties to this component - # so pop it from the stack and add it to the new top. - component = stack.pop() - if not stack: # we are at the end - comps.append(component) - else: - if not component.is_broken: - stack[-1].add_component(component) - # we are adding properties to the current top of the stack - else: - factory = types_factory.for_property(name) - component = stack[-1] - datetime_names = ('DTSTART', 'DTEND', 'RECURRENCE-ID', 'DUE', - 'FREEBUSY', 'RDATE', 'EXDATE') - try: - if name in datetime_names and 'TZID' in params: - vals = factory(factory.from_ical(vals, params['TZID'])) - else: - vals = factory(factory.from_ical(vals)) - except ValueError: - if not component.ignore_exceptions: - raise - component.is_broken = True - else: - vals.params = params - component.add(name, vals, encode=0) - - if multiple: - return comps - if len(comps) > 1: - raise ValueError('Found multiple components where ' - 'only one is allowed: {st!r}'.format(**locals())) - if len(comps) < 1: - raise ValueError('Found no components where ' - 'exactly one is required: ' - '{st!r}'.format(**locals())) - return comps[0] - - def __repr__(self): - return '%s(%s)' % (self.name, data_encode(self)) - - def content_line(self, name, value): - """Returns property as content line. 
- """ - params = getattr(value, 'params', Parameters()) - return Contentline.from_parts(name, params, value) - - def content_lines(self): - """Converts the Component and subcomponents into content lines. - """ - contentlines = Contentlines() - for name, value in self.property_items(): - cl = self.content_line(name, value) - contentlines.append(cl) - contentlines.append('') # remember the empty string in the end - return contentlines - - def to_ical(self): - content_lines = self.content_lines() - return content_lines.to_ical() - - -####################################### -# components defined in RFC 2445 - -class Event(Component): - - name = 'VEVENT' - - canonical_order = ( - 'SUMMARY', 'DTSTART', 'DTEND', 'DURATION', 'DTSTAMP', - 'UID', 'RECURRENCE-ID', 'SEQUENCE', - 'RRULE' 'EXRULE', 'RDATE', 'EXDATE', - ) - - required = ('UID',) - singletons = ( - 'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'GEO', 'LAST-MODIFIED', - 'LOCATION', 'ORGANIZER', 'PRIORITY', 'DTSTAMP', 'SEQUENCE', 'STATUS', - 'SUMMARY', 'TRANSP', 'URL', 'RECURRENCE-ID', 'DTEND', 'DURATION', - 'DTSTART', - ) - exclusive = ('DTEND', 'DURATION', ) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE' - ) - ignore_exceptions = True - - -class Todo(Component): - - name = 'VTODO' - - required = ('UID',) - singletons = ( - 'CLASS', 'COMPLETED', 'CREATED', 'DESCRIPTION', 'DTSTAMP', 'DTSTART', - 'GEO', 'LAST-MODIFIED', 'LOCATION', 'ORGANIZER', 'PERCENT', 'PRIORITY', - 'RECURRENCE-ID', 'SEQUENCE', 'STATUS', 'SUMMARY', 'UID', 'URL', 'DUE', - 'DURATION', - ) - exclusive = ('DUE', 'DURATION',) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RSTATUS', 'RELATED', 'RESOURCES', 'RDATE', 'RRULE' - ) - - -class Journal(Component): - - name = 'VJOURNAL' - - required = ('UID',) - singletons = ( - 'CLASS', 'CREATED', 'DESCRIPTION', 'DTSTART', 'DTSTAMP', - 'LAST-MODIFIED', 
'ORGANIZER', 'RECURRENCE-ID', 'SEQUENCE', 'STATUS', - 'SUMMARY', 'UID', 'URL', - ) - multiple = ( - 'ATTACH', 'ATTENDEE', 'CATEGORIES', 'COMMENT', 'CONTACT', 'EXDATE', - 'EXRULE', 'RELATED', 'RDATE', 'RRULE', 'RSTATUS', - ) - - -class FreeBusy(Component): - - name = 'VFREEBUSY' - - required = ('UID',) - singletons = ( - 'CONTACT', 'DTSTART', 'DTEND', 'DURATION', 'DTSTAMP', 'ORGANIZER', - 'UID', 'URL', - ) - multiple = ('ATTENDEE', 'COMMENT', 'FREEBUSY', 'RSTATUS',) - - -class Timezone(Component): - name = 'VTIMEZONE' - canonical_order = ('TZID', 'STANDARD', 'DAYLIGHT',) - required = ('TZID', 'STANDARD', 'DAYLIGHT',) - singletons = ('TZID', 'LAST-MODIFIED', 'TZURL',) - - -class TimezoneStandard(Component): - name = 'STANDARD' - required = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM') - singletons = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM', 'RRULE') - multiple = ('COMMENT', 'RDATE', 'TZNAME') - - -class TimezoneDaylight(Component): - name = 'DAYLIGHT' - required = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM') - singletons = ('DTSTART', 'TZOFFSETTO', 'TZOFFSETFROM', 'RRULE') - multiple = ('COMMENT', 'RDATE', 'TZNAME') - - -class Alarm(Component): - - name = 'VALARM' - # not quite sure about these ... - required = ('ACTION', 'TRIGGER',) - singletons = ('ATTACH', 'ACTION', 'TRIGGER', 'DURATION', 'REPEAT',) - inclusive = (('DURATION', 'REPEAT',),) - - -class Calendar(Component): - """This is the base object for an iCalendar file. 
- """ - name = 'VCALENDAR' - canonical_order = ('VERSION', 'PRODID', 'CALSCALE', 'METHOD',) - required = ('prodid', 'version', ) - singletons = ('prodid', 'version', ) - multiple = ('calscale', 'method', ) - -# These are read only singleton, so one instance is enough for the module -types_factory = TypesFactory() -component_factory = ComponentFactory() diff --git a/libs/icalendar-3.6.1/src/icalendar/caselessdict.py b/libs/icalendar-3.6.1/src/icalendar/caselessdict.py deleted file mode 100644 index 08ec8a4..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/caselessdict.py +++ /dev/null @@ -1,98 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import to_unicode -from icalendar.parser_tools import data_encode - - -def canonsort_keys(keys, canonical_order=None): - """Sorts leading keys according to canonical_order. Keys not specified in - canonical_order will appear alphabetically at the end. - """ - canonical_map = dict((k, i) for i, k in enumerate(canonical_order or [])) - head = [k for k in keys if k in canonical_map] - tail = [k for k in keys if k not in canonical_map] - return sorted(head, key=lambda k: canonical_map[k]) + sorted(tail) - - -def canonsort_items(dict1, canonical_order=None): - """Returns a list of items from dict1, sorted by canonical_order. - """ - return [(k, dict1[k]) for \ - k in canonsort_keys(dict1.keys(), canonical_order)] - - -class CaselessDict(dict): - """A dictionary that isn't case sensitive, and only uses strings as keys. - Values retain their case. - """ - - def __init__(self, *args, **kwargs): - """Set keys to upper for initial dict. 
- """ - dict.__init__(self, *args, **kwargs) - for key, value in self.items(): - key_upper = to_unicode(key).upper() - if key != key_upper: - dict.__delitem__(self, key) - self[key_upper] = value - - def __getitem__(self, key): - key = to_unicode(key) - return dict.__getitem__(self, key.upper()) - - def __setitem__(self, key, value): - key = to_unicode(key) - dict.__setitem__(self, key.upper(), value) - - def __delitem__(self, key): - key = to_unicode(key) - dict.__delitem__(self, key.upper()) - - def __contains__(self, key): - key = to_unicode(key) - return dict.__contains__(self, key.upper()) - - def get(self, key, default=None): - key = to_unicode(key) - return dict.get(self, key.upper(), default) - - def setdefault(self, key, value=None): - key = to_unicode(key) - return dict.setdefault(self, key.upper(), value) - - def pop(self, key, default=None): - key = to_unicode(key) - return dict.pop(self, key.upper(), default) - - def popitem(self): - return dict.popitem(self) - - def has_key(self, key): - key = to_unicode(key) - return dict.__contains__(self, key.upper()) - - def update(self, indict): - # Multiple keys where key1.upper() == key2.upper() will be lost. - for key, value in indict.items(): # TODO optimize in python 2 - self[key] = value - - def copy(self): - return CaselessDict(dict.copy(self)) - - def __repr__(self): - return 'CaselessDict(%s)' % data_encode(self) - - # A list of keys that must appear first in sorted_keys and sorted_items; - # must be uppercase. - canonical_order = None - - def sorted_keys(self): - """Sorts keys according to the canonical_order for the derived class. - Keys not specified in canonical_order will appear at the end. - """ - return canonsort_keys(self.keys(), self.canonical_order) - - def sorted_items(self): - """Sorts items according to the canonical_order for the derived class. - Items not specified in canonical_order will appear at the end. 
- """ - return canonsort_items(self, self.canonical_order) diff --git a/libs/icalendar-3.6.1/src/icalendar/compat.py b/libs/icalendar-3.6.1/src/icalendar/compat.py deleted file mode 100644 index 4704a14..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/compat.py +++ /dev/null @@ -1,11 +0,0 @@ -import sys - - -if sys.version_info[0] == 2: # pragma: no cover - unicode_type = unicode - bytes_type = str - iteritems = lambda d, *args, **kwargs: iter(d.iteritems(*args, **kwargs)) -else: # pragma: no cover - unicode_type = str - bytes_type = bytes - iteritems = lambda d, *args, **kwargs: iter(d.items(*args, **kwargs)) diff --git a/libs/icalendar-3.6.1/src/icalendar/parser.py b/libs/icalendar-3.6.1/src/icalendar/parser.py deleted file mode 100644 index 7f57db2..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/parser.py +++ /dev/null @@ -1,371 +0,0 @@ -# -*- coding: utf-8 -*- -"""This module parses and generates contentlines as defined in RFC 2445 -(iCalendar), but will probably work for other MIME types with similar syntax. -Eg. RFC 2426 (vCard) - -It is stupid in the sense that it treats the content purely as strings. No type -conversion is attempted. -""" -from icalendar import compat -from icalendar.caselessdict import CaselessDict -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import SEQUENCE_TYPES -from icalendar.parser_tools import data_encode -from icalendar.parser_tools import to_unicode - -import re - - -def escape_char(text): - """Format value according to iCalendar TEXT escaping rules. - """ - assert isinstance(text, (compat.unicode_type, compat.bytes_type)) - # NOTE: ORDER MATTERS! - return text.replace(r'\N', '\n')\ - .replace('\\', '\\\\')\ - .replace(';', r'\;')\ - .replace(',', r'\,')\ - .replace('\r\n', r'\n')\ - .replace('\n', r'\n') - - -def unescape_char(text): - assert isinstance(text, (compat.unicode_type, compat.bytes_type)) - # NOTE: ORDER MATTERS! 
- if isinstance(text, compat.unicode_type): - return text.replace(u'\\N', u'\\n')\ - .replace(u'\r\n', u'\n')\ - .replace(u'\\n', u'\n')\ - .replace(u'\\,', u',')\ - .replace(u'\\;', u';')\ - .replace(u'\\\\', u'\\') - elif isinstance(text, compat.bytes_type): - return text.replace(b'\N', b'\n')\ - .replace(b'\r\n', b'\n')\ - .replace(b'\n', b'\n')\ - .replace(b'\,', b',')\ - .replace(b'\;', b';')\ - .replace(b'\\\\', b'\\') - - -def tzid_from_dt(dt): - tzid = None - if hasattr(dt.tzinfo, 'zone'): - tzid = dt.tzinfo.zone # pytz implementation - elif hasattr(dt.tzinfo, 'tzname'): - try: - tzid = dt.tzinfo.tzname(dt) # dateutil implementation - except AttributeError: - # No tzid available - pass - return tzid - - -def foldline(line, limit=75, fold_sep=u'\r\n '): - """Make a string folded as defined in RFC5545 - Lines of text SHOULD NOT be longer than 75 octets, excluding the line - break. Long content lines SHOULD be split into a multiple line - representations using a line "folding" technique. That is, a long - line can be split between any two characters by inserting a CRLF - immediately followed by a single linear white-space character (i.e., - SPACE or HTAB). - """ - assert isinstance(line, compat.unicode_type) - assert u'\n' not in line - - ret_line = u'' - byte_count = 0 - for char in line: - char_byte_len = len(char.encode(DEFAULT_ENCODING)) - byte_count += char_byte_len - if byte_count >= limit: - ret_line += fold_sep - byte_count = char_byte_len - ret_line += char - - return ret_line - - -################################################################# -# Property parameter stuff - -def param_value(value): - """Returns a parameter value. 
- """ - if isinstance(value, SEQUENCE_TYPES): - return q_join(value) - return dquote(value) - - -# Could be improved -NAME = re.compile('[\w-]+') -UNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F",:;]') -QUNSAFE_CHAR = re.compile('[\x00-\x08\x0a-\x1f\x7F"]') -FOLD = re.compile(b'(\r?\n)+[ \t]') -uFOLD = re.compile(u'(\r?\n)+[ \t]') -NEWLINE = re.compile(r'\r?\n') - - -def validate_token(name): - match = NAME.findall(name) - if len(match) == 1 and name == match[0]: - return - raise ValueError(name) - - -def validate_param_value(value, quoted=True): - validator = QUNSAFE_CHAR if quoted else UNSAFE_CHAR - if validator.findall(value): - raise ValueError(value) - - -# chars presence of which in parameter value will be cause the value -# to be enclosed in double-quotes -QUOTABLE = re.compile("[,;: ’']") - - -def dquote(val): - """Enclose parameter values containing [,;:] in double quotes. - """ - # a double-quote character is forbidden to appear in a parameter value - # so replace it with a single-quote character - val = val.replace('"', "'") - if QUOTABLE.search(val): - return '"%s"' % val - return val - - -# parsing helper -def q_split(st, sep=','): - """Splits a string on char, taking double (q)uotes into considderation. - """ - result = [] - cursor = 0 - length = len(st) - inquote = 0 - for i in range(length): - ch = st[i] - if ch == '"': - inquote = not inquote - if not inquote and ch == sep: - result.append(st[cursor:i]) - cursor = i + 1 - if i + 1 == length: - result.append(st[cursor:]) - return result - - -def q_join(lst, sep=','): - """Joins a list on sep, quoting strings with QUOTABLE chars. - """ - return sep.join(dquote(itm) for itm in lst) - - -class Parameters(CaselessDict): - """Parser and generator of Property parameter strings. It knows nothing of - datatypes. Its main concern is textual structure. - """ - - def params(self): - """In rfc2445 keys are called parameters, so this is to be consitent - with the naming conventions. 
- """ - return self.keys() - -# TODO? -# Later, when I get more time... need to finish this off now. The last major -# thing missing. -# def _encode(self, name, value, cond=1): -# # internal, for conditional convertion of values. -# if cond: -# klass = types_factory.for_property(name) -# return klass(value) -# return value -# -# def add(self, name, value, encode=0): -# "Add a parameter value and optionally encode it." -# if encode: -# value = self._encode(name, value, encode) -# self[name] = value -# -# def decoded(self, name): -# "returns a decoded value, or list of same" - - def __repr__(self): - return 'Parameters(%s)' % data_encode(self) - - def to_ical(self): - result = [] - items = self.items() - for key, value in sorted(items): - value = param_value(value) - if isinstance(value, compat.unicode_type): - value = value.encode(DEFAULT_ENCODING) - # CaselessDict keys are always unicode - key = key.upper().encode(DEFAULT_ENCODING) - result.append(key + b'=' + value) - return b';'.join(result) - - @classmethod - def from_ical(cls, st, strict=False): - """Parses the parameter format from ical text format.""" - - # parse into strings - result = cls() - for param in q_split(st, ';'): - try: - key, val = q_split(param, '=') - validate_token(key) - # Property parameter values that are not in quoted - # strings are case insensitive. 
- vals = [] - for v in q_split(val, ','): - if v.startswith('"') and v.endswith('"'): - v = v.strip('"') - validate_param_value(v, quoted=True) - vals.append(v) - else: - validate_param_value(v, quoted=False) - if strict: - vals.append(v.upper()) - else: - vals.append(v) - if not vals: - result[key] = val - else: - if len(vals) == 1: - result[key] = vals[0] - else: - result[key] = vals - except ValueError as exc: - raise ValueError('%r is not a valid parameter string: %s' - % (param, exc)) - return result - - -def escape_string(val): - # '%{:02X}'.format(i) - return val.replace(r'\,', '%2C').replace(r'\:', '%3A')\ - .replace(r'\;', '%3B').replace(r'\\', '%5C') - - -def unsescape_string(val): - return val.replace('%2C', ',').replace('%3A', ':')\ - .replace('%3B', ';').replace('%5C', '\\') - - -######################################### -# parsing and generation of content lines - -class Contentline(compat.unicode_type): - """A content line is basically a string that can be folded and parsed into - parts. - """ - def __new__(cls, value, strict=False, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - assert u'\n' not in value, ('Content line can not contain unescaped ' - 'new line characters.') - self = super(Contentline, cls).__new__(cls, value) - self.strict = strict - return self - - @classmethod - def from_parts(cls, name, params, values): - """Turn a parts into a content line. - """ - assert isinstance(params, Parameters) - if hasattr(values, 'to_ical'): - values = values.to_ical() - else: - values = vText(values).to_ical() - # elif isinstance(values, basestring): - # values = escape_char(values) - - # TODO: after unicode only, remove this - # Convert back to unicode, after to_ical encoded it. 
- name = to_unicode(name) - values = to_unicode(values) - if params: - params = to_unicode(params.to_ical()) - return cls(u'%s;%s:%s' % (name, params, values)) - return cls(u'%s:%s' % (name, values)) - - def parts(self): - """Split the content line up into (name, parameters, values) parts. - """ - try: - st = escape_string(self) - name_split = None - value_split = None - in_quotes = False - for i, ch in enumerate(st): - if not in_quotes: - if ch in ':;' and not name_split: - name_split = i - if ch == ':' and not value_split: - value_split = i - if ch == '"': - in_quotes = not in_quotes - name = unsescape_string(st[:name_split]) - if not name: - raise ValueError('Key name is required') - validate_token(name) - if not name_split or name_split + 1 == value_split: - raise ValueError('Invalid content line') - params = Parameters.from_ical(st[name_split + 1: value_split], - strict=self.strict) - params = Parameters( - (unsescape_string(key), unsescape_string(value)) - for key, value in compat.iteritems(params) - ) - values = unsescape_string(st[value_split + 1:]) - return (name, params, values) - except ValueError as exc: - raise ValueError( - u"Content line could not be parsed into parts: %r: %s" - % (self, exc) - ) - - @classmethod - def from_ical(cls, ical, strict=False): - """Unfold the content lines in an iCalendar into long content lines. - """ - ical = to_unicode(ical) - # a fold is carriage return followed by either a space or a tab - return cls(uFOLD.sub('', ical), strict=strict) - - def to_ical(self): - """Long content lines are folded so they are less than 75 characters - wide. - """ - return foldline(self).encode(DEFAULT_ENCODING) - - -class Contentlines(list): - """I assume that iCalendar files generally are a few kilobytes in size. - Then this should be efficient. for Huge files, an iterator should probably - be used instead. - """ - def to_ical(self): - """Simply join self. 
- """ - return b'\r\n'.join(line.to_ical() for line in self if line) + b'\r\n' - - @classmethod - def from_ical(cls, st): - """Parses a string into content lines. - """ - st = to_unicode(st) - try: - # a fold is carriage return followed by either a space or a tab - unfolded = uFOLD.sub('', st) - lines = cls(Contentline(line) for - line in unfolded.splitlines() if line) - lines.append('') # '\r\n' at the end of every content line - return lines - except: - raise ValueError('Expected StringType with content lines') - - -# XXX: what kind of hack is this? import depends to be at end -from icalendar.prop import vText diff --git a/libs/icalendar-3.6.1/src/icalendar/parser_tools.py b/libs/icalendar-3.6.1/src/icalendar/parser_tools.py deleted file mode 100644 index 1e0dade..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/parser_tools.py +++ /dev/null @@ -1,33 +0,0 @@ -from icalendar import compat - - -SEQUENCE_TYPES = (list, tuple) -DEFAULT_ENCODING = 'utf-8' - - -def to_unicode(value, encoding='utf-8'): - """Converts a value to unicode, even if it is already a unicode string. - """ - if isinstance(value, compat.unicode_type): - return value - elif isinstance(value, compat.bytes_type): - try: - value = value.decode(encoding) - except UnicodeDecodeError: - value = value.decode('utf-8', 'replace') - return value - - -def data_encode(data, encoding=DEFAULT_ENCODING): - """Encode all datastructures to the given encoding. - Currently unicode strings, dicts and lists are supported. 
- """ - # http://stackoverflow.com/questions/1254454/fastest-way-to-convert-a-dicts-keys-values-from-unicode-to-str - if isinstance(data, compat.unicode_type): - return data.encode(encoding) - elif isinstance(data, dict): - return dict(map(data_encode, compat.iteritems(data))) - elif isinstance(data, list) or isinstance(data, tuple): - return list(map(data_encode, data)) - else: - return data diff --git a/libs/icalendar-3.6.1/src/icalendar/prop.py b/libs/icalendar-3.6.1/src/icalendar/prop.py deleted file mode 100644 index 82c9df8..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/prop.py +++ /dev/null @@ -1,993 +0,0 @@ -# -*- coding: utf-8 -*- -"""This module contains the parser/generators (or coders/encoders if you -prefer) for the classes/datatypes that are used in iCalendar: - -########################################################################### -# This module defines these property value data types and property parameters - -4.2 Defined property parameters are: - - ALTREP, CN, CUTYPE, DELEGATED-FROM, DELEGATED-TO, DIR, ENCODING, FMTTYPE, - FBTYPE, LANGUAGE, MEMBER, PARTSTAT, RANGE, RELATED, RELTYPE, ROLE, RSVP, - SENT-BY, TZID, VALUE - -4.3 Defined value data types are: - - BINARY, BOOLEAN, CAL-ADDRESS, DATE, DATE-TIME, DURATION, FLOAT, INTEGER, - PERIOD, RECUR, TEXT, TIME, URI, UTC-OFFSET - -########################################################################### - -iCalendar properties has values. The values are strongly typed. This module -defines these types, calling val.to_ical() on them, Will render them as defined -in rfc2445. - -If you pass any of these classes a Python primitive, you will have an object -that can render itself as iCalendar formatted date. - -Property Value Data Types starts with a 'v'. they all have an to_ical() and -from_ical() method. The to_ical() method generates a text string in the -iCalendar format. The from_ical() method can parse this format and return a -primitive Python datatype. 
So it should allways be true that: - - x == vDataType.from_ical(VDataType(x).to_ical()) - -These types are mainly used for parsing and file generation. But you can set -them directly. -""" -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from datetime import tzinfo -from dateutil.tz import tzutc -from icalendar import compat -from icalendar.caselessdict import CaselessDict -from icalendar.parser import Parameters -from icalendar.parser import escape_char -from icalendar.parser import tzid_from_dt -from icalendar.parser import unescape_char -from icalendar.parser_tools import DEFAULT_ENCODING -from icalendar.parser_tools import SEQUENCE_TYPES -from icalendar.parser_tools import to_unicode - -import base64 -import binascii -import pytz -import re -import time as _time - - -DATE_PART = r'(\d+)D' -TIME_PART = r'T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+)S)?' -DATETIME_PART = '(?:%s)?(?:%s)?' % (DATE_PART, TIME_PART) -WEEKS_PART = r'(\d+)W' -DURATION_REGEX = re.compile(r'([-+]?)P(?:%s|%s)$' - % (WEEKS_PART, DATETIME_PART)) -WEEKDAY_RULE = re.compile('(?P[+-]?)(?P[\d]?)' - '(?P[\w]{2})$') - - -#################################################### -# handy tzinfo classes you can use. -# - -ZERO = timedelta(0) -HOUR = timedelta(hours=1) -STDOFFSET = timedelta(seconds=-_time.timezone) -if _time.daylight: - DSTOFFSET = timedelta(seconds=-_time.altzone) -else: - DSTOFFSET = STDOFFSET -DSTDIFF = DSTOFFSET - STDOFFSET - - -class FixedOffset(tzinfo): - """Fixed offset in minutes east from UTC. - """ - def __init__(self, offset, name): - self.__offset = timedelta(minutes=offset) - self.__name = name - - def utcoffset(self, dt): - return self.__offset - - def tzname(self, dt): - return self.__name - - def dst(self, dt): - return ZERO - - -class LocalTimezone(tzinfo): - """Timezone of the machine where the code is running. 
- """ - def utcoffset(self, dt): - if self._isdst(dt): - return DSTOFFSET - else: - return STDOFFSET - - def dst(self, dt): - if self._isdst(dt): - return DSTDIFF - else: - return ZERO - - def tzname(self, dt): - return _time.tzname[self._isdst(dt)] - - def _isdst(self, dt): - tt = (dt.year, dt.month, dt.day, - dt.hour, dt.minute, dt.second, - dt.weekday(), 0, -1) - stamp = _time.mktime(tt) - tt = _time.localtime(stamp) - return tt.tm_isdst > 0 - - -class vBinary(object): - """Binary property values are base 64 encoded. - """ - - def __init__(self, obj): - self.obj = to_unicode(obj) - self.params = Parameters(encoding='BASE64', value="BINARY") - - def __repr__(self): - return "vBinary('%s')" % self.to_ical() - - def to_ical(self): - return binascii.b2a_base64(self.obj.encode('utf-8'))[:-1] - - @staticmethod - def from_ical(ical): - try: - return base64.b64decode(ical) - except UnicodeError: - raise ValueError('Not valid base 64 encoding.') - - -class vBoolean(int): - """Returns specific string according to state. - """ - BOOL_MAP = CaselessDict(true=True, false=False) - - def __new__(cls, *args, **kwargs): - self = super(vBoolean, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - if self: - return b'TRUE' - return b'FALSE' - - @classmethod - def from_ical(cls, ical): - try: - return cls.BOOL_MAP[ical] - except: - raise ValueError("Expected 'TRUE' or 'FALSE'. Got %s" % ical) - - -class vCalAddress(compat.unicode_type): - """This just returns an unquoted string. - """ - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vCalAddress, cls).__new__(cls, value) - self.params = Parameters() - return self - - def __repr__(self): - return "vCalAddress('%s')" % self.to_ical() - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - return cls(ical) - - -class vFloat(float): - """Just a float. 
- """ - def __new__(cls, *args, **kwargs): - self = super(vFloat, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - return compat.unicode_type(self).encode('utf-8') - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected float value, got: %s' % ical) - - -class vInt(int): - """Just an int. - """ - def __new__(cls, *args, **kwargs): - self = super(vInt, cls).__new__(cls, *args, **kwargs) - self.params = Parameters() - return self - - def to_ical(self): - return compat.unicode_type(self).encode('utf-8') - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected int, got: %s' % ical) - - -class vDDDLists(object): - """A list of vDDDTypes values. - """ - def __init__(self, dt_list): - if not hasattr(dt_list, '__iter__'): - dt_list = [dt_list] - vDDD = [] - tzid = None - for dt in dt_list: - dt = vDDDTypes(dt) - vDDD.append(dt) - if 'TZID' in dt.params: - tzid = dt.params['TZID'] - - if tzid: - # NOTE: no support for multiple timezones here! - self.params = Parameters({'TZID': tzid}) - self.dts = vDDD - - def to_ical(self): - dts_ical = (dt.to_ical() for dt in self.dts) - return b",".join(dts_ical) - - @staticmethod - def from_ical(ical, timezone=None): - out = [] - ical_dates = ical.split(",") - for ical_dt in ical_dates: - out.append(vDDDTypes.from_ical(ical_dt, timezone=timezone)) - return out - - -class vDDDTypes(object): - """A combined Datetime, Date or Duration parser/generator. Their format - cannot be confused, and often values can be of either types. - So this is practical. 
- """ - def __init__(self, dt): - if not isinstance(dt, (datetime, date, timedelta, time)): - raise ValueError('You must use datetime, date, timedelta or time') - if isinstance(dt, datetime): - self.params = Parameters(dict(value='DATE-TIME')) - elif isinstance(dt, date): - self.params = Parameters(dict(value='DATE')) - elif isinstance(dt, time): - self.params = Parameters(dict(value='TIME')) - - if (isinstance(dt, datetime) or isinstance(dt, time))\ - and getattr(dt, 'tzinfo', False): - tzinfo = dt.tzinfo - if tzinfo is not pytz.utc and not isinstance(tzinfo, tzutc): - # set the timezone as a parameter to the property - tzid = tzid_from_dt(dt) - if tzid: - self.params.update({'TZID': tzid}) - self.dt = dt - - def to_ical(self): - dt = self.dt - if isinstance(dt, datetime): - return vDatetime(dt).to_ical() - elif isinstance(dt, date): - return vDate(dt).to_ical() - elif isinstance(dt, timedelta): - return vDuration(dt).to_ical() - elif isinstance(dt, time): - return vTime(dt).to_ical() - else: - raise ValueError('Unknown date type') - - @classmethod - def from_ical(cls, ical, timezone=None): - if isinstance(ical, cls): - return ical.dt - u = ical.upper() - if u.startswith('-P') or u.startswith('P'): - return vDuration.from_ical(ical) - try: - return vDatetime.from_ical(ical, timezone=timezone) - except ValueError: - try: - return vDate.from_ical(ical) - except ValueError: - return vTime.from_ical(ical) - - -class vDate(object): - """Render and generates iCalendar date format. 
- """ - def __init__(self, dt): - if not isinstance(dt, date): - raise ValueError('Value MUST be a date instance') - self.dt = dt - self.params = Parameters(dict(value='DATE')) - - def to_ical(self): - s = "%04d%02d%02d" % (self.dt.year, self.dt.month, self.dt.day) - return s.encode('utf-8') - - @staticmethod - def from_ical(ical): - try: - timetuple = ( - int(ical[:4]), # year - int(ical[4:6]), # month - int(ical[6:8]), # day - ) - return date(*timetuple) - except: - raise ValueError('Wrong date format %s' % ical) - - -class vDatetime(object): - """Render and generates icalendar datetime format. - - vDatetime is timezone aware and uses the pytz library, an implementation of - the Olson database in Python. When a vDatetime object is created from an - ical string, you can pass a valid pytz timezone identifier. When a - vDatetime object is created from a python datetime object, it uses the - tzinfo component, if present. Otherwise an timezone-naive object is - created. Be aware that there are certain limitations with timezone naive - DATE-TIME components in the icalendar standard. 
- """ - def __init__(self, dt): - self.dt = dt - self.params = Parameters() - - def to_ical(self): - dt = self.dt - tzid = tzid_from_dt(dt) - - s = "%04d%02d%02dT%02d%02d%02d" % ( - dt.year, - dt.month, - dt.day, - dt.hour, - dt.minute, - dt.second - ) - if tzid == 'UTC': - s += "Z" - elif tzid: - self.params.update({'TZID': tzid}) - return s.encode('utf-8') - - @staticmethod - def from_ical(ical, timezone=None): - tzinfo = None - if timezone: - try: - tzinfo = pytz.timezone(timezone) - except pytz.UnknownTimeZoneError: - pass - - try: - timetuple = ( - int(ical[:4]), # year - int(ical[4:6]), # month - int(ical[6:8]), # day - int(ical[9:11]), # hour - int(ical[11:13]), # minute - int(ical[13:15]), # second - ) - if tzinfo: - return tzinfo.localize(datetime(*timetuple)) - elif not ical[15:]: - return datetime(*timetuple) - elif ical[15:16] == 'Z': - return datetime(tzinfo=pytz.utc, *timetuple) - else: - raise ValueError(ical) - except: - raise ValueError('Wrong datetime format: %s' % ical) - - -class vDuration(object): - """Subclass of timedelta that renders itself in the iCalendar DURATION - format. 
- """ - - def __init__(self, td): - if not isinstance(td, timedelta): - raise ValueError('Value MUST be a timedelta instance') - self.td = td - self.params = Parameters() - - def to_ical(self): - sign = "" - if self.td.days < 0: - sign = "-" - self.td = -self.td - timepart = "" - if self.td.seconds: - timepart = "T" - hours = self.td.seconds // 3600 - minutes = self.td.seconds % 3600 // 60 - seconds = self.td.seconds % 60 - if hours: - timepart += "%dH" % hours - if minutes or (hours and seconds): - timepart += "%dM" % minutes - if seconds: - timepart += "%dS" % seconds - if self.td.days == 0 and timepart: - return (compat.unicode_type(sign).encode('utf-8') + b'P' + - compat.unicode_type(timepart).encode('utf-8')) - else: - return (compat.unicode_type(sign).encode('utf-8') + b'P' + - compat.unicode_type(abs(self.td.days)).encode('utf-8') + - b'D' + compat.unicode_type(timepart).encode('utf-8')) - - @staticmethod - def from_ical(ical): - try: - match = DURATION_REGEX.match(ical) - sign, weeks, days, hours, minutes, seconds = match.groups() - if weeks: - value = timedelta(weeks=int(weeks)) - else: - value = timedelta(days=int(days or 0), - hours=int(hours or 0), - minutes=int(minutes or 0), - seconds=int(seconds or 0)) - if sign == '-': - value = -value - return value - except: - raise ValueError('Invalid iCalendar duration: %s' % ical) - - -class vPeriod(object): - """A precise period of time. 
- """ - def __init__(self, per): - start, end_or_duration = per - if not (isinstance(start, datetime) or isinstance(start, date)): - raise ValueError('Start value MUST be a datetime or date instance') - if not (isinstance(end_or_duration, datetime) or - isinstance(end_or_duration, date) or - isinstance(end_or_duration, timedelta)): - raise ValueError('end_or_duration MUST be a datetime, ' - 'date or timedelta instance') - by_duration = 0 - if isinstance(end_or_duration, timedelta): - by_duration = 1 - duration = end_or_duration - end = start + duration - else: - end = end_or_duration - duration = end - start - if start > end: - raise ValueError("Start time is greater than end time") - - self.params = Parameters() - # set the timezone identifier - # does not support different timezones for start and end - tzid = tzid_from_dt(start) - if tzid: - self.params['TZID'] = tzid - - self.start = start - self.end = end - self.by_duration = by_duration - self.duration = duration - - def __cmp__(self, other): - if not isinstance(other, vPeriod): - raise NotImplementedError('Cannot compare vPeriod with %r' % other) - return cmp((self.start, self.end), (other.start, other.end)) - - def overlaps(self, other): - if self.start > other.start: - return other.overlaps(self) - if self.start <= other.start < self.end: - return True - return False - - def to_ical(self): - if self.by_duration: - return (vDatetime(self.start).to_ical() + b'/' + - vDuration(self.duration).to_ical()) - return (vDatetime(self.start).to_ical() + b'/' + - vDatetime(self.end).to_ical()) - - @staticmethod - def from_ical(ical): - try: - start, end_or_duration = ical.split('/') - start = vDDDTypes.from_ical(start) - end_or_duration = vDDDTypes.from_ical(end_or_duration) - return (start, end_or_duration) - except: - raise ValueError('Expected period format, got: %s' % ical) - - def __repr__(self): - if self.by_duration: - p = (self.start, self.duration) - else: - p = (self.start, self.end) - return 'vPeriod(%r)' % 
p - - -class vWeekday(compat.unicode_type): - """This returns an unquoted weekday abbrevation. - """ - week_days = CaselessDict({ - "SU": 0, "MO": 1, "TU": 2, "WE": 3, "TH": 4, "FR": 5, "SA": 6, - }) - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vWeekday, cls).__new__(cls, value) - match = WEEKDAY_RULE.match(self) - if match is None: - raise ValueError('Expected weekday abbrevation, got: %s' % self) - match = match.groupdict() - sign = match['signal'] - weekday = match['weekday'] - relative = match['relative'] - if not weekday in vWeekday.week_days or sign not in '+-': - raise ValueError('Expected weekday abbrevation, got: %s' % self) - self.relative = relative and int(relative) or None - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING).upper() - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical.upper()) - except: - raise ValueError('Expected weekday abbrevation, got: %s' % ical) - - -class vFrequency(compat.unicode_type): - """A simple class that catches illegal values. - """ - - frequencies = CaselessDict({ - "SECONDLY": "SECONDLY", - "MINUTELY": "MINUTELY", - "HOURLY": "HOURLY", - "DAILY": "DAILY", - "WEEKLY": "WEEKLY", - "MONTHLY": "MONTHLY", - "YEARLY": "YEARLY", - }) - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vFrequency, cls).__new__(cls, value) - if not self in vFrequency.frequencies: - raise ValueError('Expected frequency, got: %s' % self) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING).upper() - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical.upper()) - except: - raise ValueError('Expected frequency, got: %s' % ical) - - -class vRecur(CaselessDict): - """Recurrence definition. 
- """ - - frequencies = ["SECONDLY", "MINUTELY", "HOURLY", "DAILY", "WEEKLY", - "MONTHLY", "YEARLY"] - - # Mac iCal ignores RRULEs where FREQ is not the first rule part. - # Sorts parts according to the order listed in RFC 5545, section 3.3.10. - canonical_order = ("FREQ", "UNTIL", "COUNT", "INTERVAL", - "BYSECOND", "BYMINUTE", "BYHOUR", "BYDAY", - "BYMONTHDAY", "BYYEARDAY", "BYWEEKNO", "BYMONTH", - "BYSETPOS", "WKST") - - types = CaselessDict({ - 'COUNT': vInt, - 'INTERVAL': vInt, - 'BYSECOND': vInt, - 'BYMINUTE': vInt, - 'BYHOUR': vInt, - 'BYMONTHDAY': vInt, - 'BYYEARDAY': vInt, - 'BYMONTH': vInt, - 'UNTIL': vDDDTypes, - 'BYSETPOS': vInt, - 'WKST': vWeekday, - 'BYDAY': vWeekday, - 'FREQ': vFrequency, - }) - - def __init__(self, *args, **kwargs): - CaselessDict.__init__(self, *args, **kwargs) - self.params = Parameters() - - def to_ical(self): - result = [] - for key, vals in self.sorted_items(): - typ = self.types[key] - if not isinstance(vals, SEQUENCE_TYPES): - vals = [vals] - vals = b','.join(typ(val).to_ical() for val in vals) - - # CaselessDict keys are always unicode - key = key.encode(DEFAULT_ENCODING) - result.append(key + b'=' + vals) - - return b';'.join(result) - - @classmethod - def parse_type(cls, key, values): - # integers - parser = cls.types.get(key, vText) - return [parser.from_ical(v) for v in values.split(',')] - - @classmethod - def from_ical(cls, ical): - if isinstance(ical, cls): - return ical - try: - recur = cls() - for pairs in ical.split(';'): - key, vals = pairs.split('=') - recur[key] = cls.parse_type(key, vals) - return dict(recur) - except: - raise ValueError('Error in recurrence rule: %s' % ical) - - -class vText(compat.unicode_type): - """Simple text. 
- """ - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vText, cls).__new__(cls, value) - self.encoding = encoding - self.params = Parameters() - return self - - def __repr__(self): - return "vText('%s')" % self.to_ical() - - def to_ical(self): - return escape_char(self).encode(self.encoding) - - @classmethod - def from_ical(cls, ical): - ical_unesc = unescape_char(ical) - return cls(ical_unesc) - - -class vTime(object): - """Render and generates iCalendar time format. - """ - - def __init__(self, *args): - if len(args) == 1: - if not isinstance(args[0], (time, datetime)): - raise ValueError('Expected a datetime.time, got: %s' % args[0]) - self.dt = args[0] - else: - self.dt = time(*args) - self.params = Parameters(dict(value='TIME')) - - def to_ical(self): - return self.dt.strftime("%H%M%S") - - @staticmethod - def from_ical(ical): - # TODO: timezone support - try: - timetuple = (int(ical[:2]), int(ical[2:4]), int(ical[4:6])) - return time(*timetuple) - except: - raise ValueError('Expected time, got: %s' % ical) - - -class vUri(compat.unicode_type): - """Uniform resource identifier is basically just an unquoted string. - """ - - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vUri, cls).__new__(cls, value) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - try: - return cls(ical) - except: - raise ValueError('Expected , got: %s' % ical) - - -class vGeo(object): - """A special type that is only indirectly defined in the rfc. 
- """ - - def __init__(self, geo): - try: - latitude, longitude = (geo[0], geo[1]) - latitude = float(latitude) - longitude = float(longitude) - except: - raise ValueError('Input must be (float, float) for ' - 'latitude and longitude') - self.latitude = latitude - self.longitude = longitude - self.params = Parameters() - - def to_ical(self): - return '%s;%s' % (self.latitude, self.longitude) - - @staticmethod - def from_ical(ical): - try: - latitude, longitude = ical.split(';') - return (float(latitude), float(longitude)) - except: - raise ValueError("Expected 'float;float' , got: %s" % ical) - - -class vUTCOffset(object): - """Renders itself as a utc offset. - """ - - def __init__(self, td): - if not isinstance(td, timedelta): - raise ValueError('Offset value MUST be a timedelta instance') - self.td = td - self.params = Parameters() - - def to_ical(self): - - if self.td < timedelta(0): - sign = '-%s' - td = timedelta(0)-self.td # get timedelta relative to 0 - else: - # Google Calendar rejects '0000' but accepts '+0000' - sign = '+%s' - td = self.td - - days, seconds = td.days, td.seconds - - hours = abs(days * 24 + seconds // 3600) - minutes = abs((seconds % 3600) // 60) - seconds = abs(seconds % 60) - if seconds: - duration = '%02i%02i%02i' % (hours, minutes, seconds) - else: - duration = '%02i%02i' % (hours, minutes) - return sign % duration - - @classmethod - def from_ical(cls, ical): - if isinstance(ical, cls): - return ical.td - try: - sign, hours, minutes, seconds = (ical[0:1], - int(ical[1:3]), - int(ical[3:5]), - int(ical[5:7] or 0)) - offset = timedelta(hours=hours, minutes=minutes, seconds=seconds) - except: - raise ValueError('Expected utc offset, got: %s' % ical) - if offset >= timedelta(hours=24): - raise ValueError( - 'Offset must be less than 24 hours, was %s' % ical) - if sign == '-': - return -offset - return offset - - -class vInline(compat.unicode_type): - """This is an especially dumb class that just holds raw unparsed text and - has 
parameters. Conversion of inline values are handled by the Component - class, so no further processing is needed. - """ - def __new__(cls, value, encoding=DEFAULT_ENCODING): - value = to_unicode(value, encoding=encoding) - self = super(vInline, cls).__new__(cls, value) - self.params = Parameters() - return self - - def to_ical(self): - return self.encode(DEFAULT_ENCODING) - - @classmethod - def from_ical(cls, ical): - return cls(ical) - - -class TypesFactory(CaselessDict): - """All Value types defined in rfc 2445 are registered in this factory - class. - - The value and parameter names don't overlap. So one factory is enough for - both kinds. - """ - - def __init__(self, *args, **kwargs): - "Set keys to upper for initial dict" - CaselessDict.__init__(self, *args, **kwargs) - self.all_types = ( - vBinary, - vBoolean, - vCalAddress, - vDDDLists, - vDDDTypes, - vDate, - vDatetime, - vDuration, - vFloat, - vFrequency, - vGeo, - vInline, - vInt, - vPeriod, - vRecur, - vText, - vTime, - vUTCOffset, - vUri, - vWeekday - ) - self['binary'] = vBinary - self['boolean'] = vBoolean - self['cal-address'] = vCalAddress - self['date'] = vDDDTypes - self['date-time'] = vDDDTypes - self['duration'] = vDDDTypes - self['float'] = vFloat - self['integer'] = vInt - self['period'] = vPeriod - self['recur'] = vRecur - self['text'] = vText - self['time'] = vTime - self['uri'] = vUri - self['utc-offset'] = vUTCOffset - self['geo'] = vGeo - self['inline'] = vInline - self['date-time-list'] = vDDDLists - - ################################################# - # Property types - - # These are the default types - types_map = CaselessDict({ - #################################### - # Property value types - # Calendar Properties - 'calscale': 'text', - 'method': 'text', - 'prodid': 'text', - 'version': 'text', - # Descriptive Component Properties - 'attach': 'uri', - 'categories': 'text', - 'class': 'text', - 'comment': 'text', - 'description': 'text', - 'geo': 'geo', - 'location': 'text', - 
'percent-complete': 'integer', - 'priority': 'integer', - 'resources': 'text', - 'status': 'text', - 'summary': 'text', - # Date and Time Component Properties - 'completed': 'date-time', - 'dtend': 'date-time', - 'due': 'date-time', - 'dtstart': 'date-time', - 'duration': 'duration', - 'freebusy': 'period', - 'transp': 'text', - # Time Zone Component Properties - 'tzid': 'text', - 'tzname': 'text', - 'tzoffsetfrom': 'utc-offset', - 'tzoffsetto': 'utc-offset', - 'tzurl': 'uri', - # Relationship Component Properties - 'attendee': 'cal-address', - 'contact': 'text', - 'organizer': 'cal-address', - 'recurrence-id': 'date-time', - 'related-to': 'text', - 'url': 'uri', - 'uid': 'text', - # Recurrence Component Properties - 'exdate': 'date-time-list', - 'exrule': 'recur', - 'rdate': 'date-time-list', - 'rrule': 'recur', - # Alarm Component Properties - 'action': 'text', - 'repeat': 'integer', - 'trigger': 'duration', - # Change Management Component Properties - 'created': 'date-time', - 'dtstamp': 'date-time', - 'last-modified': 'date-time', - 'sequence': 'integer', - # Miscellaneous Component Properties - 'request-status': 'text', - #################################### - # parameter types (luckily there is no name overlap) - 'altrep': 'uri', - 'cn': 'text', - 'cutype': 'text', - 'delegated-from': 'cal-address', - 'delegated-to': 'cal-address', - 'dir': 'uri', - 'encoding': 'text', - 'fmttype': 'text', - 'fbtype': 'text', - 'language': 'text', - 'member': 'cal-address', - 'partstat': 'text', - 'range': 'text', - 'related': 'text', - 'reltype': 'text', - 'role': 'text', - 'rsvp': 'boolean', - 'sent-by': 'cal-address', - 'tzid': 'text', - 'value': 'text', - }) - - def for_property(self, name): - """Returns a the default type for a property or parameter - """ - return self[self.types_map.get(name, 'text')] - - def to_ical(self, name, value): - """Encodes a named value from a primitive python type to an icalendar - encoded string. 
- """ - type_class = self.for_property(name) - return type_class(value).to_ical() - - def from_ical(self, name, value): - """Decodes a named property or parameter value from an icalendar - encoded string to a primitive python type. - """ - type_class = self.for_property(name) - decoded = type_class.from_ical(value) - return decoded diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/__init__.py b/libs/icalendar-3.6.1/src/icalendar/tests/__init__.py deleted file mode 100644 index aba627b..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/__init__.py +++ /dev/null @@ -1,5 +0,0 @@ -# unittest/unittest2 importer -import unittest -if not hasattr(unittest.TestCase, 'assertIsNotNone'): - import unittest2 as unittest -unittest # pep 8 diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/encoding.ics b/libs/icalendar-3.6.1/src/icalendar/tests/encoding.ics deleted file mode 100644 index 5a0047e..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/encoding.ics +++ /dev/null @@ -1,16 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Plönë.org//NONSGML plone.app.event//EN -VERSION:2.0 -X-WR-CALNAME:äöü ÄÖÜ € -X-WR-CALDESC:test non ascii: äöü ÄÖÜ € -X-WR-RELCALID:12345 -BEGIN:VEVENT -DTSTART:20101010T100000Z -DTEND:20101010T120000Z -CREATED:20101010T100000Z -UID:123456 -SUMMARY:Non-ASCII Test: ÄÖÜ äöü € -DESCRIPTION:icalendar should be able to handle non-ascii: €äüöÄÜÖ. 
-LOCATION:Tribstrül -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics b/libs/icalendar-3.6.1/src/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics deleted file mode 100644 index 2356cdc..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/issue_112_missing_tzinfo_on_exdate.ics +++ /dev/null @@ -1,48 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Google Inc//Google Calendar 70.9054//EN -VERSION:2.0 -CALSCALE:GREGORIAN -METHOD:PUBLISH -X-WR-CALNAME:Market East -X-WR-TIMEZONE:America/New_York -X-WR-CALDESC: -BEGIN:VTIMEZONE -TZID:America/New_York -X-LIC-LOCATION:America/New_York -BEGIN:DAYLIGHT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -TZNAME:EDT -DTSTART:19700308T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -TZNAME:EST -DTSTART:19701101T020000 -RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU -END:STANDARD -END:VTIMEZONE - -BEGIN:VEVENT -DTSTART;TZID=America/New_York:20130907T120000 -DTEND;TZID=America/New_York:20130907T170000 -RRULE:FREQ=WEEKLY;BYDAY=FR,SA;UNTIL=20131025T035959Z -EXDATE;TZID=America/New_York:20131012T120000 -EXDATE;TZID=America/New_York:20131011T120000 -DTSTAMP:20131021T025552Z -UID:ak30b02u7858q1oo6ji9dm4mgg@google.com -CREATED:20130903T181453Z -DESCRIPTION:The Fieldhouse and Hard Rock Cafe are working with PhillyRising - to provide live entertainment on Friday and Saturday afternoons throughout - the Summer. -LAST-MODIFIED:20131015T210927Z -LOCATION:12th and Market Streets (weather permitting) -SEQUENCE:0 -STATUS:CONFIRMED -SUMMARY:Market East Live! 
-TRANSP:OPAQUE -END:VEVENT - -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/issue_114_invalid_line.ics b/libs/icalendar-3.6.1/src/icalendar/tests/issue_114_invalid_line.ics deleted file mode 100644 index 53965f0..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/issue_114_invalid_line.ics +++ /dev/null @@ -1,41 +0,0 @@ -BEGIN:VEVENT -DTSTART:20130927T130000Z -DTEND:20130927T140000Z -DTSTAMP:20131107T004757Z -ORGANIZER;CN=gxxxxxxxn@nxx.fr:mailto:gxxxxxn@nxx.fr -UID:040000008200E00074C5B7101A82E00800000000A0F3321606B6CE01000000000000000 - 010000000F09F33F0E8ED4C44B99F6027ACF588D0 -ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=NEEDS-ACTION;CN=St - eve Bxxxxxx;X-NUM-GUESTS=0:mailto:sxxxxxt@nxx.fr -ATTENDEE;CUTYPE=INDIVIDUAL;ROLE=REQ-PARTICIPANT;PARTSTAT=ACCEPTED;CN=Boris - Hxxxxx;X-NUM-GUESTS=0:mailto:bxxxxxxk@vxxxxxxxx.com -CREATED:20130920T113409Z -DESCRIPTION:Quand : vendredi 27 septembre 2013 15:00-16:00 (UTC+01:00) Brux - elles\, Copenhague\, Madrid\, Paris.\nEmplacement : Conf-Call - 01 xx xx xx - xx\n\nRemarque : le décalage GMT ci-dessus ne tient pas compte des réglage - s de l'heure d'été.\n\n*~*~*~*~*~*~*~*~*~*\n\nComme convenu à l’instant par - e-mail\n -LAST-MODIFIED:20130920T115104Z -LOCATION:Conf-Call - 01 xx xx xx xx -SEQUENCE:0 -STATUS:CONFIRMED -SUMMARY:Nxx - Réunion lancement PxxxxxxT -TRANSP:OPAQUE -X-ALT-DESC;FMTTYPE=text/html:\n\n\n\n\n\n\n\n\n

Qu - and : vendredi 27 septembre 2013 15:00-16:00 (UTC+01:00) Bruxelles\, Copenh - ague\, Madrid\, Paris.

\n\n

Emplacement : Conf-Call - 01 xx xx xx xx

- \n\n

Remarque : le décalage - GMT ci-dessus ne tient pas compte des réglages de l'heure d'été.

\n\n

*~*~*~*~*~*~*~*~ - *~*

\n\n

Co - mme convenu à l’instant par e-mail

- \n\n\n -X-MICROSOFT-CDO-BUSYSTATUS:TENTATIVE -X-MICROSOFT-CDO-IMPORTANCE:1 -X-MICROSOFT-CDO-INTENDEDSTATUS:BUSY -X -END:VEVENT diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/issue_53_parsing_failure.ics b/libs/icalendar-3.6.1/src/icalendar/tests/issue_53_parsing_failure.ics deleted file mode 100644 index 67f42cf..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/issue_53_parsing_failure.ics +++ /dev/null @@ -1,78 +0,0 @@ -BEGIN:VCALENDAR -VERSION:2.0 -PRODID:-//Meetup//RemoteApi//EN -CALSCALE:GREGORIAN -METHOD:PUBLISH -X-ORIGINAL-URL:http://www.meetup.com/DevOpsDC/events/ical/DevOpsDC/ -X-WR-CALNAME:Events - DevOpsDC -BEGIN:VTIMEZONE -TZID:America/New_York -TZURL:http://tzurl.org/zoneinfo-outlook/America/New_York -X-LIC-LOCATION:America/New_York -BEGIN:DAYLIGHT -TZOFFSETFROM:-0500 -TZOFFSETTO:-0400 -TZNAME:EDT -DTSTART:19700308T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=2SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:-0400 -TZOFFSETTO:-0500 -TZNAME:EST -DTSTART:19701101T020000 -RRULE:FREQ=YEARLY;BYMONTH=11;BYDAY=1SU -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20120712T183000 -DTEND;TZID=America/New_York:20120712T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nThursday\, July 12 at 6:30 PM\n\nThis will be a joi - nt meetup / hack night with the DC jQuery Users Group. The idea behind - the hack night: Small teams consisting of at least 1 member...\n\nDeta - ils: http://www.meetup.com/DevOpsDC/events/47635522/ -CLASS:PUBLIC -CREATED:20120111T120339Z -GEO:38.90;-77.01 -LOCATION:Fathom Creative\, Inc. 
(1333 14th Street Northwest\, Washington - D.C.\, DC 20005) -URL:http://www.meetup.com/DevOpsDC/events/47635522/ -LAST-MODIFIED:20120522T174406Z -UID:event_qtkfrcyqkbnb@meetup.com -END:VEVENT -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20120911T183000 -DTEND;TZID=America/New_York:20120911T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nTuesday\, September 11 at 6:30 PM\n\n \n\nDetails: - http://www.meetup.com/DevOpsDC/events/47635532/ -CLASS:PUBLIC -CREATED:20120111T120352Z -GEO:38.90;-77.01 -LOCATION:CustomInk\, LLC (7902 Westpark Drive\, McLean\, VA 22102) -URL:http://www.meetup.com/DevOpsDC/events/47635532/ -LAST-MODIFIED:20120316T202210Z -UID:event_qtkfrcyqmbpb@meetup.com -END:VEVENT -BEGIN:VEVENT -DTSTAMP:20120605T003759Z -DTSTART;TZID=America/New_York:20121113T183000 -DTEND;TZID=America/New_York:20121113T213000 -STATUS:CONFIRMED -SUMMARY:DevOps DC Meetup -DESCRIPTION:DevOpsDC\nTuesday\, November 13 at 6:30 PM\n\n \n\nDetails: h - ttp://www.meetup.com/DevOpsDC/events/47635552/ -CLASS:PUBLIC -CREATED:20120111T120402Z -GEO:38.90;-77.01 -LOCATION:CustomInk\, LLC (7902 Westpark Drive\, McLean\, VA 22102) -URL:http://www.meetup.com/DevOpsDC/events/47635552/ -LAST-MODIFIED:20120316T202210Z -UID:event_qtkfrcyqpbrb@meetup.com -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/multiple.ics b/libs/icalendar-3.6.1/src/icalendar/tests/multiple.ics deleted file mode 100644 index dbbde27..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/multiple.ics +++ /dev/null @@ -1,80 +0,0 @@ -BEGIN:VCALENDAR -VERSION - - :2.0 -PRODID - - :-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN -METHOD - - :PUBLISH -BEGIN:VEVENT -UID - - :956630271 -SUMMARY - - :Christmas Day -CLASS - - :PUBLIC -X-MOZILLA-ALARM-DEFAULT-UNITS - - :minutes -X-MOZILLA-ALARM-DEFAULT-LENGTH - - :15 -X-MOZILLA-RECUR-DEFAULT-UNITS - - :weeks -X-MOZILLA-RECUR-DEFAULT-INTERVAL - - :1 -DTSTART - - ;VALUE=DATE - :20031225 -DTEND - - 
;VALUE=DATE - :20031226 -DTSTAMP - - :20020430T114937Z -END:VEVENT -END:VCALENDAR -BEGIN:VCALENDAR -VERSION - :2.0 -PRODID - :-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN -METHOD - :PUBLISH -BEGIN:VEVENT -UID - :911737808 -SUMMARY - :Boxing Day -CLASS - :PUBLIC -X-MOZILLA-ALARM-DEFAULT-UNITS - :minutes -X-MOZILLA-ALARM-DEFAULT-LENGTH - :15 -X-MOZILLA-RECUR-DEFAULT-UNITS - :weeks -X-MOZILLA-RECUR-DEFAULT-INTERVAL - :1 -DTSTART - ;VALUE=DATE - :20030501 -DTSTAMP - :20020430T114937Z -END:VEVENT -BEGIN:VEVENT -UID - :wh4t3v3r -DTSTART;VALUE=DATE:20031225 -SUMMARY:Christmas again! -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/recurrence.ics b/libs/icalendar-3.6.1/src/icalendar/tests/recurrence.ics deleted file mode 100644 index a5596a5..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/recurrence.ics +++ /dev/null @@ -1,24 +0,0 @@ -BEGIN:VCALENDAR -METHOD:Request -PRODID:-//My product//mxm.dk/ -VERSION:2.0 -BEGIN:VEVENT -DTSTART:19960401T010000 -DTEND:19960401T020000 -RRULE:FREQ=DAILY;COUNT=100 -EXDATE:19960402T010000Z,19960403T010000Z,19960404T010000Z -SUMMARY:A recurring event with exdates -END:VEVENT -BEGIN:VEVENT -DTSTART;TZID=Europe/Vienna:20120327T100000 -DTEND;TZID=Europe/Vienna:20120327T180000 -RRULE:FREQ=WEEKLY;UNTIL=20120703T080000Z;BYDAY=TU -EXDATE;TZID=Europe/Vienna:20120529T100000 -EXDATE;TZID=Europe/Vienna:20120403T100000 -EXDATE;TZID=Europe/Vienna:20120410T100000 -EXDATE;TZID=Europe/Vienna:20120501T100000 -EXDATE;TZID=Europe/Vienna:20120417T100000 -DTSTAMP:20130716T120638Z -SUMMARY:A Recurring event with multiple exdates, one per line. 
-END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_encoding.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_encoding.py deleted file mode 100644 index 196b4a9..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_encoding.py +++ /dev/null @@ -1,90 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestEncoding(unittest.TestCase): - - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - data = open(os.path.join(directory, 'encoding.ics'), 'rb').read() - cal = icalendar.Calendar.from_ical(data) - - self.assertEqual(cal['prodid'].to_ical().decode('utf-8'), - u"-//Plönë.org//NONSGML plone.app.event//EN") - self.assertEqual(cal['X-WR-CALDESC'].to_ical().decode('utf-8'), - u"test non ascii: äöü ÄÖÜ €") - - event = cal.walk('VEVENT')[0] - self.assertEqual(event['SUMMARY'].to_ical().decode('utf-8'), - u'Non-ASCII Test: ÄÖÜ äöü €') - self.assertEqual( - event['DESCRIPTION'].to_ical().decode('utf-8'), - u'icalendar should be able to handle non-ascii: €äüöÄÜÖ.' - ) - self.assertEqual(event['LOCATION'].to_ical().decode('utf-8'), - u'Tribstrül') - - def test_create_to_ical(self): - cal = icalendar.Calendar() - - cal.add('prodid', u"-//Plönë.org//NONSGML plone.app.event//EN") - cal.add('version', u"2.0") - cal.add('x-wr-calname', u"äöü ÄÖÜ €") - cal.add('x-wr-caldesc', u"test non ascii: äöü ÄÖÜ €") - cal.add('x-wr-relcalid', u"12345") - - event = icalendar.Event() - event.add( - 'dtstart', - datetime.datetime(2010, 10, 10, 10, 00, 00, tzinfo=pytz.utc) - ) - event.add( - 'dtend', - datetime.datetime(2010, 10, 10, 12, 00, 00, tzinfo=pytz.utc) - ) - event.add( - 'created', - datetime.datetime(2010, 10, 10, 0, 0, 0, tzinfo=pytz.utc) - ) - event.add('uid', u'123456') - event.add('summary', u'Non-ASCII Test: ÄÖÜ äöü €') - event.add( - 'description', - u'icalendar should be able to de/serialize non-ascii.' 
- ) - event.add('location', u'Tribstrül') - cal.add_component(event) - - ical_lines = cal.to_ical().splitlines() - cmp = b'PRODID:-//Pl\xc3\xb6n\xc3\xab.org//NONSGML plone.app.event//EN' - self.assertTrue(cmp in ical_lines) - - def test_create_event_simple(self): - event = icalendar.Event() - event.add( - "dtstart", - datetime.datetime(2010, 10, 10, 0, 0, 0, tzinfo=pytz.utc) - ) - event.add("summary", u"åäö") - out = event.to_ical() - summary = b'SUMMARY:\xc3\xa5\xc3\xa4\xc3\xb6' - self.assertTrue(summary in out.splitlines()) - - def test_unicode_parameter_name(self): - # Test for issue #80 - cal = icalendar.Calendar() - event = icalendar.Event() - event.add(u'DESCRIPTION', u'äöüßÄÖÜ') - cal.add_component(event) - c = cal.to_ical() - self.assertEqual( - c, - b'BEGIN:VCALENDAR\r\nBEGIN:VEVENT\r\nDESCRIPTION:' - + b'\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f\xc3\x84\xc3\x96\xc3\x9c\r\n' - + b'END:VEVENT\r\nEND:VCALENDAR\r\n' - ) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_fixed_issues.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_fixed_issues.py deleted file mode 100644 index c860d71..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_fixed_issues.py +++ /dev/null @@ -1,247 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import to_unicode -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestIssues(unittest.TestCase): - - def test_issue_53(self): - """Issue #53 - Parsing failure on some descriptions? 
- https://github.com/collective/icalendar/issues/53 - """ - - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'issue_53_parsing_failure.ics'), - 'rb') - cal = icalendar.Calendar.from_ical(ics.read()) - ics.close() - - event = cal.walk('VEVENT')[0] - desc = event.get('DESCRIPTION') - self.assertTrue(b'July 12 at 6:30 PM' in desc.to_ical()) - - timezones = cal.walk('VTIMEZONE') - self.assertEqual(len(timezones), 1) - tz = timezones[0] - self.assertEqual(tz['tzid'].to_ical(), b"America/New_York") - - def test_issue_55(self): - """Issue #55 - Parse error on utc-offset with seconds value - https://github.com/collective/icalendar/issues/55 - """ - ical_str = """BEGIN:VTIMEZONE -TZID:America/Los Angeles -BEGIN:STANDARD -DTSTART:18831118T120702 -RDATE:18831118T120702 -TZNAME:PST -TZOFFSETFROM:-075258 -TZOFFSETTO:-0800 -END:STANDARD -END:VTIMEZONE""" - - tz = icalendar.Timezone.from_ical(ical_str) - self.assertEqual( - tz.to_ical(), - b'BEGIN:VTIMEZONE\r\nTZID:America/Los Angeles\r\n' - b'BEGIN:STANDARD\r\n' - b'DTSTART:18831118T120702\r\nRDATE:18831118T120702\r\nTZNAME:PST' - b'\r\nTZOFFSETFROM:-075258\r\nTZOFFSETTO:-0800\r\n' - b'END:STANDARD\r\n' - b'END:VTIMEZONE\r\n') - - def test_issue_58(self): - """Issue #58 - TZID on UTC DATE-TIMEs - https://github.com/collective/icalendar/issues/58 - """ - - # According to RFC 2445: "The TZID property parameter MUST NOT be - # applied to DATE-TIME or TIME properties whose time values are - # specified in UTC." 
- - event = icalendar.Event() - dt = pytz.utc.localize(datetime.datetime(2012, 7, 16, 0, 0, 0)) - event.add('dtstart', dt) - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\n" - b"DTSTART;VALUE=DATE-TIME:20120716T000000Z\r\n" - b"END:VEVENT\r\n" - ) - - def test_issue_64(self): - """Issue #64 - Event.to_ical() fails for unicode strings - https://github.com/collective/icalendar/issues/64 - """ - - # Non-unicode characters - event = icalendar.Event() - event.add("dtstart", datetime.datetime(2012, 9, 3, 0, 0, 0)) - event.add("summary", u"abcdef") - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\nSUMMARY:abcdef\r\nDTSTART;VALUE=DATE-TIME:" - b"20120903T000000\r\nEND:VEVENT\r\n" - ) - - # Unicode characters - event = icalendar.Event() - event.add("dtstart", datetime.datetime(2012, 9, 3, 0, 0, 0)) - event.add("summary", u"åäö") - self.assertEqual( - event.to_ical(), - b"BEGIN:VEVENT\r\nSUMMARY:\xc3\xa5\xc3\xa4\xc3\xb6\r\n" - b"DTSTART;VALUE=DATE-TIME:20120903T000000\r\nEND:VEVENT\r\n" - ) - - def test_issue_70(self): - """Issue #70 - e.decode("RRULE") causes Attribute Error - https://github.com/collective/icalendar/issues/70 - """ - - ical_str = """BEGIN:VEVENT -CREATED:20081114T072804Z -UID:D449CA84-00A3-4E55-83E1-34B58268853B -DTEND:20070220T180000 -RRULE:FREQ=WEEKLY;INTERVAL=1;UNTIL=20070619T225959 -TRANSP:OPAQUE -SUMMARY:Esb mellon phone conf -DTSTART:20070220T170000 -DTSTAMP:20070221T095412Z -SEQUENCE:0 -END:VEVENT""" - - cal = icalendar.Calendar.from_ical(ical_str) - recur = cal.decoded("RRULE") - self.assertIsInstance(recur, icalendar.vRecur) - self.assertEqual( - recur.to_ical(), - b'FREQ=WEEKLY;UNTIL=20070619T225959;INTERVAL=1' - ) - - def test_issue_82(self): - """Issue #82 - vBinary __repr__ called rather than to_ical from - container types - https://github.com/collective/icalendar/issues/82 - """ - - b = icalendar.vBinary('text') - b.params['FMTTYPE'] = 'text/plain' - self.assertEqual(b.to_ical(), b'dGV4dA==') - e = icalendar.Event() - 
e.add('ATTACH', b) - self.assertEqual( - e.to_ical(), - b"BEGIN:VEVENT\r\nATTACH;ENCODING=BASE64;FMTTYPE=text/plain;" - b"VALUE=BINARY:dGV4dA==\r\nEND:VEVENT\r\n" - ) - - def test_issue_100(self): - """Issue #100 - Transformed doctests into unittests, Test fixes and - cleanup. - https://github.com/collective/icalendar/pull/100 - """ - - ical_content = "BEGIN:VEVENT\r\nSUMMARY;LANGUAGE=ru:te\r\nEND:VEVENT" - icalendar.Event.from_ical(ical_content).to_ical() - - def test_issue_101(self): - """Issue #101 - icalender is choking on umlauts in ORGANIZER - - https://github.com/collective/icalendar/issues/101 - """ - ical_str = """BEGIN:VCALENDAR -VERSION:2.0 -X-WR-CALNAME:Kalender von acme\, admin -PRODID:-//The Horde Project//Horde_iCalendar Library\, Horde 3.3.5//EN -METHOD:PUBLISH -BEGIN:VEVENT -DTSTART:20130416T100000Z -DTEND:20130416T110000Z -DTSTAMP:20130416T092616Z -UID:20130416112341.10064jz0k4j7uem8@acmenet.de -CREATED:20130416T092341Z -LAST-MODIFIED:20130416T092341Z -SUMMARY:wichtiger termin 1 -ORGANIZER;CN="acme, ädmin":mailto:adm-acme@mydomain.de -LOCATION:im büro -CLASS:PUBLIC -STATUS:CONFIRMED -TRANSP:OPAQUE -END:VEVENT -END:VCALENDAR""" - - cal = icalendar.Calendar.from_ical(ical_str) - org_cn = cal.walk('VEVENT')[0]['ORGANIZER'].params['CN'] - self.assertEqual(org_cn, u'acme, ädmin') - - def test_issue_112(self): - """Issue #112 - No timezone info on EXDATE - https://github.com/collective/icalendar/issues/112 - """ - directory = os.path.dirname(__file__) - path = os.path.join(directory, - 'issue_112_missing_tzinfo_on_exdate.ics') - with open(path, 'rb') as ics: - cal = icalendar.Calendar.from_ical(ics.read()) - event = cal.walk('VEVENT')[0] - - event_ical = to_unicode(event.to_ical()) # Py3 str type doesn't - # support buffer API - # General timezone aware dates in ical string - self.assertTrue('DTSTART;TZID=America/New_York:20130907T120000' - in event_ical) - self.assertTrue('DTEND;TZID=America/New_York:20130907T170000' - in event_ical) - # Specific 
timezone aware exdates in ical string - self.assertTrue('EXDATE;TZID=America/New_York:20131012T120000' - in event_ical) - self.assertTrue('EXDATE;TZID=America/New_York:20131011T120000' - in event_ical) - - self.assertEqual(event['exdate'][0].dts[0].dt.tzname(), 'EDT') - - def test_issue_114(self): - """Issue #114/#115 - invalid line in event breaks the parser - https://github.com/collective/icalendar/issues/114 - """ - - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'issue_114_invalid_line.ics'), 'rb') - with self.assertRaises(ValueError): - cal = icalendar.Calendar.from_ical(ics.read()) - cal # pep 8 - ics.close() - - def test_issue_116(self): - """Issue #116/#117 - How to add 'X-APPLE-STRUCTURED-LOCATION' - """ - event = icalendar.Event() - event.add( - "X-APPLE-STRUCTURED-LOCATION", - "geo:-33.868900,151.207000", - parameters={ - "VALUE": "URI", - "X-ADDRESS": "367 George Street Sydney CBD NSW 2000", - "X-APPLE-RADIUS": "72", - "X-TITLE": "367 George Street" - } - ) - self.assertEqual( - event.to_ical(), - b'BEGIN:VEVENT\r\nX-APPLE-STRUCTURED-LOCATION;VALUE=URI;' - b'X-ADDRESS="367 George Street Sydney \r\n CBD NSW 2000";' - b'X-APPLE-RADIUS=72;X-TITLE="367 George Street":' - b'geo:-33.868900\r\n \\,151.207000\r\nEND:VEVENT\r\n' - ) - - # roundtrip - self.assertEqual( - event.to_ical(), - icalendar.Event.from_ical(event.to_ical()).to_ical() - ) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_icalendar.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_icalendar.py deleted file mode 100644 index 22ebd80..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_icalendar.py +++ /dev/null @@ -1,253 +0,0 @@ -# coding: utf-8 -from icalendar.tests import unittest - - -class IcalendarTestCase (unittest.TestCase): - - def test_long_lines(self): - from ..parser import Contentlines, Contentline - c = Contentlines([Contentline('BEGIN:VEVENT')]) - c.append(Contentline(''.join('123456789 ' * 10))) - self.assertEqual( - c.to_ical(), 
- b'BEGIN:VEVENT\r\n123456789 123456789 123456789 123456789 ' - b'123456789 123456789 123456789 1234\r\n 56789 123456789 ' - b'123456789 \r\n' - ) - - # from doctests - # Notice that there is an extra empty string in the end of the content - # lines. That is so they can be easily joined with: - # '\r\n'.join(contentlines)) - self.assertEqual(Contentlines.from_ical('A short line\r\n'), - ['A short line', '']) - self.assertEqual(Contentlines.from_ical('A faked\r\n long line\r\n'), - ['A faked long line', '']) - self.assertEqual( - Contentlines.from_ical('A faked\r\n long line\r\nAnd another ' - 'lin\r\n\te that is folded\r\n'), - ['A faked long line', 'And another line that is folded', ''] - ) - - def test_contentline_class(self): - from ..parser import Contentline, Parameters - from ..prop import vText - - self.assertEqual( - Contentline('Si meliora dies, ut vina, poemata reddit').to_ical(), - b'Si meliora dies, ut vina, poemata reddit' - ) - - # A long line gets folded - c = Contentline(''.join(['123456789 '] * 10)).to_ical() - self.assertEqual( - c, - (b'123456789 123456789 123456789 123456789 123456789 123456789 ' - b'123456789 1234\r\n 56789 123456789 123456789 ') - ) - - # A folded line gets unfolded - self.assertEqual( - Contentline.from_ical(c), - ('123456789 123456789 123456789 123456789 123456789 123456789 ' - '123456789 123456789 123456789 123456789 ') - ) - - # http://tools.ietf.org/html/rfc5545#section-3.3.11 - # An intentional formatted text line break MUST only be included in - # a "TEXT" property value by representing the line break with the - # character sequence of BACKSLASH, followed by a LATIN SMALL LETTER - # N or a LATIN CAPITAL LETTER N, that is "\n" or "\N". 
- - # Newlines are not allwoed in content lines - self.assertRaises(AssertionError, Contentline, b'1234\r\n\r\n1234') - - self.assertEqual( - Contentline('1234\\n\\n1234').to_ical(), - b'1234\\n\\n1234' - ) - - # We do not fold within a UTF-8 character - c = Contentline(b'This line has a UTF-8 character where it should be ' - b'folded. Make sure it g\xc3\xabts folded before that ' - b'character.') - - self.assertIn(b'\xc3\xab', c.to_ical()) - - # Another test of the above - c = Contentline(b'x' * 73 + b'\xc3\xab' + b'\\n ' + b'y' * 10) - - self.assertEqual(c.to_ical().count(b'\xc3'), 1) - - # Don't fail if we fold a line that is exactly X times 74 characters - # long - c = Contentline(''.join(['x'] * 148)).to_ical() - - # It can parse itself into parts, - # which is a tuple of (name, params, vals) - self.assertEqual( - Contentline('dtstart:20050101T120000').parts(), - ('dtstart', Parameters({}), '20050101T120000') - ) - - self.assertEqual( - Contentline('dtstart;value=datetime:20050101T120000').parts(), - ('dtstart', Parameters({'VALUE': 'datetime'}), '20050101T120000') - ) - - c = Contentline('ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com') - self.assertEqual( - c.parts(), - ('ATTENDEE', - Parameters({'ROLE': 'REQ-PARTICIPANT', 'CN': 'Max Rasmussen'}), - 'MAILTO:maxm@example.com') - ) - self.assertEqual( - c.to_ical().decode('utf-8'), - 'ATTENDEE;CN=Max Rasmussen;ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com' - ) - - # and back again - # NOTE: we are quoting property values with spaces in it. 
- parts = ('ATTENDEE', - Parameters({'ROLE': 'REQ-PARTICIPANT', - 'CN': 'Max Rasmussen'}), - 'MAILTO:maxm@example.com') - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE;CN="Max Rasmussen";ROLE=REQ-PARTICIPANT:' - 'MAILTO:maxm@example.com' - ) - - # and again - parts = ('ATTENDEE', Parameters(), 'MAILTO:maxm@example.com') - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE:MAILTO:maxm@example.com' - ) - - # A value can also be any of the types defined in PropertyValues - parts = ('ATTENDEE', Parameters(), vText('MAILTO:test@example.com')) - self.assertEqual( - Contentline.from_parts(*parts), - 'ATTENDEE:MAILTO:test@example.com' - ) - - # A value in UTF-8 - parts = ('SUMMARY', Parameters(), vText('INternational char æ ø å')) - self.assertEqual( - Contentline.from_parts(*parts), - u'SUMMARY:INternational char æ ø å' - ) - - # A value can also be unicode - parts = ('SUMMARY', Parameters(), vText(u'INternational char æ ø å')) - self.assertEqual( - Contentline.from_parts(*parts), - u'SUMMARY:INternational char æ ø å' - ) - - # Traversing could look like this. 
- name, params, vals = c.parts() - self.assertEqual(name, 'ATTENDEE') - self.assertEqual(vals, 'MAILTO:maxm@example.com') - self.assertEqual( - sorted(params.items()), - sorted([('ROLE', 'REQ-PARTICIPANT'), ('CN', 'Max Rasmussen')]) - ) - - # And the traditional failure - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline('ATTENDEE;maxm@example.com').parts() - - # Another failure: - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline(':maxm@example.com').parts() - - self.assertEqual( - Contentline('key;param=:value').parts(), - ('key', Parameters({'PARAM': ''}), 'value') - ) - - self.assertEqual( - Contentline('key;param="pvalue":value').parts(), - ('key', Parameters({'PARAM': 'pvalue'}), 'value') - ) - - # Should bomb on missing param: - with self.assertRaisesRegexp( - ValueError, - 'Content line could not be parsed into parts' - ): - Contentline.from_ical("k;:no param").parts() - - self.assertEqual( - Contentline('key;param=pvalue:value', strict=False).parts(), - ('key', Parameters({'PARAM': 'pvalue'}), 'value') - ) - - # If strict is set to True, uppercase param values that are not - # double-quoted, this is because the spec says non-quoted params are - # case-insensitive. - self.assertEqual( - Contentline('key;param=pvalue:value', strict=True).parts(), - ('key', Parameters({'PARAM': 'PVALUE'}), 'value') - ) - - self.assertEqual( - Contentline('key;param="pValue":value', strict=True).parts(), - ('key', Parameters({'PARAM': 'pValue'}), 'value') - ) - - def test_fold_line(self): - from ..parser import foldline - - self.assertEqual(foldline(u'foo'), u'foo') - self.assertEqual( - foldline(u"Lorem ipsum dolor sit amet, consectetur adipiscing " - u"elit. Vestibulum convallis imperdiet dui posuere."), - (u'Lorem ipsum dolor sit amet, consectetur adipiscing elit. 
' - u'Vestibulum conval\r\n lis imperdiet dui posuere.') - ) - - # I don't really get this test - # at least just but bytes in there - # porting it to "run" under python 2 & 3 makes it not much better - with self.assertRaises(AssertionError): - foldline(u'привет'.encode('utf-8'), limit=3) - - self.assertEqual(foldline(u'foobar', limit=4), u'foo\r\n bar') - self.assertEqual( - foldline(u'Lorem ipsum dolor sit amet, consectetur adipiscing elit' - u'. Vestibulum convallis imperdiet dui posuere.'), - (u'Lorem ipsum dolor sit amet, consectetur adipiscing elit.' - u' Vestibulum conval\r\n lis imperdiet dui posuere.') - ) - self.assertEqual( - foldline(u'DESCRIPTION:АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭЮЯ'), - u'DESCRIPTION:АБВГДЕЁЖЗИЙКЛМНОПРСТУФХЦЧШЩЬЫЪЭ\r\n ЮЯ' - ) - - def test_value_double_quoting(self): - from ..parser import dquote - self.assertEqual(dquote('Max'), 'Max') - self.assertEqual(dquote('Rasmussen, Max'), '"Rasmussen, Max"') - self.assertEqual(dquote('name:value'), '"name:value"') - - def test_q_split(self): - from ..parser import q_split - self.assertEqual(q_split('Max,Moller,"Rasmussen, Max"'), - ['Max', 'Moller', '"Rasmussen, Max"']) - - def test_q_join(self): - from ..parser import q_join - self.assertEqual(q_join(['Max', 'Moller', 'Rasmussen, Max']), - 'Max,Moller,"Rasmussen, Max"') diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_multiple.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_multiple.py deleted file mode 100644 index c0d9dfe..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_multiple.py +++ /dev/null @@ -1,28 +0,0 @@ -from icalendar import Calendar -from icalendar.prop import vText -from icalendar.tests import unittest - -import os - - -class TestMultiple(unittest.TestCase): - """A example with multiple VCALENDAR components""" - - def test_multiple(self): - - directory = os.path.dirname(__file__) - cals = Calendar.from_ical( - open(os.path.join(directory, 'multiple.ics'), 'rb').read(), - multiple=True - ) - - 
self.assertEqual(len(cals), 2) - self.assertSequenceEqual([comp.name for comp in cals[0].walk()], - ['VCALENDAR', 'VEVENT']) - self.assertSequenceEqual([comp.name for comp in cals[1].walk()], - ['VCALENDAR', 'VEVENT', 'VEVENT']) - - self.assertEqual( - cals[0]['prodid'], - vText('-//Mozilla.org/NONSGML Mozilla Calendar V1.0//EN') - ) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_property_params.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_property_params.py deleted file mode 100644 index 2dfd49b..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_property_params.py +++ /dev/null @@ -1,207 +0,0 @@ -# coding: utf-8 -from icalendar import Calendar -from icalendar import Event -from icalendar import Parameters -from icalendar import vCalAddress -from icalendar.tests import unittest - -import icalendar - - -class TestPropertyParams(unittest.TestCase): - - def test_property_params(self): - # Property parameters with values containing a COLON character, a - # SEMICOLON character or a COMMA character MUST be placed in quoted - # text. - cal_address = vCalAddress('mailto:john.doe@example.org') - cal_address.params["CN"] = "Doe, John" - ical = Calendar() - ical.add('organizer', cal_address) - - ical_str = Calendar.to_ical(ical) - exp_str = b"""BEGIN:VCALENDAR\r\nORGANIZER;CN="Doe, John":"""\ - b"""mailto:john.doe@example.org\r\nEND:VCALENDAR\r\n""" - - self.assertEqual(ical_str, exp_str) - - # other way around: ensure the property parameters can be restored from - # an icalendar string. 
- ical2 = Calendar.from_ical(ical_str) - self.assertEqual(ical2.get('ORGANIZER').params.get('CN'), 'Doe, John') - - def test_unicode_param(self): - cal_address = vCalAddress('mailto:john.doe@example.org') - cal_address.params["CN"] = "Джон Доу" - vevent = Event() - vevent['ORGANIZER'] = cal_address - self.assertEqual( - vevent.to_ical().decode('utf-8'), - u'BEGIN:VEVENT\r\n' - u'ORGANIZER;CN="Джон Доу":mailto:john.doe@example.org\r\n' - u'END:VEVENT\r\n' - ) - - self.assertEqual(vevent['ORGANIZER'].params['CN'], - 'Джон Доу') - - def test_quoting(self): - # not double-quoted - self._test_quoting(u"Aramis", u'Aramis') - # if a space is present - enclose in double quotes - self._test_quoting(u"Aramis Alameda", u'"Aramis Alameda"') - # a single quote in parameter value - double quote the value - self._test_quoting(u"Aramis d'Alameda", u'"Aramis d\'Alameda"') - # double quote is replaced with single quote - self._test_quoting(u"Aramis d\"Alameda", u'"Aramis d\'Alameda"') - self._test_quoting(u"Арамис д'Аламеда", u'"Арамис д\'Аламеда"') - - def _test_quoting(self, cn_param, cn_quoted): - """ - @param cn_param: CN parameter value to test for quoting - @param cn_quoted: expected quoted parameter in icalendar format - """ - vevent = Event() - attendee = vCalAddress('test@mail.com') - attendee.params['CN'] = cn_param - vevent.add('ATTENDEE', attendee) - self.assertEqual( - vevent.to_ical(), - b'BEGIN:VEVENT\r\nATTENDEE;CN=' + cn_quoted.encode('utf-8') + - b':test@mail.com\r\nEND:VEVENT\r\n' - ) - - def test_escaping(self): - # verify that escaped non safe chars are decoded correctly - NON_SAFE_CHARS = u',\\;:' - for char in NON_SAFE_CHARS: - cn_escaped = u"Society\\%s 2014" % char - cn_decoded = u"Society%s 2014" % char - vevent = Event.from_ical( - u'BEGIN:VEVENT\r\n' - u'ORGANIZER;CN=%s:that\r\n' - u'END:VEVENT\r\n' % cn_escaped - ) - self.assertEqual(vevent['ORGANIZER'].params['CN'], cn_decoded) - - vevent = Event.from_ical( - 'BEGIN:VEVENT\r\n' - 'ORGANIZER;CN=that\\, 
that\\; %th%%at%\\\\ that\\:' - ':это\\, то\\; that\\\\ %th%%at%\\:\r\n' - 'END:VEVENT\r\n' - ) - self.assertEqual( - vevent['ORGANIZER'].params['CN'], - r'that, that; %th%%at%\ that:' - ) - self.assertEqual( - vevent['ORGANIZER'].to_ical().decode('utf-8'), - u'это, то; that\\ %th%%at%:' - ) - - def test_parameters_class(self): - - # Simple parameter:value pair - p = Parameters(parameter1='Value1') - self.assertEqual(p.to_ical(), b'PARAMETER1=Value1') - - # keys are converted to upper - self.assertEqual(list(p.keys()), ['PARAMETER1']) - - # Parameters are case insensitive - self.assertEqual(p['parameter1'], 'Value1') - self.assertEqual(p['PARAMETER1'], 'Value1') - - # Parameter with list of values must be seperated by comma - p = Parameters({'parameter1': ['Value1', 'Value2']}) - self.assertEqual(p.to_ical(), b'PARAMETER1=Value1,Value2') - - # Multiple parameters must be seperated by a semicolon - p = Parameters({'RSVP': 'TRUE', 'ROLE': 'REQ-PARTICIPANT'}) - self.assertEqual(p.to_ical(), b'ROLE=REQ-PARTICIPANT;RSVP=TRUE') - - # Parameter values containing ',;:' must be double quoted - p = Parameters({'ALTREP': 'http://www.wiz.org'}) - self.assertEqual(p.to_ical(), b'ALTREP="http://www.wiz.org"') - - # list items must be quoted seperately - p = Parameters({'MEMBER': ['MAILTO:projectA@host.com', - 'MAILTO:projectB@host.com']}) - self.assertEqual( - p.to_ical(), - b'MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"' - ) - - # Now the whole sheebang - p = Parameters({'parameter1': 'Value1', - 'parameter2': ['Value2', 'Value3'], - 'ALTREP': ['http://www.wiz.org', 'value4']}) - self.assertEqual( - p.to_ical(), - (b'ALTREP="http://www.wiz.org",value4;PARAMETER1=Value1;' - b'PARAMETER2=Value2,Value3') - ) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical('PARAMETER1=Value 1;param2=Value 2'), - Parameters({'PARAMETER1': 'Value 1', 'PARAM2': 'Value 2'}) - ) - - # Including empty strings - 
self.assertEqual(Parameters.from_ical('param='), - Parameters({'PARAM': ''})) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical( - 'MEMBER="MAILTO:projectA@host.com","MAILTO:projectB@host.com"' - ), - Parameters({'MEMBER': ['MAILTO:projectA@host.com', - 'MAILTO:projectB@host.com']}) - ) - - # We can also parse parameter strings - self.assertEqual( - Parameters.from_ical('ALTREP="http://www.wiz.org",value4;' - 'PARAMETER1=Value1;PARAMETER2=Value2,Value3'), - Parameters({'PARAMETER1': 'Value1', - 'ALTREP': ['http://www.wiz.org', 'value4'], - 'PARAMETER2': ['Value2', 'Value3']}) - ) - - def test_parse_and_access_property_params(self): - """Parse an ics string and access some property parameters then. - This is a follow-up of a question recieved per email. - - """ - ics = """BEGIN:VCALENDAR -VERSION:2.0 -PRODID://RESEARCH IN MOTION//BIS 3.0 -METHOD:REQUEST -BEGIN:VEVENT -SEQUENCE:2 -X-RIM-REVISION:0 -SUMMARY:Test meeting from BB -X-MICROSOFT-CDO-ALLDAYEVENT:TRUE -CLASS:PUBLIC -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandXS":MAILTO:rembrand@xs4all.nl -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandDX":MAILTO:rembrand@daxlab.com -ATTENDEE;PARTSTAT=NEEDS-ACTION;RSVP=TRUE;CN="RembrandSB":MAILTO:rembspam@xs4all.nl -UID:XRIMCAL-628059586-522954492-9750559 -DTSTART;VALUE=DATE:20120814 -DTEND;VALUE=DATE:20120815 -DESCRIPTION:Test meeting from BB -DTSTAMP:20120813T151458Z -ORGANIZER:mailto:rembrand@daxlab.com -END:VEVENT -END:VCALENDAR""" - - cal = icalendar.Calendar.from_ical(ics) - event = cal.walk("VEVENT")[0] - event['attendee'][0] - self.assertEqual(event['attendee'][0].to_ical(), - b'MAILTO:rembrand@xs4all.nl') - self.assertEqual(event['attendee'][0].params.to_ical(), - b'CN=RembrandXS;PARTSTAT=NEEDS-ACTION;RSVP=TRUE') - self.assertEqual(event['attendee'][0].params['cn'], u'RembrandXS') diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_recurrence.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_recurrence.py 
deleted file mode 100644 index d7ba4e3..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_recurrence.py +++ /dev/null @@ -1,62 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.caselessdict import CaselessDict -from icalendar.tests import unittest - -import datetime -import icalendar -import os -import pytz - - -class TestRecurrence(unittest.TestCase): - - def setUp(self): - directory = os.path.dirname(__file__) - self.cal = icalendar.Calendar.from_ical( - open(os.path.join(directory, 'recurrence.ics'), 'rb').read() - ) - - def test_recurrence_exdates_one_line(self): - first_event = self.cal.walk('vevent')[0] - - self.assertIsInstance(first_event, CaselessDict) - self.assertEqual( - first_event['rrule'], {'COUNT': [100], 'FREQ': ['DAILY']} - ) - - self.assertEqual( - first_event['exdate'].to_ical(), - b'19960402T010000Z,19960403T010000Z,19960404T010000Z' - ) - - self.assertEqual( - first_event['exdate'].dts[0].dt, - datetime.datetime(1996, 4, 2, 1, 0, tzinfo=pytz.utc) - ) - - self.assertEqual( - first_event['exdate'].dts[1].dt, - datetime.datetime(1996, 4, 3, 1, 0, tzinfo=pytz.utc) - ) - - self.assertEqual( - first_event['exdate'].dts[2].dt, - datetime.datetime(1996, 4, 4, 1, 0, tzinfo=pytz.utc) - ) - - def test_recurrence_exdates_multiple_lines(self): - event = self.cal.walk('vevent')[1] - - exdate = event['exdate'] - - # TODO: DOCUMENT BETTER! - # In this case we have multiple EXDATE definitions, one per line. - # Icalendar makes a list out of this instead of zipping it into one - # vDDDLists object. Actually, this feels correct for me, as it also - # allows to define different timezones per exdate line - but client - # code has to handle this as list and not blindly expecting to be able - # to call event['EXDATE'].to_ical() on it: - self.assertEqual(isinstance(exdate, list), True) # multiple EXDATE - self.assertEqual(exdate[0].to_ical(), b'20120529T100000') - - # TODO: test for embedded timezone information! 
diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_time.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_time.py deleted file mode 100644 index 51312e5..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_time.py +++ /dev/null @@ -1,29 +0,0 @@ -from icalendar.tests import unittest - -import datetime -import icalendar -import os - - -class TestTime(unittest.TestCase): - - def setUp(self): - icalendar.cal.types_factory.types_map['X-SOMETIME'] = 'time' - - def tearDown(self): - icalendar.cal.types_factory.types_map.pop('X-SOMETIME') - - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - ics = open(os.path.join(directory, 'time.ics'), 'rb') - cal = icalendar.Calendar.from_ical(ics.read()) - ics.close() - - self.assertEqual(cal['X-SOMETIME'].dt, datetime.time(17, 20, 10)) - self.assertEqual(cal['X-SOMETIME'].to_ical(), '172010') - - def test_create_to_ical(self): - cal = icalendar.Calendar() - cal.add('X-SOMETIME', datetime.time(17, 20, 10)) - self.assertTrue(b'X-SOMETIME;VALUE=TIME:172010' in - cal.to_ical().splitlines()) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_timezoned.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_timezoned.py deleted file mode 100644 index 8c42cc7..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_timezoned.py +++ /dev/null @@ -1,141 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.tests import unittest - -import datetime -import dateutil.parser -import icalendar -import os -import pytz - - -class TestTimezoned(unittest.TestCase): - - def test_create_from_ical(self): - directory = os.path.dirname(__file__) - cal = icalendar.Calendar.from_ical( - open(os.path.join(directory, 'timezoned.ics'), 'rb').read() - ) - - self.assertEqual( - cal['prodid'].to_ical(), - b"-//Plone.org//NONSGML plone.app.event//EN" - ) - - timezones = cal.walk('VTIMEZONE') - self.assertEqual(len(timezones), 1) - - tz = timezones[0] - self.assertEqual(tz['tzid'].to_ical(), b"Europe/Vienna") - - std = 
tz.walk('STANDARD')[0] - self.assertEqual( - std.decoded('TZOFFSETFROM'), - datetime.timedelta(0, 7200) - ) - - ev1 = cal.walk('VEVENT')[0] - self.assertEqual( - ev1.decoded('DTSTART'), - datetime.datetime(2012, 2, 13, 10, 0, 0, - tzinfo=pytz.timezone('Europe/Vienna'))) - self.assertEqual( - ev1.decoded('DTSTAMP'), - datetime.datetime(2010, 10, 10, 9, 10, 10, tzinfo=pytz.utc)) - - def test_create_to_ical(self): - cal = icalendar.Calendar() - - cal.add('prodid', u"-//Plone.org//NONSGML plone.app.event//EN") - cal.add('version', u"2.0") - cal.add('x-wr-calname', u"test create calendar") - cal.add('x-wr-caldesc', u"icalendar tests") - cal.add('x-wr-relcalid', u"12345") - cal.add('x-wr-timezone', u"Europe/Vienna") - - tzc = icalendar.Timezone() - tzc.add('tzid', 'Europe/Vienna') - tzc.add('x-lic-location', 'Europe/Vienna') - - tzs = icalendar.TimezoneStandard() - tzs.add('tzname', 'CET') - tzs.add('dtstart', datetime.datetime(1970, 10, 25, 3, 0, 0)) - tzs.add('rrule', {'freq': 'yearly', 'bymonth': 10, 'byday': '-1su'}) - tzs.add('TZOFFSETFROM', datetime.timedelta(hours=2)) - tzs.add('TZOFFSETTO', datetime.timedelta(hours=1)) - - tzd = icalendar.TimezoneDaylight() - tzd.add('tzname', 'CEST') - tzd.add('dtstart', datetime.datetime(1970, 3, 29, 2, 0, 0)) - tzs.add('rrule', {'freq': 'yearly', 'bymonth': 3, 'byday': '-1su'}) - tzd.add('TZOFFSETFROM', datetime.timedelta(hours=1)) - tzd.add('TZOFFSETTO', datetime.timedelta(hours=2)) - - tzc.add_component(tzs) - tzc.add_component(tzd) - cal.add_component(tzc) - - event = icalendar.Event() - tz = pytz.timezone("Europe/Vienna") - event.add( - 'dtstart', - datetime.datetime(2012, 2, 13, 10, 00, 00, tzinfo=tz)) - event.add( - 'dtend', - datetime.datetime(2012, 2, 17, 18, 00, 00, tzinfo=tz)) - event.add( - 'dtstamp', - datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add( - 'created', - datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add('uid', u'123456') - event.add( - 'last-modified', - 
datetime.datetime(2010, 10, 10, 10, 10, 10, tzinfo=tz)) - event.add('summary', u'artsprint 2012') - # event.add('rrule', u'FREQ=YEARLY;INTERVAL=1;COUNT=10') - event.add('description', u'sprinting at the artsprint') - event.add('location', u'aka bild, wien') - event.add('categories', u'first subject') - event.add('categories', u'second subject') - event.add('attendee', u'häns') - event.add('attendee', u'franz') - event.add('attendee', u'sepp') - event.add('contact', u'Max Mustermann, 1010 Wien') - event.add('url', u'http://plone.org') - cal.add_component(event) - - test_out = b'|'.join(cal.to_ical().splitlines()) - test_out = test_out.decode('utf-8') - - vtimezone_lines = "BEGIN:VTIMEZONE|TZID:Europe/Vienna|X-LIC-LOCATION:" - "Europe/Vienna|BEGIN:STANDARD|DTSTART;VALUE=DATE-TIME:19701025T03" - "0000|RRULE:FREQ=YEARLY;BYDAY=-1SU;BYMONTH=10|RRULE:FREQ=YEARLY;B" - "YDAY=-1SU;BYMONTH=3|TZNAME:CET|TZOFFSETFROM:+0200|TZOFFSETTO:+01" - "00|END:STANDARD|BEGIN:DAYLIGHT|DTSTART;VALUE=DATE-TIME:19700329T" - "020000|TZNAME:CEST|TZOFFSETFROM:+0100|TZOFFSETTO:+0200|END:DAYLI" - "GHT|END:VTIMEZONE" - self.assertTrue(vtimezone_lines in test_out) - - test_str = "DTSTART;TZID=Europe/Vienna;VALUE=DATE-TIME:20120213T100000" - self.assertTrue(test_str in test_out) - self.assertTrue("ATTENDEE:sepp" in test_out) - - # ical standard expects DTSTAMP and CREATED in UTC - self.assertTrue("DTSTAMP;VALUE=DATE-TIME:20101010T091010Z" in test_out) - self.assertTrue("CREATED;VALUE=DATE-TIME:20101010T091010Z" in test_out) - - def test_tzinfo_dateutil(self): - # Test for issues #77, #63 - # references: #73,7430b66862346fe3a6a100ab25e35a8711446717 - - date = dateutil.parser.parse('2012-08-30T22:41:00Z') - date2 = dateutil.parser.parse('2012-08-30T22:41:00 +02:00') - self.assertTrue(date.tzinfo.__module__ == 'dateutil.tz') - self.assertTrue(date2.tzinfo.__module__ == 'dateutil.tz') - - # make sure, it's parsed properly and doesn't throw an error - self.assertTrue(icalendar.vDDDTypes(date).to_ical() - 
== b'20120830T224100Z') - self.assertTrue(icalendar.vDDDTypes(date2).to_ical() - == b'20120830T224100') diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_cal.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_cal.py deleted file mode 100644 index 4419f21..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_cal.py +++ /dev/null @@ -1,349 +0,0 @@ -from datetime import datetime -from datetime import timedelta -from icalendar.tests import unittest - -import icalendar -import pytz - - -class TestCalComponent(unittest.TestCase): - - def test_cal_Component(self): - from icalendar.cal import Component, Calendar, Event - from icalendar import prop - - # A component is like a dictionary with extra methods and attributes. - c = Component() - c.name = 'VCALENDAR' - - # Every key defines a property.A property can consist of either a - # single item. This can be set with a single value... - c['prodid'] = '-//max m//icalendar.mxm.dk/' - self.assertEqual( - c, - Calendar({'PRODID': '-//max m//icalendar.mxm.dk/'}) - ) - - # or with a list - c['ATTENDEE'] = ['Max M', 'Rasmussen'] - self.assertEqual( - c, - Calendar({'ATTENDEE': ['Max M', 'Rasmussen'], - 'PRODID': '-//max m//icalendar.mxm.dk/'}) - ) - - ### ADD MULTIPLE VALUES TO A PROPERTY - - # if you use the add method you don't have to considder if a value is - # a list or not. 
- c = Component() - c.name = 'VEVENT' - - # add multiple values at once - c.add('attendee', - ['test@test.com', 'test2@test.com']) - - # or add one per line - c.add('attendee', 'maxm@mxm.dk') - c.add('attendee', 'test@example.dk') - - # add again multiple values at once to very concatenaton of lists - c.add('attendee', - ['test3@test.com', 'test4@test.com']) - - self.assertEqual( - c, - Event({'ATTENDEE': [ - prop.vCalAddress('test@test.com'), - prop.vCalAddress('test2@test.com'), - prop.vCalAddress('maxm@mxm.dk'), - prop.vCalAddress('test@example.dk'), - prop.vCalAddress('test3@test.com'), - prop.vCalAddress('test4@test.com') - ]}) - ) - - ### - - # You can get the values back directly ... - c.add('prodid', '-//my product//') - self.assertEqual(c['prodid'], prop.vText(u'-//my product//')) - - # ... or decoded to a python type - self.assertEqual(c.decoded('prodid'), b'-//my product//') - - # With default values for non existing properties - self.assertEqual(c.decoded('version', 'No Version'), 'No Version') - - c.add('rdate', [datetime(2013, 3, 28), datetime(2013, 3, 27)]) - self.assertTrue(isinstance(c.decoded('rdate'), prop.vDDDLists)) - - # The component can render itself in the RFC 2445 format. - c = Component() - c.name = 'VCALENDAR' - c.add('attendee', 'Max M') - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nATTENDEE:Max M\r\nEND:VCALENDAR\r\n' - ) - - # Components can be nested, so You can add a subcompont. Eg a calendar - # holds events. 
- e = Component(summary='A brief history of time') - e.name = 'VEVENT' - e.add('dtend', '20000102T000000', encode=0) - e.add('dtstart', '20000101T000000', encode=0) - self.assertEqual( - e.to_ical(), - b'BEGIN:VEVENT\r\nDTEND:20000102T000000\r\n' - + b'DTSTART:20000101T000000\r\nSUMMARY:A brief history of time\r' - + b'\nEND:VEVENT\r\n' - ) - - c.add_component(e) - self.assertEqual( - c.subcomponents, - [Event({'DTEND': '20000102T000000', 'DTSTART': '20000101T000000', - 'SUMMARY': 'A brief history of time'})] - ) - - # We can walk over nested componentes with the walk method. - self.assertEqual([i.name for i in c.walk()], ['VCALENDAR', 'VEVENT']) - - # We can also just walk over specific component types, by filtering - # them on their name. - self.assertEqual([i.name for i in c.walk('VEVENT')], ['VEVENT']) - - self.assertEqual( - [i['dtstart'] for i in c.walk('VEVENT')], - ['20000101T000000'] - ) - - # We can enumerate property items recursively with the property_items - # method. - self.assertEqual( - c.property_items(), - [('BEGIN', b'VCALENDAR'), ('ATTENDEE', prop.vCalAddress('Max M')), - ('BEGIN', b'VEVENT'), ('DTEND', '20000102T000000'), - ('DTSTART', '20000101T000000'), - ('SUMMARY', 'A brief history of time'), ('END', b'VEVENT'), - ('END', b'VCALENDAR')] - ) - - # We can also enumerate property items just under the component. 
- self.assertEqual( - c.property_items(recursive=False), - [('BEGIN', b'VCALENDAR'), - ('ATTENDEE', prop.vCalAddress('Max M')), - ('END', b'VCALENDAR')] - ) - - sc = c.subcomponents[0] - self.assertEqual( - sc.property_items(recursive=False), - [('BEGIN', b'VEVENT'), ('DTEND', '20000102T000000'), - ('DTSTART', '20000101T000000'), - ('SUMMARY', 'A brief history of time'), ('END', b'VEVENT')] - ) - - # Text fields which span multiple mulitple lines require proper - # indenting - c = Calendar() - c['description'] = u'Paragraph one\n\nParagraph two' - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nDESCRIPTION:Paragraph one\\n\\nParagraph two' - + b'\r\nEND:VCALENDAR\r\n' - ) - - # INLINE properties have their values on one property line. Note the - # double quoting of the value with a colon in it. - c = Calendar() - c['resources'] = 'Chair, Table, "Room: 42"' - self.assertEqual( - c, - Calendar({'RESOURCES': 'Chair, Table, "Room: 42"'}) - ) - - self.assertEqual( - c.to_ical(), - b'BEGIN:VCALENDAR\r\nRESOURCES:Chair\\, Table\\, "Room: 42"\r\n' - + b'END:VCALENDAR\r\n' - ) - - # The inline values must be handled by the get_inline() and - # set_inline() methods. - self.assertEqual( - c.get_inline('resources', decode=0), - [u'Chair', u'Table', u'Room: 42'] - ) - - # These can also be decoded - self.assertEqual( - c.get_inline('resources', decode=1), - [b'Chair', b'Table', b'Room: 42'] - ) - - # You can set them directly ... - c.set_inline('resources', ['A', 'List', 'of', 'some, recources'], - encode=1) - self.assertEqual(c['resources'], 'A,List,of,"some, recources"') - - # ... 
and back again - self.assertEqual( - c.get_inline('resources', decode=0), - ['A', 'List', 'of', 'some, recources'] - ) - - c['freebusy'] = '19970308T160000Z/PT3H,19970308T200000Z/PT1H,'\ - + '19970308T230000Z/19970309T000000Z' - self.assertEqual( - c.get_inline('freebusy', decode=0), - ['19970308T160000Z/PT3H', '19970308T200000Z/PT1H', - '19970308T230000Z/19970309T000000Z'] - ) - - freebusy = c.get_inline('freebusy', decode=1) - self.assertTrue(isinstance(freebusy[0][0], datetime)) - self.assertTrue(isinstance(freebusy[0][1], timedelta)) - - def test_cal_Component_add(self): - # Test the for timezone correctness: dtstart should preserve it's - # timezone, crated, dtstamp and last-modified must be in UTC. - Component = icalendar.cal.Component - comp = Component() - comp.add('dtstart', datetime(2010, 10, 10, 10, 0, 0, - tzinfo=pytz.timezone("Europe/Vienna"))) - comp.add('created', datetime(2010, 10, 10, 12, 0, 0)) - comp.add('dtstamp', datetime(2010, 10, 10, 14, 0, 0, - tzinfo=pytz.timezone("Europe/Vienna"))) - comp.add('last-modified', datetime(2010, 10, 10, 16, 0, 0, - tzinfo=pytz.utc)) - - lines = comp.to_ical().splitlines() - self.assertTrue( - b"DTSTART;TZID=Europe/Vienna;VALUE=DATE-TIME:20101010T100000" - in lines) - self.assertTrue(b"CREATED;VALUE=DATE-TIME:20101010T120000Z" in lines) - self.assertTrue(b"DTSTAMP;VALUE=DATE-TIME:20101010T130000Z" in lines) - self.assertTrue( - b"LAST-MODIFIED;VALUE=DATE-TIME:20101010T160000Z" in lines - ) - - def test_cal_Component_add_no_reencode(self): - """Already encoded values should not be re-encoded. 
- """ - from icalendar import cal, prop - comp = cal.Component() - comp.add('ATTACH', 'me') - - comp.add('ATTACH', 'you', encode=False) - binary = prop.vBinary('us') - comp.add('ATTACH', binary) - - self.assertEqual(comp['ATTACH'], [u'me', 'you', binary]) - - def test_cal_Component_add_property_parameter(self): - # Test the for timezone correctness: dtstart should preserve it's - # timezone, crated, dtstamp and last-modified must be in UTC. - Component = icalendar.cal.Component - comp = Component() - comp.add('X-TEST-PROP', 'tryout.', - parameters={'prop1': 'val1', 'prop2': 'val2'}) - lines = comp.to_ical().splitlines() - self.assertTrue(b"X-TEST-PROP;PROP1=val1;PROP2=val2:tryout." in lines) - - def test_cal_Component_from_ical(self): - # Check for proper handling of TZID parameter of datetime properties - Component = icalendar.cal.Component - for component_name, property_name in ( - ('VEVENT', 'DTSTART'), - ('VEVENT', 'DTEND'), - ('VEVENT', 'RECURRENCE-ID'), - ('VTODO', 'DUE') - ): - component_str = 'BEGIN:' + component_name + '\n' - component_str += property_name + ';TZID=America/Denver:' - component_str += '20120404T073000\nEND:' + component_name - component = Component.from_ical(component_str) - self.assertEqual(str(component[property_name].dt.tzinfo.zone), - "America/Denver") - - component_str = 'BEGIN:' + component_name + '\n' - component_str += property_name + ':' - component_str += '20120404T073000\nEND:' + component_name - component = Component.from_ical(component_str) - self.assertEqual(component[property_name].dt.tzinfo, - None) - - -class TestCal(unittest.TestCase): - - def test_cal_ComponentFactory(self): - ComponentFactory = icalendar.cal.ComponentFactory - factory = ComponentFactory() - component = factory['VEVENT'] - event = component(dtstart='19700101') - self.assertEqual( - event.to_ical(), - b'BEGIN:VEVENT\r\nDTSTART:19700101\r\nEND:VEVENT\r\n' - ) - - self.assertEqual( - factory.get('VCALENDAR', icalendar.cal.Component), - 
icalendar.cal.Calendar) - - def test_cal_Calendar(self): - # Setting up a minimal calendar component looks like this - cal = icalendar.cal.Calendar() - - # Some properties are required to be compliant - cal['prodid'] = '-//My calendar product//mxm.dk//' - cal['version'] = '2.0' - - # We also need at least one subcomponent for a calendar to be compliant - event = icalendar.cal.Event() - event['summary'] = 'Python meeting about calendaring' - event['uid'] = '42' - event.add('dtstart', datetime(2005, 4, 4, 8, 0, 0)) - cal.add_component(event) - self.assertEqual( - cal.subcomponents[0].to_ical(), - b'BEGIN:VEVENT\r\nSUMMARY:Python meeting about calendaring\r\n' - + b'DTSTART;VALUE=DATE-TIME:20050404T080000\r\nUID:42\r\n' - + b'END:VEVENT\r\n') - - # Write to disc - import tempfile - import os - directory = tempfile.mkdtemp() - open(os.path.join(directory, 'test.ics'), 'wb').write(cal.to_ical()) - - # Parsing a complete calendar from a string will silently ignore bogus - # events. The bogosity in the following is the third EXDATE: it has an - # empty DATE. 
- s = '\r\n'.join(('BEGIN:VCALENDAR', - 'PRODID:-//Google Inc//Google Calendar 70.9054//EN', - 'VERSION:2.0', - 'CALSCALE:GREGORIAN', - 'METHOD:PUBLISH', - 'BEGIN:VEVENT', - 'DESCRIPTION:Perfectly OK event', - 'DTSTART;VALUE=DATE:20080303', - 'DTEND;VALUE=DATE:20080304', - 'RRULE:FREQ=DAILY;UNTIL=20080323T235959Z', - 'EXDATE;VALUE=DATE:20080311', - 'END:VEVENT', - 'BEGIN:VEVENT', - 'DESCRIPTION:Bogus event', - 'DTSTART;VALUE=DATE:20080303', - 'DTEND;VALUE=DATE:20080304', - 'RRULE:FREQ=DAILY;UNTIL=20080323T235959Z', - 'EXDATE;VALUE=DATE:20080311', - 'EXDATE;VALUE=DATE:', - 'END:VEVENT', - 'END:VCALENDAR')) - self.assertEqual( - [e['DESCRIPTION'].to_ical() - for e in icalendar.cal.Calendar.from_ical(s).walk('VEVENT')], - [b'Perfectly OK event']) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_caselessdict.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_caselessdict.py deleted file mode 100644 index 929fa6c..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_caselessdict.py +++ /dev/null @@ -1,90 +0,0 @@ -from icalendar.tests import unittest - -import icalendar - - -class TestCaselessdict(unittest.TestCase): - - def test_caselessdict_canonsort_keys(self): - canonsort_keys = icalendar.caselessdict.canonsort_keys - - keys = ['DTEND', 'DTSTAMP', 'DTSTART', 'UID', 'SUMMARY', 'LOCATION'] - - out = canonsort_keys(keys) - self.assertEqual( - out, - ['DTEND', 'DTSTAMP', 'DTSTART', 'LOCATION', 'SUMMARY', 'UID'] - ) - - out = canonsort_keys(keys, ('SUMMARY', 'DTSTART', 'DTEND', )) - self.assertEqual( - out, - ['SUMMARY', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'UID'] - ) - - out = canonsort_keys(keys, ('UID', 'DTSTART', 'DTEND', )) - self.assertEqual( - out, - ['UID', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'SUMMARY'] - ) - - out = canonsort_keys( - keys, - ('UID', 'DTSTART', 'DTEND', 'RRULE', 'EXDATE') - ) - self.assertEqual( - out, - ['UID', 'DTSTART', 'DTEND', 'DTSTAMP', 'LOCATION', 'SUMMARY'] - ) - - def 
test_caselessdict_canonsort_items(self): - canonsort_items = icalendar.caselessdict.canonsort_items - - d = dict(i=7, c='at', a=3.5, l=(2, 3), e=[4, 5], n=13, d={'x': 'y'}, - r=1.0) - - out = canonsort_items(d) - self.assertEqual( - out, - [('a', 3.5), ('c', 'at'), ('d', {'x': 'y'}), ('e', [4, 5]), - ('i', 7), ('l', (2, 3)), ('n', 13), ('r', 1.0)] - ) - - out = canonsort_items(d, ('i', 'c', 'a')) - self.assertTrue( - out, - [('i', 7), ('c', 'at'), ('a', 3.5), ('d', {'x': 'y'}), - ('e', [4, 5]), ('l', (2, 3)), ('n', 13), ('r', 1.0)] - ) - - def test_CaselessDict(self): - CaselessDict = icalendar.caselessdict.CaselessDict - - ncd = CaselessDict(key1='val1', key2='val2') - self.assertEqual( - ncd, - CaselessDict({'KEY2': 'val2', 'KEY1': 'val1'}) - ) - - self.assertEqual(ncd['key1'], 'val1') - self.assertEqual(ncd['KEY1'], 'val1') - - ncd['KEY3'] = 'val3' - self.assertEqual(ncd['key3'], 'val3') - - self.assertEqual(ncd.setdefault('key3', 'FOUND'), 'val3') - self.assertEqual(ncd.setdefault('key4', 'NOT FOUND'), 'NOT FOUND') - self.assertEqual(ncd['key4'], 'NOT FOUND') - self.assertEqual(ncd.get('key1'), 'val1') - self.assertEqual(ncd.get('key3', 'NOT FOUND'), 'val3') - self.assertEqual(ncd.get('key4', 'NOT FOUND'), 'NOT FOUND') - self.assertTrue('key4' in ncd) - - del ncd['key4'] - self.assertFalse('key4' in ncd) - - ncd.update({'key5': 'val5', 'KEY6': 'val6', 'KEY5': 'val7'}) - self.assertEqual(ncd['key6'], 'val6') - - keys = sorted(ncd.keys()) - self.assertEqual(keys, ['KEY1', 'KEY2', 'KEY3', 'KEY5', 'KEY6']) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_parser_tools.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_parser_tools.py deleted file mode 100644 index ff9de9c..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_parser_tools.py +++ /dev/null @@ -1,29 +0,0 @@ -# -*- coding: utf-8 -*- -from icalendar.parser_tools import data_encode -from icalendar.parser_tools import to_unicode -from icalendar.tests import unittest - - 
-class TestParserTools(unittest.TestCase): - - def test_parser_tools_to_unicode(self): - - self.assertEqual(to_unicode('spam'), u'spam') - self.assertEqual(to_unicode(u'spam'), u'spam') - self.assertEqual(to_unicode(u'spam'.encode('utf-8')), u'spam') - self.assertEqual(to_unicode(b'\xc6\xb5'), u'\u01b5') - self.assertEqual(to_unicode(u'\xc6\xb5'.encode('iso-8859-1')), - u'\u01b5') - self.assertEqual(to_unicode(b'\xc6\xb5', encoding='ascii'), u'\u01b5') - self.assertEqual(to_unicode(1), 1) - self.assertEqual(to_unicode(None), None) - - def test_parser_tools_data_encode(self): - - data1 = { - u'k1': u'v1', 'k2': 'v2', u'k3': u'v3', - 'li1': ['it1', u'it2', {'k4': u'v4', u'k5': 'v5'}, 123] - } - res = {b'k3': b'v3', b'k2': b'v2', b'k1': b'v1', - b'li1': [b'it1', b'it2', {b'k5': b'v5', b'k4': b'v4'}, 123]} - self.assertEqual(data_encode(data1), res) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_prop.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_prop.py deleted file mode 100644 index 30dd84c..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_prop.py +++ /dev/null @@ -1,497 +0,0 @@ -# -*- coding: utf-8 -*- -from datetime import date -from datetime import datetime -from datetime import time -from datetime import timedelta -from icalendar.parser import Parameters -from icalendar.tests import unittest - -import pytz - - -class TestProp(unittest.TestCase): - - def test_prop_vBinary(self): - from ..prop import vBinary - - txt = b'This is gibberish' - txt_ical = b'VGhpcyBpcyBnaWJiZXJpc2g=' - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - # The roundtrip test - txt = b'Binary data \x13 \x56' - txt_ical = b'QmluYXJ5IGRhdGEgEyBW' - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - self.assertIsInstance(vBinary('txt').params, Parameters) - self.assertEqual( - vBinary('txt').params, {'VALUE': 'BINARY', 'ENCODING': 
'BASE64'} - ) - - # Long data should not have line breaks, as that would interfere - txt = b'a' * 99 - txt_ical = b'YWFh' * 33 - self.assertEqual(vBinary(txt).to_ical(), txt_ical) - self.assertEqual(vBinary.from_ical(txt_ical), txt) - - def test_prop_vBoolean(self): - from ..prop import vBoolean - - self.assertEqual(vBoolean(True).to_ical(), b'TRUE') - self.assertEqual(vBoolean(0).to_ical(), b'FALSE') - - # The roundtrip test - self.assertEqual(vBoolean.from_ical(vBoolean(True).to_ical()), True) - self.assertEqual(vBoolean.from_ical('true'), True) - - def test_prop_vCalAddress(self): - from ..prop import vCalAddress - txt = b'MAILTO:maxm@mxm.dk' - a = vCalAddress(txt) - a.params['cn'] = 'Max M' - - self.assertEqual(a.to_ical(), txt) - self.assertIsInstance(a.params, Parameters) - self.assertEqual(a.params, {'CN': 'Max M'}) - self.assertEqual(vCalAddress.from_ical(txt), 'MAILTO:maxm@mxm.dk') - - def test_prop_vFloat(self): - from ..prop import vFloat - self.assertEqual(vFloat(1.0).to_ical(), b'1.0') - self.assertEqual(vFloat.from_ical('42'), 42.0) - self.assertEqual(vFloat(42).to_ical(), b'42.0') - - def test_prop_vInt(self): - from ..prop import vInt - self.assertEqual(vInt(42).to_ical(), b'42') - self.assertEqual(vInt.from_ical('13'), 13) - self.assertRaises(ValueError, vInt.from_ical, '1s3') - - def test_prop_vDDDLists(self): - from ..prop import vDDDLists - - dt_list = vDDDLists.from_ical('19960402T010000Z') - self.assertTrue(isinstance(dt_list, list)) - self.assertEqual(len(dt_list), 1) - self.assertTrue(isinstance(dt_list[0], datetime)) - self.assertEqual(str(dt_list[0]), '1996-04-02 01:00:00+00:00') - - p = '19960402T010000Z,19960403T010000Z,19960404T010000Z' - dt_list = vDDDLists.from_ical(p) - self.assertEqual(len(dt_list), 3) - self.assertEqual(str(dt_list[0]), '1996-04-02 01:00:00+00:00') - self.assertEqual(str(dt_list[2]), '1996-04-04 01:00:00+00:00') - - dt_list = vDDDLists([]) - self.assertEqual(dt_list.to_ical(), b'') - - dt_list = 
vDDDLists([datetime(2000, 1, 1)]) - self.assertEqual(dt_list.to_ical(), b'20000101T000000') - - dt_list = vDDDLists([datetime(2000, 1, 1), datetime(2000, 11, 11)]) - self.assertEqual(dt_list.to_ical(), b'20000101T000000,20001111T000000') - - def test_prop_vDDDTypes(self): - from ..prop import vDDDTypes - - self.assertTrue(isinstance(vDDDTypes.from_ical('20010101T123000'), - datetime)) - - self.assertEqual(vDDDTypes.from_ical('20010101T123000Z'), - datetime(2001, 1, 1, 12, 30, tzinfo=pytz.utc)) - - self.assertTrue(isinstance(vDDDTypes.from_ical('20010101'), date)) - - self.assertEqual(vDDDTypes.from_ical('P31D'), timedelta(31)) - - self.assertEqual(vDDDTypes.from_ical('-P31D'), timedelta(-31)) - - # Bad input - self.assertRaises(ValueError, vDDDTypes, 42) - - def test_prop_vDate(self): - from ..prop import vDate - - self.assertEqual(vDate(date(2001, 1, 1)).to_ical(), b'20010101') - self.assertEqual(vDate(date(1899, 1, 1)).to_ical(), b'18990101') - - self.assertEqual(vDate.from_ical('20010102'), date(2001, 1, 2)) - - self.assertRaises(ValueError, vDate, 'd') - - def test_prop_vDatetime(self): - from ..prop import vDatetime - - dt = datetime(2001, 1, 1, 12, 30, 0) - self.assertEqual(vDatetime(dt).to_ical(), b'20010101T123000') - - self.assertEqual(vDatetime.from_ical('20000101T120000'), - datetime(2000, 1, 1, 12, 0)) - - dutc = datetime(2001, 1, 1, 12, 30, 0, tzinfo=pytz.utc) - self.assertEqual(vDatetime(dutc).to_ical(), b'20010101T123000Z') - - dutc = datetime(1899, 1, 1, 12, 30, 0, tzinfo=pytz.utc) - self.assertEqual(vDatetime(dutc).to_ical(), b'18990101T123000Z') - - self.assertEqual(vDatetime.from_ical('20010101T000000'), - datetime(2001, 1, 1, 0, 0)) - - self.assertRaises(ValueError, vDatetime.from_ical, '20010101T000000A') - - utc = vDatetime.from_ical('20010101T000000Z') - self.assertEqual(vDatetime(utc).to_ical(), b'20010101T000000Z') - - # 1 minute before transition to DST - dat = vDatetime.from_ical('20120311T015959', 'America/Denver') - 
self.assertEqual(dat.strftime('%Y%m%d%H%M%S %z'), - '20120311015959 -0700') - - # After transition to DST - dat = vDatetime.from_ical('20120311T030000', 'America/Denver') - self.assertEqual(dat.strftime('%Y%m%d%H%M%S %z'), - '20120311030000 -0600') - - dat = vDatetime.from_ical('20101010T000000', 'Europe/Vienna') - self.assertEqual(vDatetime(dat).to_ical(), b'20101010T000000') - - def test_prop_vDuration(self): - from ..prop import vDuration - - self.assertEqual(vDuration(timedelta(11)).to_ical(), b'P11D') - self.assertEqual(vDuration(timedelta(-14)).to_ical(), b'-P14D') - self.assertEqual( - vDuration(timedelta(1, 7384)).to_ical(), - b'P1DT2H3M4S' - ) - self.assertEqual(vDuration(timedelta(1, 7380)).to_ical(), b'P1DT2H3M') - self.assertEqual(vDuration(timedelta(1, 7200)).to_ical(), b'P1DT2H') - self.assertEqual(vDuration(timedelta(0, 7200)).to_ical(), b'PT2H') - self.assertEqual(vDuration(timedelta(0, 7384)).to_ical(), b'PT2H3M4S') - self.assertEqual(vDuration(timedelta(0, 184)).to_ical(), b'PT3M4S') - self.assertEqual(vDuration(timedelta(0, 22)).to_ical(), b'PT22S') - self.assertEqual(vDuration(timedelta(0, 3622)).to_ical(), b'PT1H0M22S') - self.assertEqual(vDuration(timedelta(days=1, hours=5)).to_ical(), - b'P1DT5H') - self.assertEqual(vDuration(timedelta(hours=-5)).to_ical(), b'-PT5H') - self.assertEqual(vDuration(timedelta(days=-1, hours=-5)).to_ical(), - b'-P1DT5H') - - # How does the parsing work? 
- self.assertEqual(vDuration.from_ical('PT1H0M22S'), timedelta(0, 3622)) - - self.assertRaises(ValueError, vDuration.from_ical, 'kox') - - self.assertEqual(vDuration.from_ical('-P14D'), timedelta(-14)) - - self.assertRaises(ValueError, vDuration, 11) - - def test_prop_vPeriod(self): - from ..prop import vPeriod - - # One day in exact datetimes - per = (datetime(2000, 1, 1), datetime(2000, 1, 2)) - self.assertEqual(vPeriod(per).to_ical(), - b'20000101T000000/20000102T000000') - - per = (datetime(2000, 1, 1), timedelta(days=31)) - self.assertEqual(vPeriod(per).to_ical(), b'20000101T000000/P31D') - - # Roundtrip - p = vPeriod.from_ical('20000101T000000/20000102T000000') - self.assertEqual( - p, - (datetime(2000, 1, 1, 0, 0), datetime(2000, 1, 2, 0, 0)) - ) - self.assertEqual(vPeriod(p).to_ical(), - b'20000101T000000/20000102T000000') - - self.assertEqual(vPeriod.from_ical('20000101T000000/P31D'), - (datetime(2000, 1, 1, 0, 0), timedelta(31))) - - # Roundtrip with absolute time - p = vPeriod.from_ical('20000101T000000Z/20000102T000000Z') - self.assertEqual(vPeriod(p).to_ical(), - b'20000101T000000Z/20000102T000000Z') - - # And an error - self.assertRaises(ValueError, - vPeriod.from_ical, '20000101T000000/Psd31D') - - # Timezoned - dk = pytz.timezone('Europe/Copenhagen') - start = datetime(2000, 1, 1, tzinfo=dk) - end = datetime(2000, 1, 2, tzinfo=dk) - per = (start, end) - self.assertEqual(vPeriod(per).to_ical(), - b'20000101T000000/20000102T000000') - self.assertEqual(vPeriod(per).params['TZID'], - 'Europe/Copenhagen') - - p = vPeriod((datetime(2000, 1, 1, tzinfo=dk), timedelta(days=31))) - self.assertEqual(p.to_ical(), b'20000101T000000/P31D') - - def test_prop_vWeekday(self): - from ..prop import vWeekday - - self.assertEqual(vWeekday('mo').to_ical(), b'MO') - self.assertRaises(ValueError, vWeekday, 'erwer') - self.assertEqual(vWeekday.from_ical('mo'), 'MO') - self.assertEqual(vWeekday.from_ical('+3mo'), '+3MO') - self.assertRaises(ValueError, vWeekday.from_ical, 
'Saturday') - self.assertEqual(vWeekday('+mo').to_ical(), b'+MO') - self.assertEqual(vWeekday('+3mo').to_ical(), b'+3MO') - self.assertEqual(vWeekday('-tu').to_ical(), b'-TU') - - def test_prop_vFrequency(self): - from ..prop import vFrequency - - self.assertRaises(ValueError, vFrequency, 'bad test') - self.assertEqual(vFrequency('daily').to_ical(), b'DAILY') - self.assertEqual(vFrequency('daily').from_ical('MONTHLY'), 'MONTHLY') - - def test_prop_vRecur(self): - from ..prop import vRecur - - # Let's see how close we can get to one from the rfc: - # FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30 - - r = dict(freq='yearly', interval=2) - r.update({ - 'bymonth': 1, - 'byday': 'su', - 'byhour': [8, 9], - 'byminute': 30 - }) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - r = vRecur(FREQ='yearly', INTERVAL=2) - r.update({ - 'BYMONTH': 1, - 'BYDAY': 'su', - 'BYHOUR': [8, 9], - 'BYMINUTE': 30, - }) - self.assertEqual( - r.to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - r = vRecur(freq='DAILY', count=10) - r['bysecond'] = [0, 15, 30, 45] - self.assertEqual(r.to_ical(), - b'FREQ=DAILY;COUNT=10;BYSECOND=0,15,30,45') - - r = vRecur(freq='DAILY', until=datetime(2005, 1, 1, 12, 0, 0)) - self.assertEqual(r.to_ical(), b'FREQ=DAILY;UNTIL=20050101T120000') - - # How do we fare with regards to parsing? 
- r = vRecur.from_ical('FREQ=DAILY;INTERVAL=2;COUNT=10') - self.assertEqual(r, - {'COUNT': [10], 'FREQ': ['DAILY'], 'INTERVAL': [2]}) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=DAILY;COUNT=10;INTERVAL=2' - ) - - r = vRecur.from_ical('FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=-SU;' - 'BYHOUR=8,9;BYMINUTE=30') - self.assertEqual( - r, - {'BYHOUR': [8, 9], 'BYDAY': ['-SU'], 'BYMINUTE': [30], - 'BYMONTH': [1], 'FREQ': ['YEARLY'], 'INTERVAL': [2]} - ) - - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=-SU;' - b'BYMONTH=1' - ) - - # Some examples from the spec - r = vRecur.from_ical('FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1') - self.assertEqual(vRecur(r).to_ical(), - b'FREQ=MONTHLY;BYDAY=MO,TU,WE,TH,FR;BYSETPOS=-1') - - p = 'FREQ=YEARLY;INTERVAL=2;BYMONTH=1;BYDAY=SU;BYHOUR=8,9;BYMINUTE=30' - r = vRecur.from_ical(p) - self.assertEqual( - vRecur(r).to_ical(), - b'FREQ=YEARLY;INTERVAL=2;BYMINUTE=30;BYHOUR=8,9;BYDAY=SU;BYMONTH=1' - ) - - # and some errors - self.assertRaises(ValueError, vRecur.from_ical, 'BYDAY=12') - - def test_prop_vText(self): - from ..prop import vText - - self.assertEqual(vText(u'Simple text').to_ical(), b'Simple text') - - # Escaped text - t = vText('Text ; with escaped, chars') - self.assertEqual(t.to_ical(), b'Text \\; with escaped\\, chars') - - # Escaped newlines - self.assertEqual(vText('Text with escaped\\N chars').to_ical(), - b'Text with escaped\\n chars') - - # If you pass a unicode object, it will be utf-8 encoded. As this is - # the (only) standard that RFC 2445 support. - t = vText(u'international chars \xe4\xf6\xfc') - self.assertEqual(t.to_ical(), - b'international chars \xc3\xa4\xc3\xb6\xc3\xbc') - - # and parsing? 
- self.assertEqual(vText.from_ical('Text \\; with escaped\\, chars'), - u'Text ; with escaped, chars') - - t = vText.from_ical('A string with\\; some\\\\ characters in\\it') - self.assertEqual(t, "A string with; some\\ characters in\it") - - # We are forgiving to utf-8 encoding errors: - # We intentionally use a string with unexpected encoding - # - self.assertEqual(vText.from_ical(b'Ol\xe9'), u'Ol\ufffd') - - # Notice how accented E character, encoded with latin-1, got replaced - # with the official U+FFFD REPLACEMENT CHARACTER. - - def test_prop_vTime(self): - from ..prop import vTime - - self.assertEqual(vTime(12, 30, 0).to_ical(), '123000') - self.assertEqual(vTime.from_ical('123000'), time(12, 30)) - - # We should also fail, right? - self.assertRaises(ValueError, vTime.from_ical, '263000') - - def test_prop_vUri(self): - from ..prop import vUri - - self.assertEqual(vUri('http://www.example.com/').to_ical(), - b'http://www.example.com/') - self.assertEqual(vUri.from_ical('http://www.example.com/'), - 'http://www.example.com/') - - def test_prop_vGeo(self): - from ..prop import vGeo - - # Pass a list - self.assertEqual(vGeo([1.2, 3.0]).to_ical(), '1.2;3.0') - - # Pass a tuple - self.assertEqual(vGeo((1.2, 3.0)).to_ical(), '1.2;3.0') - - g = vGeo.from_ical('37.386013;-122.082932') - self.assertEqual(g, (float('37.386013'), float('-122.082932'))) - - self.assertEqual(vGeo(g).to_ical(), '37.386013;-122.082932') - - self.assertRaises(ValueError, vGeo, 'g') - - def test_prop_vUTCOffset(self): - from ..prop import vUTCOffset - - self.assertEqual(vUTCOffset(timedelta(hours=2)).to_ical(), '+0200') - - self.assertEqual(vUTCOffset(timedelta(hours=-5)).to_ical(), '-0500') - - self.assertEqual(vUTCOffset(timedelta()).to_ical(), '+0000') - - self.assertEqual(vUTCOffset(timedelta(minutes=-30)).to_ical(), - '-0030') - - self.assertEqual( - vUTCOffset(timedelta(hours=2, minutes=-30)).to_ical(), - '+0130' - ) - - self.assertEqual(vUTCOffset(timedelta(hours=1, 
minutes=30)).to_ical(), - '+0130') - - # Support seconds - self.assertEqual(vUTCOffset(timedelta(hours=1, - minutes=30, - seconds=7)).to_ical(), '+013007') - - # Parsing - - self.assertEqual(vUTCOffset.from_ical('0000'), timedelta(0)) - self.assertEqual(vUTCOffset.from_ical('-0030'), timedelta(-1, 84600)) - self.assertEqual(vUTCOffset.from_ical('+0200'), timedelta(0, 7200)) - self.assertEqual(vUTCOffset.from_ical('+023040'), timedelta(0, 9040)) - - self.assertEqual(vUTCOffset(vUTCOffset.from_ical('+0230')).to_ical(), - '+0230') - - # And a few failures - self.assertRaises(ValueError, vUTCOffset.from_ical, '+323k') - - self.assertRaises(ValueError, vUTCOffset.from_ical, '+2400') - - def test_prop_vInline(self): - from ..prop import vInline - - self.assertEqual(vInline('Some text'), 'Some text') - self.assertEqual(vInline.from_ical('Some text'), 'Some text') - - t2 = vInline('other text') - t2.params['cn'] = 'Test Osterone' - self.assertIsInstance(t2.params, Parameters) - self.assertEqual(t2.params, {'CN': 'Test Osterone'}) - - def test_prop_TypesFactory(self): - from ..prop import TypesFactory - - # To get a type you can use it like this. 
- factory = TypesFactory() - datetime_parser = factory['date-time'] - self.assertEqual(datetime_parser(datetime(2001, 1, 1)).to_ical(), - b'20010101T000000') - - # A typical use is when the parser tries to find a content type and use - # text as the default - value = '20050101T123000' - value_type = 'date-time' - self.assertEqual(factory.get(value_type, 'text').from_ical(value), - datetime(2005, 1, 1, 12, 30)) - - # It can also be used to directly encode property and parameter values - self.assertEqual( - factory.to_ical('comment', u'by Rasmussen, Max M\xfcller'), - b'by Rasmussen\\, Max M\xc3\xbcller' - ) - self.assertEqual(factory.to_ical('priority', 1), b'1') - self.assertEqual(factory.to_ical('cn', u'Rasmussen, Max M\xfcller'), - b'Rasmussen\\, Max M\xc3\xbcller') - self.assertEqual( - factory.from_ical('cn', b'Rasmussen\\, Max M\xc3\xb8ller'), - u'Rasmussen, Max M\xf8ller' - ) - - -class TestPropertyValues(unittest.TestCase): - - def test_vDDDLists_timezone(self): - """Test vDDDLists with timezone information. - """ - from .. 
import Event - vevent = Event() - at = pytz.timezone('Europe/Vienna') - dt1 = at.localize(datetime(2013, 1, 1)) - dt2 = at.localize(datetime(2013, 1, 2)) - dt3 = at.localize(datetime(2013, 1, 3)) - vevent.add('rdate', [dt1, dt2]) - vevent.add('exdate', dt3) - ical = vevent.to_ical() - - self.assertTrue( - b'RDATE;TZID=Europe/Vienna:20130101T000000,20130102T000000' in ical - ) - self.assertTrue(b'EXDATE;TZID=Europe/Vienna:20130103T000000' in ical) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_tools.py b/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_tools.py deleted file mode 100644 index be2e41e..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/test_unit_tools.py +++ /dev/null @@ -1,28 +0,0 @@ -from icalendar.tests import unittest -from icalendar.tools import UIDGenerator - - -class TestTools(unittest.TestCase): - - def test_tools_UIDGenerator(self): - - # Automatic semi-random uid - g = UIDGenerator() - uid = g.uid() - - txt = uid.to_ical() - length = 15 + 1 + 16 + 1 + 11 - self.assertTrue(len(txt) == length) - self.assertTrue(b'@example.com' in txt) - - # You should at least insert your own hostname to be more compliant - uid = g.uid('Example.ORG') - txt = uid.to_ical() - self.assertTrue(len(txt) == length) - self.assertTrue(b'@Example.ORG' in txt) - - # You can also insert a path or similar - uid = g.uid('Example.ORG', '/path/to/content') - txt = uid.to_ical() - self.assertTrue(len(txt) == length) - self.assertTrue(b'-/path/to/content@Example.ORG' in txt) diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/time.ics b/libs/icalendar-3.6.1/src/icalendar/tests/time.ics deleted file mode 100644 index d730a4c..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tests/time.ics +++ /dev/null @@ -1,3 +0,0 @@ -BEGIN:VCALENDAR -X-SOMETIME;VALUE=TIME:172010 -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tests/timezoned.ics b/libs/icalendar-3.6.1/src/icalendar/tests/timezoned.ics deleted file mode 100644 index 5878b72..0000000 --- 
a/libs/icalendar-3.6.1/src/icalendar/tests/timezoned.ics +++ /dev/null @@ -1,36 +0,0 @@ -BEGIN:VCALENDAR -PRODID:-//Plone.org//NONSGML plone.app.event//EN -VERSION:2.0 -X-WR-CALNAME:test create calendar -X-WR-CALDESC:icalendar test -X-WR-RELCALID:12345 -X-WR-TIMEZONE:Europe/Vienna -BEGIN:VTIMEZONE -TZID:Europe/Vienna -X-LIC-LOCATION:Europe/Vienna -BEGIN:DAYLIGHT -TZOFFSETFROM:+0100 -TZOFFSETTO:+0200 -TZNAME:CEST -DTSTART:19700329T020000 -RRULE:FREQ=YEARLY;BYMONTH=3;BYDAY=-1SU -END:DAYLIGHT -BEGIN:STANDARD -TZOFFSETFROM:+0200 -TZOFFSETTO:+0100 -TZNAME:CET -DTSTART:19701025T030000 -RRULE:FREQ=YEARLY;BYMONTH=10;BYDAY=-1SU -END:STANDARD -END:VTIMEZONE -BEGIN:VEVENT -DTSTART;TZID=Europe/Vienna:20120213T100000 -DTEND;TZID=Europe/Vienna:20120217T180000 -DTSTAMP:20101010T091010Z -CREATED:20101010T091010Z -UID:123456 -SUMMARY:artsprint 2012 -DESCRIPTION:sprinting at the artsprint -LOCATION:aka bild, wien -END:VEVENT -END:VCALENDAR diff --git a/libs/icalendar-3.6.1/src/icalendar/tools.py b/libs/icalendar-3.6.1/src/icalendar/tools.py deleted file mode 100644 index 387c43a..0000000 --- a/libs/icalendar-3.6.1/src/icalendar/tools.py +++ /dev/null @@ -1,33 +0,0 @@ -from datetime import datetime -from icalendar.parser_tools import to_unicode -from icalendar.prop import vDatetime -from icalendar.prop import vText -from string import ascii_letters -from string import digits - -import random - - -class UIDGenerator(object): - """If you are too lazy to create real uid's. - - """ - chars = list(ascii_letters + digits) - - def rnd_string(self, length=16): - """Generates a string with random characters of length. - """ - return ''.join([random.choice(self.chars) for _ in range(length)]) - - def uid(self, host_name='example.com', unique=''): - """Generates a unique id consisting of: - datetime-uniquevalue@host. 
- Like: - 20050105T225746Z-HKtJMqUgdO0jDUwm@example.com - """ - host_name = to_unicode(host_name) - unique = unique or self.rnd_string() - today = to_unicode(vDatetime(datetime.today()).to_ical()) - return vText('%s-%s@%s' % (today, - unique, - host_name)) diff --git a/libs/icalendar-3.6.1/tox.ini b/libs/icalendar-3.6.1/tox.ini deleted file mode 100644 index 9f26c5a..0000000 --- a/libs/icalendar-3.6.1/tox.ini +++ /dev/null @@ -1,23 +0,0 @@ -[tox] -envlist = py26,py27,py33 - -[testenv:py26] -deps = - unittest2 - discover - coverage -commands = - coverage erase - coverage run --source=icalendar --omit=*tests* {envbindir}/discover icalendar - coverage report --omit=*tests* - coverage html --omit=*tests* - -[testenv] -deps = - discover - coverage -commands = - coverage erase - coverage run --source=icalendar --omit=*tests* {envbindir}/discover icalendar - coverage report --omit=*tests* - coverage html --omit=*tests* diff --git a/libs/setuptools-2.2/CHANGES (links).txt b/libs/setuptools-2.2/CHANGES (links).txt deleted file mode 100644 index 9351169..0000000 --- a/libs/setuptools-2.2/CHANGES (links).txt +++ /dev/null @@ -1,1578 +0,0 @@ -======= -CHANGES -======= - ---- -2.2 ---- - -* `Issue #141 `_: Restored fix for allowing setup_requires dependencies to - override installed dependencies during setup. -* `Issue #128 `_: Fixed issue where only the first dependency link was honored - in a distribution where multiple dependency links were supplied. - ------ -2.1.2 ------ - -* `Issue #144 `_: Read long_description using codecs module to avoid errors - installing on systems where LANG=C. - ------ -2.1.1 ------ - -* `Issue #139 `_: Fix regression in re_finder for CVS repos (and maybe Git repos - as well). - ---- -2.1 ---- - -* `Issue #129 `_: Suppress inspection of ``*.whl`` files when searching for files - in a zip-imported file. -* `Issue #131 `_: Fix RuntimeError when constructing an egg fetcher. 
- ------ -2.0.2 ------ - -* Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. -* Fix NameError in ``sdist:re_finder``. - ------ -2.0.1 ------ - -* `Issue #124 `_: Fixed error in list detection in upload_docs. - ---- -2.0 ---- - -* `Issue #121 `_: Exempt lib2to3 pickled grammars from DirectorySandbox. -* `Issue #41 `_: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. -* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. -* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - ------ -1.4.2 ------ - -* `Issue #116 `_: Correct TypeError when reading a local package index on Python - 3. - ------ -1.4.1 ------ - -* `Issue #114 `_: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. - -* `Issue #105 `_ and `Issue #113 `_: Establish a more robust technique for - determining the terminal encoding:: - - 1. Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. 
So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - ---- -1.4 ---- - -* `Issue #27 `_: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. -* `Pull Request #21 `_: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - ------ -1.3.2 ------ - -* `Issue #99 `_: Fix filename encoding issues in SVN support. - ------ -1.3.1 ------ - -* Remove exuberant warning in SVN support when SVN is not used. - ---- -1.3 ---- - -* Address security vulnerability in SSL match_hostname check as reported in - `Python #17997 `_. -* Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. -* Correct NameError in ``ssl_support`` module (``socket.error``). - ---- -1.2 ---- - -* `Issue #26 `_: Add support for SVN 1.7. Special thanks to Philip Thiem for the - contribution. -* `Issue #93 `_: Wheels are now distributed with every release. Note that as - reported in `Issue #108 `_, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. -* Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - ------ -1.1.7 ------ - -* Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). -* ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. -* `Distribute #363 `_ and `Issue #55 `_: Skip an sdist test that fails on locales - other than UTF-8. - ------ -1.1.6 ------ - -* `Distribute #349 `_: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. 
- ------ -1.1.5 ------ - -* `Issue #69 `_: Second attempt at fix (logic was reversed). - ------ -1.1.4 ------ - -* `Issue #77 `_: Fix error in upload command (Python 2.4). - ------ -1.1.3 ------ - -* Fix NameError in previous patch. - ------ -1.1.2 ------ - -* `Issue #69 `_: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - ------ -1.1.1 ------ - -* `Issue #75 `_: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. -* `Issue #76 `_: Fix AttributeError in upload command with Python 2.4. - ---- -1.1 ---- - -* `Issue #71 `_ (`Distribute #333 `_): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. -* `Issue #72 `_: Restored Python 2.4 compatibility in ``ez_setup.py``. - ---- -1.0 ---- - -* `Issue #60 `_: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. -* `Issue #63 `_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. -* `Issue #65 `_: Deprecated the ``Features`` functionality. -* `Issue #52 `_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. 
- -Backward-Incompatible Changes -============================= - -This release includes a couple of backward-incompatible changes, but most if -not all users will find 1.0 a drop-in replacement for 0.9. - -* `Issue #50 `_: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. -* `Issue #50 `_: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. -* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - ------ -0.9.8 ------ - -* `Issue #53 `_: Fix NameErrors in `_vcs_split_rev_from_url`. - ------ -0.9.7 ------ - -* `Issue #49 `_: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. -* `Issue #34 `_: Documentation now refers to bootstrap script in code repository - referenced by bookmark. -* Add underscore-separated keys to environment markers (markerlib). - ------ -0.9.6 ------ - -* `Issue #44 `_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - ------ -0.9.5 ------ - -* `Python #17980 `_: Fix security vulnerability in SSL certificate validation. - ------ -0.9.4 ------ - -* `Issue #43 `_: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - ------ -0.9.3 ------ - -* `Issue #42 `_: Fix new ``AttributeError`` introduced in last fix. - ------ -0.9.2 ------ - -* `Issue #42 `_: Fix regression where blank checksums would trigger an - ``AttributeError``. - ------ -0.9.1 ------ - -* `Distribute #386 `_: Allow other positional and keyword arguments to os.open. -* Corrected dependency on certifi mis-referenced in 0.9. 
- ---- -0.9 ---- - -* `package_index` now validates hashes other than MD5 in download links. - ---- -0.8 ---- - -* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - ------ -0.7.8 ------ - -* `Distribute #375 `_: Yet another fix for yet another regression. - ------ -0.7.7 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release (redo). -* `Issue #30 `_: Added test for get_cache_path. - ------ -0.7.6 ------ - -* `Distribute #375 `_: Repair AttributeError created in last release. - ------ -0.7.5 ------ - -* `Issue #21 `_: Restore Python 2.4 compatibility in ``test_easy_install``. -* `Distribute #375 `_: Merged additional warning from Distribute 0.6.46. -* Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - ------ -0.7.4 ------ - -* `Issue #20 `_: Fix comparison of parsed SVN version on Python 3. - ------ -0.7.3 ------ - -* `Issue #1 `_: Disable installation of Windows-specific files on non-Windows systems. -* Use new sysconfig module with Python 2.7 or >=3.2. - ------ -0.7.2 ------ - -* `Issue #14 `_: Use markerlib when the `parser` module is not available. -* `Issue #10 `_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - ------ -0.7.1 ------ - -* Fix NameError (`Issue #3 `_) again - broken in bad merge. - ---- -0.7 ---- - -* Merged Setuptools and Distribute. See docs/merge.txt for details. - -Added several features that were slated for setuptools 0.6c12: - -* Index URL now defaults to HTTPS. -* Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. 
There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. -* Added support for SSL certificate validation when installing packages from - an HTTPS service. - ------ -0.7b4 ------ - -* `Issue #3 `_: Fixed NameError in SSL support. - ------- -0.6.49 ------- - -* Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in `Distribute #375 `_. - ------- -0.6.48 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). - ------- -0.6.47 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. - ------- -0.6.46 ------- - -* `Distribute #375 `_: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - ------- -0.6.45 ------- - -* `Distribute #379 `_: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - ------- -0.6.44 ------- - -* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. - ------- -0.6.43 ------- - -* `Distribute #378 `_: Restore support for Python 2.4 Syntax (regression in 0.6.42). - ------- -0.6.42 ------- - -* External links finder no longer yields duplicate links. -* `Distribute #337 `_: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR `#31 `_). - ------- -0.6.41 ------- - -* `Distribute #27 `_: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. -* Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. 
- ------- -0.6.40 ------- - -* `Distribute #376 `_: brought back cli.exe and gui.exe that were deleted in the - previous release. - ------- -0.6.39 ------- - -* Add support for console launchers on ARM platforms. -* Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. -* Launcher build script now refactored for robustness. -* `Distribute #375 `_: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - ------- -0.6.38 ------- - -* `Distribute #371 `_: The launcher manifest file is now installed properly. - ------- -0.6.37 ------- - -* `Distribute #143 `_: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - ------- -0.6.36 ------- - -* `Pull Request #35 `_: In `Buildout #64 `_, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. - ------- -0.6.35 ------- - - -Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in -how it parses version numbers. - -* `Distribute #278 `_: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - ------- -0.6.34 ------- - -* `Distribute #341 `_: 0.6.33 fails to build under Python 2.4. - ------- -0.6.33 ------- - -* Fix 2 errors with Jython 2.5. -* Fix 1 failure with Jython 2.5 and 2.7. -* Disable workaround for Jython scripts on Linux systems. -* `Distribute #336 `_: `setup.py` no longer masks failure exit code when tests fail. 
-* Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. -* `Distribute #341 `_: Fix a ResourceWarning. - ------- -0.6.32 ------- - -* Fix test suite with Python 2.6. -* Fix some DeprecationWarnings and ResourceWarnings. -* `Distribute #335 `_: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - ------- -0.6.31 ------- - -* `Distribute #303 `_: Make sure the manifest only ever contains UTF-8 in Python 3. -* `Distribute #329 `_: Properly close files created by tests for compatibility with - Jython. -* Work around `Jython #1980 `_ and `Jython #1981 `_. -* `Distribute #334 `_: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv `#359 `_ `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - -* Fix for encoding issue when installing from Windows executable on Python 3. -* `Distribute #323 `_: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - ------- -0.6.30 ------- - -* `Distribute #328 `_: Clean up temporary directories in distribute_setup.py. -* Fix fatal bug in distribute_setup.py. - ------- -0.6.29 ------- - -* `Pull Request #14 `_: Honor file permissions in zip files. -* `Distribute #327 `_: Merged pull request `#24 `_ to fix a dependency problem with pip. 
-* Merged pull request `#23 `_ to fix https://github.com/pypa/virtualenv/issues/301. -* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. -* `Distribute #326 `_: `upload_docs` provided mangled auth credentials under Python 3. -* `Distribute #320 `_: Fix check for "createable" in distribute_setup.py. -* `Distribute #305 `_: Remove a warning that was triggered during normal operations. -* `Distribute #311 `_: Print metadata in UTF-8 independent of platform. -* `Distribute #303 `_: Read manifest file with UTF-8 encoding under Python 3. -* `Distribute #301 `_: Allow to run tests of namespace packages when using 2to3. -* `Distribute #304 `_: Prevent import loop in site.py under Python 3.3. -* `Distribute #283 `_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. -* `Distribute #299 `_: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. -* `Distribute #306 `_: Even if 2to3 is used, we build in-place under Python 2. -* `Distribute #307 `_: Prints the full path when .svn/entries is broken. -* `Distribute #313 `_: Support for sdist subcommands (Python 2.7) -* `Distribute #314 `_: test_local_index() would fail an OS X. -* `Distribute #310 `_: Non-ascii characters in a namespace __init__.py causes errors. -* `Distribute #218 `_: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. -* `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. - ------- -0.6.28 ------- - -* `Distribute #294 `_: setup.py can now be invoked from any directory. -* Scripts are now installed honoring the umask. -* Added support for .dist-info directories. 
-* `Distribute #283 `_: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - ------- -0.6.27 ------- - -* Support current snapshots of CPython 3.3. -* Distribute now recognizes README.rst as a standard, default readme file. -* Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for `#285 `_. -* `Distribute #231 `_: Don't fiddle with system python when used with buildout - (bootstrap.py) - ------- -0.6.26 ------- - -* `Distribute #183 `_: Symlinked files are now extracted from source distributions. -* `Distribute #227 `_: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - ------- -0.6.25 ------- - -* `Distribute #258 `_: Workaround a cache issue -* `Distribute #260 `_: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. -* `Distribute #262 `_: package_index.open_with_auth no longer throws LookupError - on Python 3. -* `Distribute #269 `_: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. -* `Distribute #272 `_: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. -* `Distribute #273 `_: Legacy script launchers now install with Python2/3 support. 
- ------- -0.6.24 ------- - -* `Distribute #249 `_: Added options to exclude 2to3 fixers - ------- -0.6.23 ------- - -* `Distribute #244 `_: Fixed a test -* `Distribute #243 `_: Fixed a test -* `Distribute #239 `_: Fixed a test -* `Distribute #240 `_: Fixed a test -* `Distribute #241 `_: Fixed a test -* `Distribute #237 `_: Fixed a test -* `Distribute #238 `_: easy_install now uses 64bit executable wrappers on 64bit Python -* `Distribute #208 `_: Fixed parsed_versions, it now honors post-releases as noted in the documentation -* `Distribute #207 `_: Windows cli and gui wrappers pass CTRL-C to child python process -* `Distribute #227 `_: easy_install now passes its arguments to setup.py bdist_egg -* `Distribute #225 `_: Fixed a NameError on Python 2.5, 2.4 - ------- -0.6.21 ------- - -* `Distribute #225 `_: Fixed a regression on py2.4 - ------- -0.6.20 ------- - -* `Distribute #135 `_: Include url in warning when processing URLs in package_index. -* `Distribute #212 `_: Fix issue where easy_install fails on Python 3 on windows installer. -* `Distribute #213 `_: Fix typo in documentation. - ------- -0.6.19 ------- - -* `Distribute #206 `_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - ------- -0.6.18 ------- - -* `Distribute #210 `_: Fixed a regression introduced by `Distribute #204 `_ fix. - ------- -0.6.17 ------- - -* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. -* Support Python >=3.1.4 and >=3.2.1. -* `Distribute #204 `_: Don't try to import the parent of a namespace package in - declare_namespace -* `Distribute #196 `_: Tolerate responses with multiple Content-Length headers -* `Distribute #205 `_: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - ------- -0.6.16 ------- - -* Builds sdist gztar even on Windows (avoiding `Distribute #193 `_). 
-* `Distribute #192 `_: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. -* `Distribute #195 `_: Cython build support. -* `Distribute #200 `_: Issues with recognizing 64-bit packages on Windows. - ------- -0.6.15 ------- - -* Fixed typo in bdist_egg -* Several issues under Python 3 have been solved. -* `Distribute #146 `_: Fixed missing DLL files after easy_install of windows exe package. - ------- -0.6.14 ------- - -* `Distribute #170 `_: Fixed unittest failure. Thanks to Toshio. -* `Distribute #171 `_: Fixed race condition in unittests causing deadlocks in test suite. -* `Distribute #143 `_: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. -* `Distribute #174 `_: Fixed the edit mode when it's used with setuptools itself - ------- -0.6.13 ------- - -* `Distribute #160 `_: 2.7 gives ValueError("Invalid IPv6 URL") -* `Distribute #150 `_: Fixed using ~/.local even in a --no-site-packages virtualenv -* `Distribute #163 `_: scan index links before external links, and don't use the md5 when - comparing two distributions - ------- -0.6.12 ------- - -* `Distribute #149 `_: Fixed various failures on 2.3/2.4 - ------- -0.6.11 ------- - -* Found another case of SandboxViolation - fixed -* `Distribute #15 `_ and `Distribute #48 `_: Introduced a socket timeout of 15 seconds on url openings -* Added indexsidebar.html into MANIFEST.in -* `Distribute #108 `_: Fixed TypeError with Python3.1 -* `Distribute #121 `_: Fixed --help install command trying to actually install. -* `Distribute #112 `_: Added an os.makedirs so that Tarek's solution will work. -* `Distribute #133 `_: Added --no-find-links to easy_install -* Added easy_install --user -* `Distribute #100 `_: Fixed develop --user not taking '.' in PYTHONPATH into account -* `Distribute #134 `_: removed spurious UserWarnings. Patch by VanLindberg -* `Distribute #138 `_: cant_write_to_target error when setup_requires is used. 
-* `Distribute #147 `_: respect the sys.dont_write_bytecode flag - ------- -0.6.10 ------- - -* Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - ------ -0.6.9 ------ - -* `Distribute #90 `_: unknown setuptools version can be added in the working set -* `Distribute #87 `_: setup.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. -* `Distribute #89 `_: added a side bar with a download link to the doc. -* `Distribute #86 `_: fixed missing sentence in pkg_resources doc. -* Added a nicer error message when a DistributionNotFound is raised. -* `Distribute #80 `_: test_develop now works with Python 3.1 -* `Distribute #93 `_: upload_docs now works if there is an empty sub-directory. -* `Distribute #70 `_: exec bit on non-exec files -* `Distribute #99 `_: now the standalone easy_install command doesn't use a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). -* `Distribute #101 `_: Allowing ``os.devnull`` in Sandbox -* `Distribute #92 `_: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) -* `Distribute #103 `_: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX locale. Contributed by Arfrever. -* `Distribute #104 `_: removed the assertion when the installation fails, - with a nicer message for the end user. -* `Distribute #100 `_: making sure there's no SandboxViolation when - the setup script patches setuptools. - ------ -0.6.8 ------ - -* Added "check_packages" in dist. (added in Setuptools 0.6c11) -* Fixed the DONT_PATCH_SETUPTOOLS state. 
- ------ -0.6.7 ------ - -* `Distribute #58 `_: Added --user support to the develop command -* `Distribute #11 `_: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" -* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. -* Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. -* `Distribute #21 `_: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. -* Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. -* `Distribute #64 `_: use_setuptools no longer rebuilds the distribute egg every - time it is run -* use_setuptools now properly respects the requested version -* use_setuptools will no longer try to import a distribute egg for the - wrong Python version -* `Distribute #74 `_: no_fake should be True by default. -* `Distribute #72 `_: avoid a bootstrapping issue with easy_install -U - ------ -0.6.6 ------ - -* Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - ------ -0.6.5 ------ - -* `Distribute #65 `_: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - -* `Distribute #67 `_: Fixed doc typo (PEP 381/382) - -* Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - -* When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - -* Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. 
- ------ -0.6.4 ------ - -* Added the generation of `distribute_setup_3k.py` during the release. - This closes `Distribute #52 `_. - -* Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This closes issue `Distribute #56 `_. - -* Fixed a bootstrap bug on the use_setuptools() API. - ------ -0.6.3 ------ - -setuptools -========== - -* Fixed a bunch of calls to file() that caused crashes on Python 3. - -bootstrapping -============= - -* Fixed a bug in sorting that caused bootstrap to fail on Python 3. - ------ -0.6.2 ------ - -setuptools -========== - -* Added Python 3 support; see docs/python3.txt. - This closes `Old Setuptools #39 `_. - -* Added option to run 2to3 automatically when installing on Python 3. - This closes issue `Distribute #31 `_. - -* Fixed invalid usage of requirement.parse, that broke develop -d. - This closes `Old Setuptools #44 `_. - -* Fixed script launcher for 64-bit Windows. - This closes `Old Setuptools #2 `_. - -* KeyError when compiling extensions. - This closes `Old Setuptools #41 `_. - -bootstrapping -============= - -* Fixed bootstrap not working on Windows. This closes issue `Distribute #49 `_. - -* Fixed 2.6 dependencies. This closes issue `Distribute #50 `_. - -* Make sure setuptools is patched when running through easy_install - This closes `Old Setuptools #40 `_. - ------ -0.6.1 ------ - -setuptools -========== - -* package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes `Distribute #16 `_ and `Distribute #18 `_. - -* zip_ok is now False by default. This closes `Old Setuptools #33 `_. - -* Fixed invalid URL error catching. `Old Setuptools #20 `_. - -* Fixed invalid bootstrapping with easy_install installation (`Distribute #40 `_). - Thanks to Florian Schulze for the help. - -* Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. 
- - -bootstrapping -============= - -* The bootstrap process leaves setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes `Distribute #10 `_. - ---- -0.6 ---- - -setuptools -========== - -* Packages required at build time were not fully present at install time. - This closes `Distribute #12 `_. - -* Protected against failures in tarfile extraction. This closes `Distribute #10 `_. - -* Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 `_. - -* sandbox.py replaced builtin type file with builtin function open. This - closes `Distribute #6 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -* Added compatibility with Subversion 1.6. This references `Distribute #1 `_. - -pkg_resources -============= - -* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue `#5 `_. - -* Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes `Distribute #13 `_. - -* Allow find_on_path on systems with tight permissions to fail gracefully. - This closes `Distribute #9 `_. - -* Corrected inconsistency between documentation and code of add_entry. - This closes `Distribute #8 `_. - -* Immediately close all file handles. This closes `Distribute #3 `_. - -easy_install -============ - -* Immediately close all file handles. This closes `Distribute #3 `_. - ----- -0.6c9 ------ - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. - - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. 
- - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. (Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - ------ -0.6c7 ------ - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. - - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - ------ -0.6c6 ------ - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. - - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). 
- - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - ------ -0.6c5 ------ - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - ------ -0.6c4 ------ - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. - Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. - - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. 
- ------ -0.6c3 ------ - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - ------ -0.6c2 ------ - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - ------ -0.6c1 ------ - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. - - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. - - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - ------ -0.6b4 ------ - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. - - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - ------ -0.6b3 ------ - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. 
- - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - ------ -0.6b1 ------ - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) - ------- -0.6a11 ------- - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. - - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - ------- -0.6a10 ------- - - * Fixed the ``develop`` command ignoring ``--find-links``. 
- ------ -0.6a9 ------ - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. - - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. - - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. 
- - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. - - ------ -0.6a8 ------ - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - ------ -0.6a5 ------ - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. 
- ------ -0.6a3 ------ - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - ------ -0.6a2 ------ - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - ------ -0.6a1 ------ - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. - - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. 
- - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. - - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - ------- -0.5a12 ------- - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - ------- -0.5a11 ------- - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - ------ -0.5a9 ------ - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. - - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. 
The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. - Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - ------ -0.5a8 ------ - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. - - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. 
Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - ------ -0.5a7 ------ - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. - ------ -0.5a6 ------ - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). - ------ -0.5a5 ------ - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. (To uninstall the link, use - use ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) 
- - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. - - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - ------ -0.5a4 ------ - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - ------ -0.5a1 ------ - - * Added support for "self-installation" bootstrapping. Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... 
- ------ -0.4a2 ------ - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. The class has also - been moved to the new ``setuptools.package_index`` module. - - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - ------ -0.3a2 ------ - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. bug fixes - ------ -0.3a1 ------ - - * Initial release. 
diff --git a/libs/setuptools-2.2/CHANGES.txt b/libs/setuptools-2.2/CHANGES.txt deleted file mode 100644 index 1ead936..0000000 --- a/libs/setuptools-2.2/CHANGES.txt +++ /dev/null @@ -1,1578 +0,0 @@ -======= -CHANGES -======= - ---- -2.2 ---- - -* Issue #141: Restored fix for allowing setup_requires dependencies to - override installed dependencies during setup. -* Issue #128: Fixed issue where only the first dependency link was honored - in a distribution where multiple dependency links were supplied. - ------ -2.1.2 ------ - -* Issue #144: Read long_description using codecs module to avoid errors - installing on systems where LANG=C. - ------ -2.1.1 ------ - -* Issue #139: Fix regression in re_finder for CVS repos (and maybe Git repos - as well). - ---- -2.1 ---- - -* Issue #129: Suppress inspection of ``*.whl`` files when searching for files - in a zip-imported file. -* Issue #131: Fix RuntimeError when constructing an egg fetcher. - ------ -2.0.2 ------ - -* Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. -* Fix NameError in ``sdist:re_finder``. - ------ -2.0.1 ------ - -* Issue #124: Fixed error in list detection in upload_docs. - ---- -2.0 ---- - -* Issue #121: Exempt lib2to3 pickled grammars from DirectorySandbox. -* Issue #41: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. -* Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. -* Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - ------ -1.4.2 ------ - -* Issue #116: Correct TypeError when reading a local package index on Python - 3. - ------ -1.4.1 ------ - -* Issue #114: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. 
- -* Issue #105 and Issue #113: Establish a more robust technique for - determining the terminal encoding:: - - 1. Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - ---- -1.4 ---- - -* Issue #27: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. -* Pull Request #21: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - ------ -1.3.2 ------ - -* Issue #99: Fix filename encoding issues in SVN support. - ------ -1.3.1 ------ - -* Remove exuberant warning in SVN support when SVN is not used. - ---- -1.3 ---- - -* Address security vulnerability in SSL match_hostname check as reported in - Python #17997. -* Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. -* Correct NameError in ``ssl_support`` module (``socket.error``). - ---- -1.2 ---- - -* Issue #26: Add support for SVN 1.7. 
Special thanks to Philip Thiem for the - contribution. -* Issue #93: Wheels are now distributed with every release. Note that as - reported in Issue #108, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. -* Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - ------ -1.1.7 ------ - -* Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). -* ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. -* Distribute #363 and Issue #55: Skip an sdist test that fails on locales - other than UTF-8. - ------ -1.1.6 ------ - -* Distribute #349: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. - ------ -1.1.5 ------ - -* Issue #69: Second attempt at fix (logic was reversed). - ------ -1.1.4 ------ - -* Issue #77: Fix error in upload command (Python 2.4). - ------ -1.1.3 ------ - -* Fix NameError in previous patch. - ------ -1.1.2 ------ - -* Issue #69: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - ------ -1.1.1 ------ - -* Issue #75: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. -* Issue #76: Fix AttributeError in upload command with Python 2.4. - ---- -1.1 ---- - -* Issue #71 (Distribute #333): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. -* Issue #72: Restored Python 2.4 compatibility in ``ez_setup.py``. - ---- -1.0 ---- - -* Issue #60: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. 
This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. -* Issue #63: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. -* Issue #65: Deprecated the ``Features`` functionality. -* Issue #52: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. - -Backward-Incompatible Changes -============================= - -This release includes a couple of backward-incompatible changes, but most if -not all users will find 1.0 a drop-in replacement for 0.9. - -* Issue #50: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. -* Issue #50: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. -* Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - ------ -0.9.8 ------ - -* Issue #53: Fix NameErrors in `_vcs_split_rev_from_url`. - ------ -0.9.7 ------ - -* Issue #49: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. -* Issue #34: Documentation now refers to bootstrap script in code repository - referenced by bookmark. -* Add underscore-separated keys to environment markers (markerlib). 
- ------ -0.9.6 ------ - -* Issue #44: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - ------ -0.9.5 ------ - -* Python #17980: Fix security vulnerability in SSL certificate validation. - ------ -0.9.4 ------ - -* Issue #43: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - ------ -0.9.3 ------ - -* Issue #42: Fix new ``AttributeError`` introduced in last fix. - ------ -0.9.2 ------ - -* Issue #42: Fix regression where blank checksums would trigger an - ``AttributeError``. - ------ -0.9.1 ------ - -* Distribute #386: Allow other positional and keyword arguments to os.open. -* Corrected dependency on certifi mis-referenced in 0.9. - ---- -0.9 ---- - -* `package_index` now validates hashes other than MD5 in download links. - ---- -0.8 ---- - -* Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - ------ -0.7.8 ------ - -* Distribute #375: Yet another fix for yet another regression. - ------ -0.7.7 ------ - -* Distribute #375: Repair AttributeError created in last release (redo). -* Issue #30: Added test for get_cache_path. - ------ -0.7.6 ------ - -* Distribute #375: Repair AttributeError created in last release. - ------ -0.7.5 ------ - -* Issue #21: Restore Python 2.4 compatibility in ``test_easy_install``. -* Distribute #375: Merged additional warning from Distribute 0.6.46. -* Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - ------ -0.7.4 ------ - -* Issue #20: Fix comparison of parsed SVN version on Python 3. - ------ -0.7.3 ------ - -* Issue #1: Disable installation of Windows-specific files on non-Windows systems. -* Use new sysconfig module with Python 2.7 or >=3.2. - ------ -0.7.2 ------ - -* Issue #14: Use markerlib when the `parser` module is not available. 
-* Issue #10: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - ------ -0.7.1 ------ - -* Fix NameError (Issue #3) again - broken in bad merge. - ---- -0.7 ---- - -* Merged Setuptools and Distribute. See docs/merge.txt for details. - -Added several features that were slated for setuptools 0.6c12: - -* Index URL now defaults to HTTPS. -* Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. -* Added support for SSL certificate validation when installing packages from - an HTTPS service. - ------ -0.7b4 ------ - -* Issue #3: Fixed NameError in SSL support. - ------- -0.6.49 ------- - -* Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in Distribute #375. - ------- -0.6.48 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). - ------- -0.6.47 ------- - -* Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. - ------- -0.6.46 ------- - -* Distribute #375: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - ------- -0.6.45 ------- - -* Distribute #379: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - ------- -0.6.44 ------- - -* ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. 
- ------- -0.6.43 ------- - -* Distribute #378: Restore support for Python 2.4 Syntax (regression in 0.6.42). - ------- -0.6.42 ------- - -* External links finder no longer yields duplicate links. -* Distribute #337: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR #31). - ------- -0.6.41 ------- - -* Distribute #27: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. -* Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. - ------- -0.6.40 ------- - -* Distribute #376: brought back cli.exe and gui.exe that were deleted in the - previous release. - ------- -0.6.39 ------- - -* Add support for console launchers on ARM platforms. -* Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. -* Launcher build script now refactored for robustness. -* Distribute #375: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - ------- -0.6.38 ------- - -* Distribute #371: The launcher manifest file is now installed properly. - ------- -0.6.37 ------- - -* Distribute #143: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - ------- -0.6.36 ------- - -* Pull Request #35: In Buildout #64, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. 
- ------- -0.6.35 ------- - - -Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in -how it parses version numbers. - -* Distribute #278: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - ------- -0.6.34 ------- - -* Distribute #341: 0.6.33 fails to build under Python 2.4. - ------- -0.6.33 ------- - -* Fix 2 errors with Jython 2.5. -* Fix 1 failure with Jython 2.5 and 2.7. -* Disable workaround for Jython scripts on Linux systems. -* Distribute #336: `setup.py` no longer masks failure exit code when tests fail. -* Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. -* Distribute #341: Fix a ResourceWarning. - ------- -0.6.32 ------- - -* Fix test suite with Python 2.6. -* Fix some DeprecationWarnings and ResourceWarnings. -* Distribute #335: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - ------- -0.6.31 ------- - -* Distribute #303: Make sure the manifest only ever contains UTF-8 in Python 3. -* Distribute #329: Properly close files created by tests for compatibility with - Jython. -* Work around Jython #1980 and Jython #1981. -* Distribute #334: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv #359 `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - -* Fix for encoding issue when installing from Windows executable on Python 3. -* Distribute #323: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. 
Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - ------- -0.6.30 ------- - -* Distribute #328: Clean up temporary directories in distribute_setup.py. -* Fix fatal bug in distribute_setup.py. - ------- -0.6.29 ------- - -* Pull Request #14: Honor file permissions in zip files. -* Distribute #327: Merged pull request #24 to fix a dependency problem with pip. -* Merged pull request #23 to fix https://github.com/pypa/virtualenv/issues/301. -* If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. -* Distribute #326: `upload_docs` provided mangled auth credentials under Python 3. -* Distribute #320: Fix check for "createable" in distribute_setup.py. -* Distribute #305: Remove a warning that was triggered during normal operations. -* Distribute #311: Print metadata in UTF-8 independent of platform. -* Distribute #303: Read manifest file with UTF-8 encoding under Python 3. -* Distribute #301: Allow to run tests of namespace packages when using 2to3. -* Distribute #304: Prevent import loop in site.py under Python 3.3. -* Distribute #283: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. -* Distribute #299: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. -* Distribute #306: Even if 2to3 is used, we build in-place under Python 2. -* Distribute #307: Prints the full path when .svn/entries is broken. -* Distribute #313: Support for sdist subcommands (Python 2.7) -* Distribute #314: test_local_index() would fail an OS X. 
-* Distribute #310: Non-ascii characters in a namespace __init__.py causes errors. -* Distribute #218: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. -* `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. - ------- -0.6.28 ------- - -* Distribute #294: setup.py can now be invoked from any directory. -* Scripts are now installed honoring the umask. -* Added support for .dist-info directories. -* Distribute #283: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - ------- -0.6.27 ------- - -* Support current snapshots of CPython 3.3. -* Distribute now recognizes README.rst as a standard, default readme file. -* Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for #285. -* Distribute #231: Don't fiddle with system python when used with buildout - (bootstrap.py) - ------- -0.6.26 ------- - -* Distribute #183: Symlinked files are now extracted from source distributions. -* Distribute #227: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - ------- -0.6.25 ------- - -* Distribute #258: Workaround a cache issue -* Distribute #260: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. -* Distribute #262: package_index.open_with_auth no longer throws LookupError - on Python 3. -* Distribute #269: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. -* Distribute #272: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. -* Distribute #273: Legacy script launchers now install with Python2/3 support. 
- ------- -0.6.24 ------- - -* Distribute #249: Added options to exclude 2to3 fixers - ------- -0.6.23 ------- - -* Distribute #244: Fixed a test -* Distribute #243: Fixed a test -* Distribute #239: Fixed a test -* Distribute #240: Fixed a test -* Distribute #241: Fixed a test -* Distribute #237: Fixed a test -* Distribute #238: easy_install now uses 64bit executable wrappers on 64bit Python -* Distribute #208: Fixed parsed_versions, it now honors post-releases as noted in the documentation -* Distribute #207: Windows cli and gui wrappers pass CTRL-C to child python process -* Distribute #227: easy_install now passes its arguments to setup.py bdist_egg -* Distribute #225: Fixed a NameError on Python 2.5, 2.4 - ------- -0.6.21 ------- - -* Distribute #225: FIxed a regression on py2.4 - ------- -0.6.20 ------- - -* Distribute #135: Include url in warning when processing URLs in package_index. -* Distribute #212: Fix issue where easy_instal fails on Python 3 on windows installer. -* Distribute #213: Fix typo in documentation. - ------- -0.6.19 ------- - -* Distribute #206: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - ------- -0.6.18 ------- - -* Distribute #210: Fixed a regression introduced by Distribute #204 fix. - ------- -0.6.17 ------- - -* Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. -* Support Python >=3.1.4 and >=3.2.1. -* Distribute #204: Don't try to import the parent of a namespace package in - declare_namespace -* Distribute #196: Tolerate responses with multiple Content-Length headers -* Distribute #205: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - ------- -0.6.16 ------- - -* Builds sdist gztar even on Windows (avoiding Distribute #193). -* Distribute #192: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. -* Distribute #195: Cython build support. 
-* Distribute #200: Issues with recognizing 64-bit packages on Windows. - ------- -0.6.15 ------- - -* Fixed typo in bdist_egg -* Several issues under Python 3 has been solved. -* Distribute #146: Fixed missing DLL files after easy_install of windows exe package. - ------- -0.6.14 ------- - -* Distribute #170: Fixed unittest failure. Thanks to Toshio. -* Distribute #171: Fixed race condition in unittests cause deadlocks in test suite. -* Distribute #143: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. -* Distribute #174: Fixed the edit mode when its used with setuptools itself - ------- -0.6.13 ------- - -* Distribute #160: 2.7 gives ValueError("Invalid IPv6 URL") -* Distribute #150: Fixed using ~/.local even in a --no-site-packages virtualenv -* Distribute #163: scan index links before external links, and don't use the md5 when - comparing two distributions - ------- -0.6.12 ------- - -* Distribute #149: Fixed various failures on 2.3/2.4 - ------- -0.6.11 ------- - -* Found another case of SandboxViolation - fixed -* Distribute #15 and Distribute #48: Introduced a socket timeout of 15 seconds on url openings -* Added indexsidebar.html into MANIFEST.in -* Distribute #108: Fixed TypeError with Python3.1 -* Distribute #121: Fixed --help install command trying to actually install. -* Distribute #112: Added an os.makedirs so that Tarek's solution will work. -* Distribute #133: Added --no-find-links to easy_install -* Added easy_install --user -* Distribute #100: Fixed develop --user not taking '.' in PYTHONPATH into account -* Distribute #134: removed spurious UserWarnings. Patch by VanLindberg -* Distribute #138: cant_write_to_target error when setup_requires is used. -* Distribute #147: respect the sys.dont_write_bytecode flag - ------- -0.6.10 ------- - -* Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. 
- ------ -0.6.9 ------ - -* Distribute #90: unknown setuptools version can be added in the working set -* Distribute #87: setupt.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. -* Distribute #89: added a side bar with a download link to the doc. -* Distribute #86: fixed missing sentence in pkg_resources doc. -* Added a nicer error message when a DistributionNotFound is raised. -* Distribute #80: test_develop now works with Python 3.1 -* Distribute #93: upload_docs now works if there is an empty sub-directory. -* Distribute #70: exec bit on non-exec files -* Distribute #99: now the standalone easy_install command doesn't uses a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). -* Distribute #101: Allowing ``os.devnull`` in Sandbox -* Distribute #92: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) -* Distribute #103: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX local. Contributed by Arfrever. -* Distribute #104: remvoved the assertion when the installation fails, - with a nicer message for the end user. -* Distribute #100: making sure there's no SandboxViolation when - the setup script patches setuptools. - ------ -0.6.8 ------ - -* Added "check_packages" in dist. (added in Setuptools 0.6c11) -* Fixed the DONT_PATCH_SETUPTOOLS state. - ------ -0.6.7 ------ - -* Distribute #58: Added --user support to the develop command -* Distribute #11: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" -* Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. -* Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. 
Distribute should work fine with - Unladen Swallow 2009Q3. -* Distribute #21: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. -* Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. -* Distribute #64: use_setuptools no longer rebuilds the distribute egg every - time it is run -* use_setuptools now properly respects the requested version -* use_setuptools will no longer try to import a distribute egg for the - wrong Python version -* Distribute #74: no_fake should be True by default. -* Distribute #72: avoid a bootstrapping issue with easy_install -U - ------ -0.6.6 ------ - -* Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - ------ -0.6.5 ------ - -* Distribute #65: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - -* Distribute #67: Fixed doc typo (PEP 381/382) - -* Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - -* When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - -* Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - ------ -0.6.4 ------ - -* Added the generation of `distribute_setup_3k.py` during the release. - This closes Distribute #52. - -* Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This close issue Distribute #56. - -* Fixed a bootstrap bug on the use_setuptools() API. - ------ -0.6.3 ------ - -setuptools -========== - -* Fixed a bunch of calls to file() that caused crashes on Python 3. - -bootstrapping -============= - -* Fixed a bug in sorting that caused bootstrap to fail on Python 3. 
- ------ -0.6.2 ------ - -setuptools -========== - -* Added Python 3 support; see docs/python3.txt. - This closes Old Setuptools #39. - -* Added option to run 2to3 automatically when installing on Python 3. - This closes issue Distribute #31. - -* Fixed invalid usage of requirement.parse, that broke develop -d. - This closes Old Setuptools #44. - -* Fixed script launcher for 64-bit Windows. - This closes Old Setuptools #2. - -* KeyError when compiling extensions. - This closes Old Setuptools #41. - -bootstrapping -============= - -* Fixed bootstrap not working on Windows. This closes issue Distribute #49. - -* Fixed 2.6 dependencies. This closes issue Distribute #50. - -* Make sure setuptools is patched when running through easy_install - This closes Old Setuptools #40. - ------ -0.6.1 ------ - -setuptools -========== - -* package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes Distribute #16 and Distribute #18. - -* zip_ok is now False by default. This closes Old Setuptools #33. - -* Fixed invalid URL error catching. Old Setuptools #20. - -* Fixed invalid bootstraping with easy_install installation (Distribute #40). - Thanks to Florian Schulze for the help. - -* Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. - - -bootstrapping -============= - -* The boostrap process leave setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes Distribute #10. - ---- -0.6 ---- - -setuptools -========== - -* Packages required at build time where not fully present at install time. - This closes Distribute #12. - -* Protected against failures in tarfile extraction. This closes Distribute #10. - -* Made Jython api_tests.txt doctest compatible. This closes Distribute #7. - -* sandbox.py replaced builtin type file with builtin function open. This - closes Distribute #6. - -* Immediately close all file handles. This closes Distribute #3. 
- -* Added compatibility with Subversion 1.6. This references Distribute #1. - -pkg_resources -============= - -* Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue #5. - -* Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes Distribute #13. - -* Allow to find_on_path on systems with tight permissions to fail gracefully. - This closes Distribute #9. - -* Corrected inconsistency between documentation and code of add_entry. - This closes Distribute #8. - -* Immediately close all file handles. This closes Distribute #3. - -easy_install -============ - -* Immediately close all file handles. This closes Distribute #3. - ------ -0.6c9 ------ - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. - - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. - - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. 
(Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - ------ -0.6c7 ------ - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. - - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - ------ -0.6c6 ------ - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. - - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). - - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - ------ -0.6c5 ------ - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - ------ -0.6c4 ------ - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. 
- Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. - - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. - ------ -0.6c3 ------ - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - ------ -0.6c2 ------ - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - ------ -0.6c1 ------ - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. 
- - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. - - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - ------ -0.6b4 ------ - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. - - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - ------ -0.6b3 ------ - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. - - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - ------ -0.6b1 ------ - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) - ------- -0.6a11 ------- - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. 
- - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - ------- -0.6a10 ------- - - * Fixed the ``develop`` command ignoring ``--find-links``. - ------ -0.6a9 ------ - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. 
- - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. - - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. 
- - ------ -0.6a8 ------ - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - ------ -0.6a5 ------ - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. - ------ -0.6a3 ------ - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - ------ -0.6a2 ------ - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. 
On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - ------ -0.6a1 ------ - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. - - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. - - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. 
- - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - ------- -0.5a12 ------- - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - ------- -0.5a11 ------- - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - ------ -0.5a9 ------ - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. - - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. 
- Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - ------ -0.5a8 ------ - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. - - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - ------ -0.5a7 ------ - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. 
- ------ -0.5a6 ------ - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). - ------ -0.5a5 ------ - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. (To uninstall the link, use - use ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) - - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. 
- - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - ------ -0.5a4 ------ - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - ------ -0.5a1 ------ - - * Added support for "self-installation" bootstrapping. Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... - ------ -0.4a2 ------ - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. The class has also - been moved to the new ``setuptools.package_index`` module. 
- - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - ------ -0.3a2 ------ - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. bug fixes - ------ -0.3a1 ------ - - * Initial release. diff --git a/libs/setuptools-2.2/CONTRIBUTORS.txt b/libs/setuptools-2.2/CONTRIBUTORS.txt deleted file mode 100644 index dd0b8c7..0000000 --- a/libs/setuptools-2.2/CONTRIBUTORS.txt +++ /dev/null @@ -1,36 +0,0 @@ -============ -Contributors -============ - -* Alex Grönholm -* Alice Bevan-McGregor -* Arfrever Frehtes Taifersar Arahesis -* Christophe Combelles -* Daniel Stutzbach -* Daniel Holth -* Dirley Rodrigues -* Donald Stufft -* Grigory Petrov -* Hanno Schlichting -* Jannis Leidel -* Jason R. Coombs -* Jim Fulton -* Jonathan Lange -* Justin Azoff -* Lennart Regebro -* Marc Abramowitz -* Martin von Löwis -* Noufal Ibrahim -* Pedro Algarvio -* Pete Hollobon -* Phillip J. Eby -* Philip Jenvey -* Philip Thiem -* Reinout van Rees -* Robert Myers -* Stefan H. 
Holek -* Tarek Ziadé -* Toshio Kuratomi - -If you think you name is missing, please add it (alpha order by first name) - diff --git a/libs/setuptools-2.2/DEVGUIDE.txt b/libs/setuptools-2.2/DEVGUIDE.txt deleted file mode 100644 index f96d811..0000000 --- a/libs/setuptools-2.2/DEVGUIDE.txt +++ /dev/null @@ -1,22 +0,0 @@ -============================ -Quick notes for contributors -============================ - -Setuptools is developed using the DVCS Mercurial. - -Grab the code at bitbucket:: - - $ hg clone https://bitbucket.org/pypa/setuptools - -If you want to contribute changes, we recommend you fork the repository on -bitbucket, commit the changes to your repository, and then make a pull request -on bitbucket. If you make some changes, don't forget to: - -- add a note in CHANGES.txt - -Please commit bug-fixes against the current maintenance branch and new -features to the default branch. - -You can run the tests via:: - - $ python setup.py test diff --git a/libs/setuptools-2.2/MANIFEST.in b/libs/setuptools-2.2/MANIFEST.in deleted file mode 100644 index 76822cb..0000000 --- a/libs/setuptools-2.2/MANIFEST.in +++ /dev/null @@ -1,10 +0,0 @@ -recursive-include setuptools *.py *.txt *.exe *.xml -recursive-include tests *.py *.c *.pyx *.txt -recursive-include setuptools/tests *.html entries* -recursive-include setuptools/tests/svn_data *.zip -recursive-include docs *.py *.txt *.conf *.css *.css_t Makefile indexsidebar.html -recursive-include _markerlib *.py -include *.py -include *.txt -include MANIFEST.in -include launcher.c diff --git a/libs/setuptools-2.2/PKG-INFO b/libs/setuptools-2.2/PKG-INFO deleted file mode 100644 index 816326d..0000000 --- a/libs/setuptools-2.2/PKG-INFO +++ /dev/null @@ -1,1793 +0,0 @@ -Metadata-Version: 1.1 -Name: setuptools -Version: 2.2 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://pypi.python.org/pypi/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org 
-License: PSF or ZPL -Description: =============================== - Installing and Using Setuptools - =============================== - - .. contents:: **Table of Contents** - - - ------------------------- - Installation Instructions - ------------------------- - - Upgrading from Distribute - ========================= - - Currently, Distribute disallows installing Setuptools 0.7+ over Distribute. - You must first uninstall any active version of Distribute first (see - `Uninstalling`_). - - Upgrading from Setuptools 0.6 - ============================= - - Upgrading from prior versions of Setuptools is supported. Initial reports - good success in this regard. - - Windows - ======= - - The recommended way to install setuptools on Windows is to download - `ez_setup.py`_ and run it. The script will download the appropriate .egg - file and install it for you. - - .. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - - For best results, uninstall previous versions FIRST (see `Uninstalling`_). - - Once installation is complete, you will find an ``easy_install`` program in - your Python ``Scripts`` subdirectory. For simple invocation and best results, - add this directory to your ``PATH`` environment variable, if it is not already - present. - - - Unix-based Systems including Mac OS X - ===================================== - - Download `ez_setup.py`_ and run it using the target Python version. 
The script - will download the appropriate version and install it for you:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python - - Note that you will may need to invoke the command with superuser privileges to - install to the system Python:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | sudo python - - Alternatively, on Python 2.6 and later, Setuptools may be installed to a - user-local path:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - > python ez_setup.py --user - - - Python 2.4 and Python 2.5 support - ================================= - - Setuptools 2.0 and later requires Python 2.6 or later. To install setuptools - on Python 2.4 or Python 2.5, use the bootstrap script for Setuptools 1.x: - https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py. - - - Advanced Installation - ===================== - - For more advanced installation options, such as installing to custom - locations or prefixes, download and extract the source - tarball from `Setuptools on PyPI `_ - and run setup.py with any supported distutils and Setuptools options. - For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - - Use ``--help`` to get a full options list, but we recommend consulting - the `EasyInstall manual`_ for detailed instructions, especially `the section - on custom installation locations`_. - - .. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall - .. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - - Downloads - ========= - - All setuptools downloads can be found at `the project's home page in the Python - Package Index`_. Scroll to the very bottom of the page to find the links. - - .. 
_the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - - In addition to the PyPI downloads, the development version of ``setuptools`` - is available from the `Bitbucket repo`_, and in-development versions of the - `0.6 branch`_ are available as well. - - .. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev - .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - - Uninstalling - ============ - - On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` - installer, simply use the uninstall feature of "Add/Remove Programs" in the - Control Panel. - - Otherwise, to uninstall Setuptools or Distribute, regardless of the Python - version, delete all ``setuptools*`` and ``distribute*`` files and - directories from your system's ``site-packages`` directory - (and any other ``sys.path`` directories) FIRST. - - If you are upgrading or otherwise plan to re-install Setuptools or Distribute, - nothing further needs to be done. If you want to completely remove Setuptools, - you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts - and associated executables installed to the Python scripts directory. - - -------------------------------- - Using Setuptools and EasyInstall - -------------------------------- - - Here are some of the available manuals, tutorials, and other resources for - learning about Setuptools, Python Eggs, and EasyInstall: - - * `The EasyInstall user's guide and reference manual`_ - * `The setuptools Developer's Guide`_ - * `The pkg_resources API reference`_ - * `Package Compatibility Notes`_ (user-maintained) - * `The Internal Structure of Python Eggs`_ - - Questions, comments, and bug reports should be directed to the `distutils-sig - mailing list`_. 
If you have written (or know of) any tutorials, documentation, - plug-ins, or other resources for setuptools users, please let us know about - them there, so this reference list can be updated. If you have working, - *tested* patches to correct problems or add features, you may submit them to - the `setuptools bug tracker`_. - - .. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues - .. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes - .. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html - .. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html - .. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html - .. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html - .. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - - ------- - Credits - ------- - - * The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - - * Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - - * Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - - * Phillip J. 
Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - - * Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - - * Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - - * Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - - .. _files: - - ======= - CHANGES - ======= - - --- - 2.2 - --- - - * `Issue #141 `_: Restored fix for allowing setup_requires dependencies to - override installed dependencies during setup. - * `Issue #128 `_: Fixed issue where only the first dependency link was honored - in a distribution where multiple dependency links were supplied. - - ----- - 2.1.2 - ----- - - * `Issue #144 `_: Read long_description using codecs module to avoid errors - installing on systems where LANG=C. - - ----- - 2.1.1 - ----- - - * `Issue #139 `_: Fix regression in re_finder for CVS repos (and maybe Git repos - as well). - - --- - 2.1 - --- - - * `Issue #129 `_: Suppress inspection of ``*.whl`` files when searching for files - in a zip-imported file. - * `Issue #131 `_: Fix RuntimeError when constructing an egg fetcher. - - ----- - 2.0.2 - ----- - - * Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. - * Fix NameError in ``sdist:re_finder``. 
- - ----- - 2.0.1 - ----- - - * `Issue #124 `_: Fixed error in list detection in upload_docs. - - --- - 2.0 - --- - - * `Issue #121 `_: Exempt lib2to3 pickled grammars from DirectorySandbox. - * `Issue #41 `_: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. - * Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. - * Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - - ----- - 1.4.2 - ----- - - * `Issue #116 `_: Correct TypeError when reading a local package index on Python - 3. - - ----- - 1.4.1 - ----- - - * `Issue #114 `_: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. - - * `Issue #105 `_ and `Issue #113 `_: Establish a more robust technique for - determining the terminal encoding:: - - 1. Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. 
So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - - --- - 1.4 - --- - - * `Issue #27 `_: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. - * `Pull Request #21 `_: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - - ----- - 1.3.2 - ----- - - * `Issue #99 `_: Fix filename encoding issues in SVN support. - - ----- - 1.3.1 - ----- - - * Remove exuberant warning in SVN support when SVN is not used. - - --- - 1.3 - --- - - * Address security vulnerability in SSL match_hostname check as reported in - `Python #17997 `_. - * Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. - * Correct NameError in ``ssl_support`` module (``socket.error``). - - --- - 1.2 - --- - - * `Issue #26 `_: Add support for SVN 1.7. Special thanks to Philip Thiem for the - contribution. - * `Issue #93 `_: Wheels are now distributed with every release. Note that as - reported in `Issue #108 `_, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. - * Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - - ----- - 1.1.7 - ----- - - * Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). - * ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. - * `Distribute #363 `_ and `Issue #55 `_: Skip an sdist test that fails on locales - other than UTF-8. 
- - ----- - 1.1.6 - ----- - - * `Distribute #349 `_: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. - - ----- - 1.1.5 - ----- - - * `Issue #69 `_: Second attempt at fix (logic was reversed). - - ----- - 1.1.4 - ----- - - * `Issue #77 `_: Fix error in upload command (Python 2.4). - - ----- - 1.1.3 - ----- - - * Fix NameError in previous patch. - - ----- - 1.1.2 - ----- - - * `Issue #69 `_: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - - ----- - 1.1.1 - ----- - - * `Issue #75 `_: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. - * `Issue #76 `_: Fix AttributeError in upload command with Python 2.4. - - --- - 1.1 - --- - - * `Issue #71 `_ (`Distribute #333 `_): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. - * `Issue #72 `_: Restored Python 2.4 compatibility in ``ez_setup.py``. - - --- - 1.0 - --- - - * `Issue #60 `_: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. - * `Issue #63 `_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. - * `Issue #65 `_: Deprecated the ``Features`` functionality. 
- * `Issue #52 `_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. - - Backward-Incompatible Changes - ============================= - - This release includes a couple of backward-incompatible changes, but most if - not all users will find 1.0 a drop-in replacement for 0.9. - - * `Issue #50 `_: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. - * `Issue #50 `_: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. - * Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - - ----- - 0.9.8 - ----- - - * `Issue #53 `_: Fix NameErrors in `_vcs_split_rev_from_url`. - - ----- - 0.9.7 - ----- - - * `Issue #49 `_: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. - * `Issue #34 `_: Documentation now refers to bootstrap script in code repository - referenced by bookmark. - * Add underscore-separated keys to environment markers (markerlib). - - ----- - 0.9.6 - ----- - - * `Issue #44 `_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - - ----- - 0.9.5 - ----- - - * `Python #17980 `_: Fix security vulnerability in SSL certificate validation. - - ----- - 0.9.4 - ----- - - * `Issue #43 `_: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - - ----- - 0.9.3 - ----- - - * `Issue #42 `_: Fix new ``AttributeError`` introduced in last fix. - - ----- - 0.9.2 - ----- - - * `Issue #42 `_: Fix regression where blank checksums would trigger an - ``AttributeError``. 
- - ----- - 0.9.1 - ----- - - * `Distribute #386 `_: Allow other positional and keyword arguments to os.open. - * Corrected dependency on certifi mis-referenced in 0.9. - - --- - 0.9 - --- - - * `package_index` now validates hashes other than MD5 in download links. - - --- - 0.8 - --- - - * Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - - ----- - 0.7.8 - ----- - - * `Distribute #375 `_: Yet another fix for yet another regression. - - ----- - 0.7.7 - ----- - - * `Distribute #375 `_: Repair AttributeError created in last release (redo). - * `Issue #30 `_: Added test for get_cache_path. - - ----- - 0.7.6 - ----- - - * `Distribute #375 `_: Repair AttributeError created in last release. - - ----- - 0.7.5 - ----- - - * `Issue #21 `_: Restore Python 2.4 compatibility in ``test_easy_install``. - * `Distribute #375 `_: Merged additional warning from Distribute 0.6.46. - * Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - - ----- - 0.7.4 - ----- - - * `Issue #20 `_: Fix comparison of parsed SVN version on Python 3. - - ----- - 0.7.3 - ----- - - * `Issue #1 `_: Disable installation of Windows-specific files on non-Windows systems. - * Use new sysconfig module with Python 2.7 or >=3.2. - - ----- - 0.7.2 - ----- - - * `Issue #14 `_: Use markerlib when the `parser` module is not available. - * `Issue #10 `_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - - ----- - 0.7.1 - ----- - - * Fix NameError (`Issue #3 `_) again - broken in bad merge. - - --- - 0.7 - --- - - * Merged Setuptools and Distribute. See docs/merge.txt for details. - - Added several features that were slated for setuptools 0.6c12: - - * Index URL now defaults to HTTPS. - * Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. 
Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. - * Added support for SSL certificate validation when installing packages from - an HTTPS service. - - ----- - 0.7b4 - ----- - - * `Issue #3 `_: Fixed NameError in SSL support. - - ------ - 0.6.49 - ------ - - * Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in `Distribute #375 `_. - - ------ - 0.6.48 - ------ - - * Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). - - ------ - 0.6.47 - ------ - - * Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. - - ------ - 0.6.46 - ------ - - * `Distribute #375 `_: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - - ------ - 0.6.45 - ------ - - * `Distribute #379 `_: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - - ------ - 0.6.44 - ------ - - * ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. - - ------ - 0.6.43 - ------ - - * `Distribute #378 `_: Restore support for Python 2.4 Syntax (regression in 0.6.42). - - ------ - 0.6.42 - ------ - - * External links finder no longer yields duplicate links. - * `Distribute #337 `_: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR `#31 `_). - - ------ - 0.6.41 - ------ - - * `Distribute #27 `_: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. 
- * Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. - - ------ - 0.6.40 - ------ - - * `Distribute #376 `_: brought back cli.exe and gui.exe that were deleted in the - previous release. - - ------ - 0.6.39 - ------ - - * Add support for console launchers on ARM platforms. - * Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. - * Launcher build script now refactored for robustness. - * `Distribute #375 `_: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - - ------ - 0.6.38 - ------ - - * `Distribute #371 `_: The launcher manifest file is now installed properly. - - ------ - 0.6.37 - ------ - - * `Distribute #143 `_: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - - ------ - 0.6.36 - ------ - - * `Pull Request #35 `_: In `Buildout #64 `_, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. - - ------ - 0.6.35 - ------ - - - Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in - how it parses version numbers. - - * `Distribute #278 `_: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - - ------ - 0.6.34 - ------ - - * `Distribute #341 `_: 0.6.33 fails to build under Python 2.4. - - ------ - 0.6.33 - ------ - - * Fix 2 errors with Jython 2.5. 
- * Fix 1 failure with Jython 2.5 and 2.7. - * Disable workaround for Jython scripts on Linux systems. - * `Distribute #336 `_: `setup.py` no longer masks failure exit code when tests fail. - * Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. - * `Distribute #341 `_: Fix a ResourceWarning. - - ------ - 0.6.32 - ------ - - * Fix test suite with Python 2.6. - * Fix some DeprecationWarnings and ResourceWarnings. - * `Distribute #335 `_: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - - ------ - 0.6.31 - ------ - - * `Distribute #303 `_: Make sure the manifest only ever contains UTF-8 in Python 3. - * `Distribute #329 `_: Properly close files created by tests for compatibility with - Jython. - * Work around `Jython #1980 `_ and `Jython #1981 `_. - * `Distribute #334 `_: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv `#359 `_ `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - - * Fix for encoding issue when installing from Windows executable on Python 3. - * `Distribute #323 `_: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - - ------ - 0.6.30 - ------ - - * `Distribute #328 `_: Clean up temporary directories in distribute_setup.py. - * Fix fatal bug in distribute_setup.py. 
- - ------ - 0.6.29 - ------ - - * `Pull Request #14 `_: Honor file permissions in zip files. - * `Distribute #327 `_: Merged pull request `#24 `_ to fix a dependency problem with pip. - * Merged pull request `#23 `_ to fix https://github.com/pypa/virtualenv/issues/301. - * If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. - * `Distribute #326 `_: `upload_docs` provided mangled auth credentials under Python 3. - * `Distribute #320 `_: Fix check for "createable" in distribute_setup.py. - * `Distribute #305 `_: Remove a warning that was triggered during normal operations. - * `Distribute #311 `_: Print metadata in UTF-8 independent of platform. - * `Distribute #303 `_: Read manifest file with UTF-8 encoding under Python 3. - * `Distribute #301 `_: Allow to run tests of namespace packages when using 2to3. - * `Distribute #304 `_: Prevent import loop in site.py under Python 3.3. - * `Distribute #283 `_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. - * `Distribute #299 `_: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. - * `Distribute #306 `_: Even if 2to3 is used, we build in-place under Python 2. - * `Distribute #307 `_: Prints the full path when .svn/entries is broken. - * `Distribute #313 `_: Support for sdist subcommands (Python 2.7) - * `Distribute #314 `_: test_local_index() would fail an OS X. - * `Distribute #310 `_: Non-ascii characters in a namespace __init__.py causes errors. - * `Distribute #218 `_: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. - * `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. 
- - ------ - 0.6.28 - ------ - - * `Distribute #294 `_: setup.py can now be invoked from any directory. - * Scripts are now installed honoring the umask. - * Added support for .dist-info directories. - * `Distribute #283 `_: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - - ------ - 0.6.27 - ------ - - * Support current snapshots of CPython 3.3. - * Distribute now recognizes README.rst as a standard, default readme file. - * Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for `#285 `_. - * `Distribute #231 `_: Don't fiddle with system python when used with buildout - (bootstrap.py) - - ------ - 0.6.26 - ------ - - * `Distribute #183 `_: Symlinked files are now extracted from source distributions. - * `Distribute #227 `_: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - - ------ - 0.6.25 - ------ - - * `Distribute #258 `_: Workaround a cache issue - * `Distribute #260 `_: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. - * `Distribute #262 `_: package_index.open_with_auth no longer throws LookupError - on Python 3. - * `Distribute #269 `_: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. - * `Distribute #272 `_: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. - * `Distribute #273 `_: Legacy script launchers now install with Python2/3 support. 
- - ------ - 0.6.24 - ------ - - * `Distribute #249 `_: Added options to exclude 2to3 fixers - - ------ - 0.6.23 - ------ - - * `Distribute #244 `_: Fixed a test - * `Distribute #243 `_: Fixed a test - * `Distribute #239 `_: Fixed a test - * `Distribute #240 `_: Fixed a test - * `Distribute #241 `_: Fixed a test - * `Distribute #237 `_: Fixed a test - * `Distribute #238 `_: easy_install now uses 64bit executable wrappers on 64bit Python - * `Distribute #208 `_: Fixed parsed_versions, it now honors post-releases as noted in the documentation - * `Distribute #207 `_: Windows cli and gui wrappers pass CTRL-C to child python process - * `Distribute #227 `_: easy_install now passes its arguments to setup.py bdist_egg - * `Distribute #225 `_: Fixed a NameError on Python 2.5, 2.4 - - ------ - 0.6.21 - ------ - - * `Distribute #225 `_: FIxed a regression on py2.4 - - ------ - 0.6.20 - ------ - - * `Distribute #135 `_: Include url in warning when processing URLs in package_index. - * `Distribute #212 `_: Fix issue where easy_instal fails on Python 3 on windows installer. - * `Distribute #213 `_: Fix typo in documentation. - - ------ - 0.6.19 - ------ - - * `Distribute #206 `_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - - ------ - 0.6.18 - ------ - - * `Distribute #210 `_: Fixed a regression introduced by `Distribute #204 `_ fix. - - ------ - 0.6.17 - ------ - - * Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. - * Support Python >=3.1.4 and >=3.2.1. - * `Distribute #204 `_: Don't try to import the parent of a namespace package in - declare_namespace - * `Distribute #196 `_: Tolerate responses with multiple Content-Length headers - * `Distribute #205 `_: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - - ------ - 0.6.16 - ------ - - * Builds sdist gztar even on Windows (avoiding `Distribute #193 `_). 
- * `Distribute #192 `_: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. - * `Distribute #195 `_: Cython build support. - * `Distribute #200 `_: Issues with recognizing 64-bit packages on Windows. - - ------ - 0.6.15 - ------ - - * Fixed typo in bdist_egg - * Several issues under Python 3 have been solved. - * `Distribute #146 `_: Fixed missing DLL files after easy_install of windows exe package. - - ------ - 0.6.14 - ------ - - * `Distribute #170 `_: Fixed unittest failure. Thanks to Toshio. - * `Distribute #171 `_: Fixed race condition in unittests causing deadlocks in test suite. - * `Distribute #143 `_: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. - * `Distribute #174 `_: Fixed the edit mode when it's used with setuptools itself - - ------ - 0.6.13 - ------ - - * `Distribute #160 `_: 2.7 gives ValueError("Invalid IPv6 URL") - * `Distribute #150 `_: Fixed using ~/.local even in a --no-site-packages virtualenv - * `Distribute #163 `_: scan index links before external links, and don't use the md5 when - comparing two distributions - - ------ - 0.6.12 - ------ - - * `Distribute #149 `_: Fixed various failures on 2.3/2.4 - - ------ - 0.6.11 - ------ - - * Found another case of SandboxViolation - fixed - * `Distribute #15 `_ and `Distribute #48 `_: Introduced a socket timeout of 15 seconds on url openings - * Added indexsidebar.html into MANIFEST.in - * `Distribute #108 `_: Fixed TypeError with Python3.1 - * `Distribute #121 `_: Fixed --help install command trying to actually install. - * `Distribute #112 `_: Added an os.makedirs so that Tarek's solution will work. - * `Distribute #133 `_: Added --no-find-links to easy_install - * Added easy_install --user - * `Distribute #100 `_: Fixed develop --user not taking '.' in PYTHONPATH into account - * `Distribute #134 `_: removed spurious UserWarnings. Patch by VanLindberg - * `Distribute #138 `_: cant_write_to_target error when setup_requires is used. 
- * `Distribute #147 `_: respect the sys.dont_write_bytecode flag - - ------ - 0.6.10 - ------ - - * Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - - ----- - 0.6.9 - ----- - - * `Distribute #90 `_: unknown setuptools version can be added in the working set - * `Distribute #87 `_: setup.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. - * `Distribute #89 `_: added a side bar with a download link to the doc. - * `Distribute #86 `_: fixed missing sentence in pkg_resources doc. - * Added a nicer error message when a DistributionNotFound is raised. - * `Distribute #80 `_: test_develop now works with Python 3.1 - * `Distribute #93 `_: upload_docs now works if there is an empty sub-directory. - * `Distribute #70 `_: exec bit on non-exec files - * `Distribute #99 `_: now the standalone easy_install command doesn't use a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). - * `Distribute #101 `_: Allowing ``os.devnull`` in Sandbox - * `Distribute #92 `_: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) - * `Distribute #103 `_: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX locale. Contributed by Arfrever. - * `Distribute #104 `_: removed the assertion when the installation fails, - with a nicer message for the end user. - * `Distribute #100 `_: making sure there's no SandboxViolation when - the setup script patches setuptools. - - ----- - 0.6.8 - ----- - - * Added "check_packages" in dist. (added in Setuptools 0.6c11) - * Fixed the DONT_PATCH_SETUPTOOLS state. 
- - ----- - 0.6.7 - ----- - - * `Distribute #58 `_: Added --user support to the develop command - * `Distribute #11 `_: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" - * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. - * Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. - * `Distribute #21 `_: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. - * Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. - * `Distribute #64 `_: use_setuptools no longer rebuilds the distribute egg every - time it is run - * use_setuptools now properly respects the requested version - * use_setuptools will no longer try to import a distribute egg for the - wrong Python version - * `Distribute #74 `_: no_fake should be True by default. - * `Distribute #72 `_: avoid a bootstrapping issue with easy_install -U - - ----- - 0.6.6 - ----- - - * Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - - ----- - 0.6.5 - ----- - - * `Distribute #65 `_: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - - * `Distribute #67 `_: Fixed doc typo (PEP 381/382) - - * Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - - * When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. 
- - * Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - - ----- - 0.6.4 - ----- - - * Added the generation of `distribute_setup_3k.py` during the release. - This closes `Distribute #52 `_. - - * Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This close issue `Distribute #56 `_. - - * Fixed a bootstrap bug on the use_setuptools() API. - - ----- - 0.6.3 - ----- - - setuptools - ========== - - * Fixed a bunch of calls to file() that caused crashes on Python 3. - - bootstrapping - ============= - - * Fixed a bug in sorting that caused bootstrap to fail on Python 3. - - ----- - 0.6.2 - ----- - - setuptools - ========== - - * Added Python 3 support; see docs/python3.txt. - This closes `Old Setuptools #39 `_. - - * Added option to run 2to3 automatically when installing on Python 3. - This closes issue `Distribute #31 `_. - - * Fixed invalid usage of requirement.parse, that broke develop -d. - This closes `Old Setuptools #44 `_. - - * Fixed script launcher for 64-bit Windows. - This closes `Old Setuptools #2 `_. - - * KeyError when compiling extensions. - This closes `Old Setuptools #41 `_. - - bootstrapping - ============= - - * Fixed bootstrap not working on Windows. This closes issue `Distribute #49 `_. - - * Fixed 2.6 dependencies. This closes issue `Distribute #50 `_. - - * Make sure setuptools is patched when running through easy_install - This closes `Old Setuptools #40 `_. - - ----- - 0.6.1 - ----- - - setuptools - ========== - - * package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes `Distribute #16 `_ and `Distribute #18 `_. - - * zip_ok is now False by default. This closes `Old Setuptools #33 `_. - - * Fixed invalid URL error catching. `Old Setuptools #20 `_. - - * Fixed invalid bootstraping with easy_install installation (`Distribute #40 `_). - Thanks to Florian Schulze for the help. - - * Removed buildout/bootstrap.py. 
A new repository will create a specific - bootstrap.py script. - - - bootstrapping - ============= - - * The bootstrap process leaves setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes `Distribute #10 `_. - - --- - 0.6 - --- - - setuptools - ========== - - * Packages required at build time were not fully present at install time. - This closes `Distribute #12 `_. - - * Protected against failures in tarfile extraction. This closes `Distribute #10 `_. - - * Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 `_. - - * sandbox.py replaced builtin type file with builtin function open. This - closes `Distribute #6 `_. - - * Immediately close all file handles. This closes `Distribute #3 `_. - - * Added compatibility with Subversion 1.6. This references `Distribute #1 `_. - - pkg_resources - ============= - - * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue `#5 `_. - - * Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes `Distribute #13 `_. - - * Allow find_on_path on systems with tight permissions to fail gracefully. - This closes `Distribute #9 `_. - - * Corrected inconsistency between documentation and code of add_entry. - This closes `Distribute #8 `_. - - * Immediately close all file handles. This closes `Distribute #3 `_. - - easy_install - ============ - - * Immediately close all file handles. This closes `Distribute #3 `_. - - ----- - 0.6c9 - ----- - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. 
- - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. - - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. (Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - - ----- - 0.6c7 - ----- - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. - - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - - ----- - 0.6c6 - ----- - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. 
- - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). - - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - - ----- - 0.6c5 - ----- - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - - ----- - 0.6c4 - ----- - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. - Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. 
- - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. - - ----- - 0.6c3 - ----- - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - - ----- - 0.6c2 - ----- - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - - ----- - 0.6c1 - ----- - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. - - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. - - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - - ----- - 0.6b4 - ----- - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. 
- - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - - ----- - 0.6b3 - ----- - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. - - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - - ----- - 0.6b1 - ----- - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) - - ------ - 0.6a11 - ------ - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. - - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. 
Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - - ------ - 0.6a10 - ------ - - * Fixed the ``develop`` command ignoring ``--find-links``. - - ----- - 0.6a9 - ----- - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. - - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. 
- - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. - - - ----- - 0.6a8 - ----- - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. 
Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - - ----- - 0.6a5 - ----- - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. - - ----- - 0.6a3 - ----- - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - - ----- - 0.6a2 - ----- - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - - ----- - 0.6a1 - ----- - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. 
- - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. - - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. - - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - - ------ - 0.5a12 - ------ - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - - ------ - 0.5a11 - ------ - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - - ----- - 0.5a9 - ----- - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. 
- - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. - Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - - ----- - 0.5a8 - ----- - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. 
- - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - - ----- - 0.5a7 - ----- - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. - - ----- - 0.5a6 - ----- - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). - - ----- - 0.5a5 - ----- - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. 
(To uninstall the link, use - ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) - - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. - - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - - ----- - 0.5a4 - ----- - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - - ----- - 0.5a1 - ----- - - * Added support for "self-installation" bootstrapping. 
Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... - - ----- - 0.4a2 - ----- - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. The class has also - been moved to the new ``setuptools.package_index`` module. - - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - - ----- - 0.3a2 - ----- - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. 
bug fixes - - ----- - 0.3a1 - ----- - - * Initial release. - -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/libs/setuptools-2.2/README.txt b/libs/setuptools-2.2/README.txt deleted file mode 100644 index 6079c2e..0000000 --- a/libs/setuptools-2.2/README.txt +++ /dev/null @@ -1,188 +0,0 @@ -=============================== -Installing and Using Setuptools -=============================== - -.. contents:: **Table of Contents** - - -------------------------- -Installation Instructions -------------------------- - -Upgrading from Distribute -========================= - -Currently, Distribute disallows installing Setuptools 0.7+ over Distribute. -You must first uninstall any active version of Distribute first (see -`Uninstalling`_). - -Upgrading from Setuptools 0.6 -============================= - -Upgrading from prior versions of Setuptools is supported. Initial reports -good success in this regard. - -Windows -======= - -The recommended way to install setuptools on Windows is to download -`ez_setup.py`_ and run it. The script will download the appropriate .egg -file and install it for you. - -.. 
_ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - -For best results, uninstall previous versions FIRST (see `Uninstalling`_). - -Once installation is complete, you will find an ``easy_install`` program in -your Python ``Scripts`` subdirectory. For simple invocation and best results, -add this directory to your ``PATH`` environment variable, if it is not already -present. - - -Unix-based Systems including Mac OS X -===================================== - -Download `ez_setup.py`_ and run it using the target Python version. The script -will download the appropriate version and install it for you:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python - -Note that you will may need to invoke the command with superuser privileges to -install to the system Python:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | sudo python - -Alternatively, on Python 2.6 and later, Setuptools may be installed to a -user-local path:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - > python ez_setup.py --user - - -Python 2.4 and Python 2.5 support -================================= - -Setuptools 2.0 and later requires Python 2.6 or later. To install setuptools -on Python 2.4 or Python 2.5, use the bootstrap script for Setuptools 1.x: -https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py. - - -Advanced Installation -===================== - -For more advanced installation options, such as installing to custom -locations or prefixes, download and extract the source -tarball from `Setuptools on PyPI `_ -and run setup.py with any supported distutils and Setuptools options. -For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - -Use ``--help`` to get a full options list, but we recommend consulting -the `EasyInstall manual`_ for detailed instructions, especially `the section -on custom installation locations`_. - -.. 
_EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall -.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - -Downloads -========= - -All setuptools downloads can be found at `the project's home page in the Python -Package Index`_. Scroll to the very bottom of the page to find the links. - -.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Bitbucket repo`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -Uninstalling -============ - -On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` -installer, simply use the uninstall feature of "Add/Remove Programs" in the -Control Panel. - -Otherwise, to uninstall Setuptools or Distribute, regardless of the Python -version, delete all ``setuptools*`` and ``distribute*`` files and -directories from your system's ``site-packages`` directory -(and any other ``sys.path`` directories) FIRST. - -If you are upgrading or otherwise plan to re-install Setuptools or Distribute, -nothing further needs to be done. If you want to completely remove Setuptools, -you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts -and associated executables installed to the Python scripts directory. 
- --------------------------------- -Using Setuptools and EasyInstall --------------------------------- - -Here are some of the available manuals, tutorials, and other resources for -learning about Setuptools, Python Eggs, and EasyInstall: - -* `The EasyInstall user's guide and reference manual`_ -* `The setuptools Developer's Guide`_ -* `The pkg_resources API reference`_ -* `Package Compatibility Notes`_ (user-maintained) -* `The Internal Structure of Python Eggs`_ - -Questions, comments, and bug reports should be directed to the `distutils-sig -mailing list`_. If you have written (or know of) any tutorials, documentation, -plug-ins, or other resources for setuptools users, please let us know about -them there, so this reference list can be updated. If you have working, -*tested* patches to correct problems or add features, you may submit them to -the `setuptools bug tracker`_. - -.. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues -.. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes -.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html -.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html -.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html -.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - -------- -Credits -------- - -* The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - -* Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. 
Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - -* Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - -* Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - -* Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - -* Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - -* Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - -.. 
_files: diff --git a/libs/setuptools-2.2/__pycache__/pkg_resources.cpython-33.pyc b/libs/setuptools-2.2/__pycache__/pkg_resources.cpython-33.pyc deleted file mode 100644 index 64948b2..0000000 Binary files a/libs/setuptools-2.2/__pycache__/pkg_resources.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/_markerlib/__init__.py b/libs/setuptools-2.2/_markerlib/__init__.py deleted file mode 100644 index e2b237b..0000000 --- a/libs/setuptools-2.2/_markerlib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -try: - import ast - from _markerlib.markers import default_environment, compile, interpret -except ImportError: - if 'ast' in globals(): - raise - def default_environment(): - return {} - def compile(marker): - def marker_fn(environment=None, override=None): - # 'empty markers are True' heuristic won't install extra deps. - return not marker.strip() - marker_fn.__doc__ = marker - return marker_fn - def interpret(marker, environment=None, override=None): - return compile(marker)() diff --git a/libs/setuptools-2.2/_markerlib/markers.py b/libs/setuptools-2.2/_markerlib/markers.py deleted file mode 100644 index fa83706..0000000 --- a/libs/setuptools-2.2/_markerlib/markers.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -"""Interpret PEP 345 environment markers. - -EXPR [in|==|!=|not in] EXPR [or|and] ... 
- -where EXPR belongs to any of those: - - python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) - python_full_version = sys.version.split()[0] - os.name = os.name - sys.platform = sys.platform - platform.version = platform.version() - platform.machine = platform.machine() - platform.python_implementation = platform.python_implementation() - a free string, like '2.6', or 'win32' -""" - -__all__ = ['default_environment', 'compile', 'interpret'] - -import ast -import os -import platform -import sys -import weakref - -_builtin_compile = compile - -try: - from platform import python_implementation -except ImportError: - if os.name == "java": - # Jython 2.5 has ast module, but not platform.python_implementation() function. - def python_implementation(): - return "Jython" - else: - raise - - -# restricted set of variables -_VARS = {'sys.platform': sys.platform, - 'python_version': '%s.%s' % sys.version_info[:2], - # FIXME parsing sys.platform is not reliable, but there is no other - # way to get e.g. 2.7.2+, and the PEP is defined with sys.version - 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': python_implementation(), - 'extra': None # wheel extension - } - -for var in list(_VARS.keys()): - if '.' 
in var: - _VARS[var.replace('.', '_')] = _VARS[var] - -def default_environment(): - """Return copy of default PEP 385 globals dictionary.""" - return dict(_VARS) - -class ASTWhitelist(ast.NodeTransformer): - def __init__(self, statement): - self.statement = statement # for error messages - - ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) - # Bool operations - ALLOWED += (ast.And, ast.Or) - # Comparison operations - ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) - - def visit(self, node): - """Ensure statement only contains allowed nodes.""" - if not isinstance(node, self.ALLOWED): - raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % - (self.statement, - (' ' * node.col_offset) + '^')) - return ast.NodeTransformer.visit(self, node) - - def visit_Attribute(self, node): - """Flatten one level of attribute access.""" - new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) - return ast.copy_location(new_node, node) - -def parse_marker(marker): - tree = ast.parse(marker, mode='eval') - new_tree = ASTWhitelist(marker).generic_visit(tree) - return new_tree - -def compile_marker(parsed_marker): - return _builtin_compile(parsed_marker, '', 'eval', - dont_inherit=True) - -_cache = weakref.WeakValueDictionary() - -def compile(marker): - """Return compiled marker as a function accepting an environment dict.""" - try: - return _cache[marker] - except KeyError: - pass - if not marker.strip(): - def marker_fn(environment=None, override=None): - """""" - return True - else: - compiled_marker = compile_marker(parse_marker(marker)) - def marker_fn(environment=None, override=None): - """override updates environment""" - if override is None: - override = {} - if environment is None: - environment = default_environment() - environment.update(override) - return eval(compiled_marker, environment) - marker_fn.__doc__ = marker - _cache[marker] = marker_fn - return 
_cache[marker] - -def interpret(marker, environment=None): - return compile(marker)(environment) diff --git a/libs/setuptools-2.2/build/lib/_markerlib/__init__.py b/libs/setuptools-2.2/build/lib/_markerlib/__init__.py deleted file mode 100644 index e2b237b..0000000 --- a/libs/setuptools-2.2/build/lib/_markerlib/__init__.py +++ /dev/null @@ -1,16 +0,0 @@ -try: - import ast - from _markerlib.markers import default_environment, compile, interpret -except ImportError: - if 'ast' in globals(): - raise - def default_environment(): - return {} - def compile(marker): - def marker_fn(environment=None, override=None): - # 'empty markers are True' heuristic won't install extra deps. - return not marker.strip() - marker_fn.__doc__ = marker - return marker_fn - def interpret(marker, environment=None, override=None): - return compile(marker)() diff --git a/libs/setuptools-2.2/build/lib/_markerlib/markers.py b/libs/setuptools-2.2/build/lib/_markerlib/markers.py deleted file mode 100644 index fa83706..0000000 --- a/libs/setuptools-2.2/build/lib/_markerlib/markers.py +++ /dev/null @@ -1,119 +0,0 @@ -# -*- coding: utf-8 -*- -"""Interpret PEP 345 environment markers. - -EXPR [in|==|!=|not in] EXPR [or|and] ... - -where EXPR belongs to any of those: - - python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1]) - python_full_version = sys.version.split()[0] - os.name = os.name - sys.platform = sys.platform - platform.version = platform.version() - platform.machine = platform.machine() - platform.python_implementation = platform.python_implementation() - a free string, like '2.6', or 'win32' -""" - -__all__ = ['default_environment', 'compile', 'interpret'] - -import ast -import os -import platform -import sys -import weakref - -_builtin_compile = compile - -try: - from platform import python_implementation -except ImportError: - if os.name == "java": - # Jython 2.5 has ast module, but not platform.python_implementation() function. 
- def python_implementation(): - return "Jython" - else: - raise - - -# restricted set of variables -_VARS = {'sys.platform': sys.platform, - 'python_version': '%s.%s' % sys.version_info[:2], - # FIXME parsing sys.platform is not reliable, but there is no other - # way to get e.g. 2.7.2+, and the PEP is defined with sys.version - 'python_full_version': sys.version.split(' ', 1)[0], - 'os.name': os.name, - 'platform.version': platform.version(), - 'platform.machine': platform.machine(), - 'platform.python_implementation': python_implementation(), - 'extra': None # wheel extension - } - -for var in list(_VARS.keys()): - if '.' in var: - _VARS[var.replace('.', '_')] = _VARS[var] - -def default_environment(): - """Return copy of default PEP 385 globals dictionary.""" - return dict(_VARS) - -class ASTWhitelist(ast.NodeTransformer): - def __init__(self, statement): - self.statement = statement # for error messages - - ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str) - # Bool operations - ALLOWED += (ast.And, ast.Or) - # Comparison operations - ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn) - - def visit(self, node): - """Ensure statement only contains allowed nodes.""" - if not isinstance(node, self.ALLOWED): - raise SyntaxError('Not allowed in environment markers.\n%s\n%s' % - (self.statement, - (' ' * node.col_offset) + '^')) - return ast.NodeTransformer.visit(self, node) - - def visit_Attribute(self, node): - """Flatten one level of attribute access.""" - new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx) - return ast.copy_location(new_node, node) - -def parse_marker(marker): - tree = ast.parse(marker, mode='eval') - new_tree = ASTWhitelist(marker).generic_visit(tree) - return new_tree - -def compile_marker(parsed_marker): - return _builtin_compile(parsed_marker, '', 'eval', - dont_inherit=True) - -_cache = weakref.WeakValueDictionary() - -def compile(marker): - 
"""Return compiled marker as a function accepting an environment dict.""" - try: - return _cache[marker] - except KeyError: - pass - if not marker.strip(): - def marker_fn(environment=None, override=None): - """""" - return True - else: - compiled_marker = compile_marker(parse_marker(marker)) - def marker_fn(environment=None, override=None): - """override updates environment""" - if override is None: - override = {} - if environment is None: - environment = default_environment() - environment.update(override) - return eval(compiled_marker, environment) - marker_fn.__doc__ = marker - _cache[marker] = marker_fn - return _cache[marker] - -def interpret(marker, environment=None): - return compile(marker)(environment) diff --git a/libs/setuptools-2.2/build/lib/easy_install.py b/libs/setuptools-2.2/build/lib/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/libs/setuptools-2.2/build/lib/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/libs/setuptools-2.2/build/lib/pkg_resources.py b/libs/setuptools-2.2/build/lib/pkg_resources.py deleted file mode 100644 index bde3098..0000000 --- a/libs/setuptools-2.2/build/lib/pkg_resources.py +++ /dev/null @@ -1,2738 +0,0 @@ -""" -Package resource API --------------------- - -A resource is a logical file contained within a package, or a logical -subdirectory thereof. The package resource API expects resource names -to have their path parts separated with ``/``, *not* whatever the local -path separator is. Do not use os.path operations to manipulate resource -names being passed into the API. - -The package resource API is designed to work with normal filesystem packages, -.egg files, and unpacked .egg files. It can also work in a limited way with -.zip files and with custom PEP 302 loaders that support the ``get_data()`` -method. 
-""" - -import sys -import os -import time -import re -import imp -import zipfile -import zipimport -import warnings -import stat -import functools -import pkgutil -import token -import symbol -import operator -import platform -from pkgutil import get_importer - -try: - from urlparse import urlparse, urlunparse -except ImportError: - from urllib.parse import urlparse, urlunparse - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset -try: - basestring - next = lambda o: o.next() - from cStringIO import StringIO as BytesIO -except NameError: - basestring = str - from io import BytesIO - def execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - exec(compile(open(fn).read(), fn, 'exec'), globs, locs) - -# capture these to bypass sandboxing -from os import utime -try: - from os import mkdir, rename, unlink - WRITE_SUPPORT = True -except ImportError: - # no write support, probably under GAE - WRITE_SUPPORT = False - -from os import open as os_open -from os.path import isdir, split - -# Avoid try/except due to potential problems with delayed import mechanisms. 
-if sys.version_info >= (3, 3) and sys.implementation.name == "cpython": - import importlib._bootstrap as importlib_bootstrap -else: - importlib_bootstrap = None - -try: - import parser -except ImportError: - pass - -def _bypass_ensure_directory(name, mode=0x1FF): # 0777 - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) - - -_state_vars = {} - -def _declare_state(vartype, **kw): - g = globals() - for name, val in kw.items(): - g[name] = val - _state_vars[name] = vartype - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.items(): - state[k] = g['_sget_'+v](g[k]) - return state - -def __setstate__(state): - g = globals() - for k, v in state.items(): - g['_sset_'+_state_vars[k]](k, g[k], v) - return state - -def _sget_dict(val): - return val.copy() - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - -def _sget_object(val): - return val.__getstate__() - -def _sset_object(key, ob, state): - ob.__setstate__(state) - -_sget_none = _sset_none = lambda *args: None - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. 
- """ - plat = get_build_platform() - m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] - -class ResolutionError(Exception): - 
"""Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. - """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - import platform - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - import plistlib - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 
'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. - """ - try: - # Python 2.7 or >=3.2 - from sysconfig import get_platform - except ImportError: - from distutils.util import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. 
- provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? - if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - # is the required OS major update >= the provided one? - if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - 
return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? 
(like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. - (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. 
- """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). - """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - if item not in self.entry_keys: - # workaround a cache issue - continue - - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True, replace=False): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set, unless `replace=True`. - If it's added, any callbacks registered with the ``subscribe()`` method - will be called. 
- """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if not replace and dist.key in self.by_key: - return # ignore hidden distros - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, - replace_conflicting=False): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - - Unless `replace_conflicting=True`, raises a VersionConflict exception if - any requirements are found on the path that have the correct name but - the wrong version. Otherwise, if an `installer` is supplied it will be - invoked to obtain the correct version of the requirement and activate - it. 
- """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match(req, ws, installer) - if dist is None: - #msg = ("The '%s' distribution was not found on this " - # "system, and is required by this application.") - #raise DistributionNotFound(msg % req) - - # unfortunately, zc.buildout uses a str(err) - # to get the name of the distribution here.. - raise DistributionNotFound(req) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, plugin_env, full_env=None, installer=None, - fallback=True): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. 
The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. 
- """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - list(map(shadow_set.add, self)) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError: - v = sys.exc_info()[1] - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - list(map(shadow_set.add, resolvees)) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. 
- """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'3.3'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? 
- - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. 
(This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class 
ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract 
file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. -""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self._warn_unsafe_extraction_path(extract_path) - - self.cached_files[target_path] = 1 - return target_path - - @staticmethod - def _warn_unsafe_extraction_path(path): - """ - If the default extraction path is overridden and set to an insecure - location, such as /tmp, it opens up an opportunity for an attacker to - replace an extracted file with an unauthorized payload. Warn the user - if a known insecure location is used. - - See Distribute #375 for more details. 
- """ - if os.name == 'nt' and not path.startswith(os.environ['windir']): - # On Windows, permissions are generally restrictive by default - # and temp directories are not writable by other users, so - # bypass the warning. - return - mode = os.stat(path).st_mode - if mode & stat.S_IWOTH or mode & stat.S_IWGRP: - msg = ("%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " - "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path) - warnings.warn(msg, UserWarning) - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777 - os.chmod(tempname, mode) - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. 
There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - app_data = 'Application Data' # XXX this may be locale-specific! 
- app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname,subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE enviroment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
- """ - return name.replace('-','_') - - -class MarkerEvaluation(object): - values = { - 'os_name': lambda: os.name, - 'sys_platform': lambda: sys.platform, - 'python_full_version': lambda: sys.version.split()[0], - 'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]), - 'platform_version': platform.version, - 'platform_machine': platform.machine, - 'python_implementation': platform.python_implementation, - } - - @classmethod - def is_invalid_marker(cls, text): - """ - Validate text as a PEP 426 environment marker; return an exception - if invalid or False otherwise. - """ - try: - cls.evaluate_marker(text) - except SyntaxError: - return cls.normalize_exception(sys.exc_info()[1]) - return False - - @staticmethod - def normalize_exception(exc): - """ - Given a SyntaxError from a marker evaluation, normalize the error message: - - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error messages. - """ - subs = { - 'unexpected EOF while parsing': 'invalid syntax', - 'parenthesis is never closed': 'invalid syntax', - } - exc.filename = None - exc.lineno = None - exc.msg = subs.get(exc.msg, exc.msg) - return exc - - @classmethod - def and_test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) - - @classmethod - def test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
- return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) - - @classmethod - def atom(cls, nodelist): - t = nodelist[1][0] - if t == token.LPAR: - if nodelist[2][0] == token.RPAR: - raise SyntaxError("Empty parentheses") - return cls.interpret(nodelist[2]) - raise SyntaxError("Language feature not supported in environment markers") - - @classmethod - def comparison(cls, nodelist): - if len(nodelist)>4: - raise SyntaxError("Chained comparison not allowed in environment markers") - comp = nodelist[2][1] - cop = comp[1] - if comp[0] == token.NAME: - if len(nodelist[2]) == 3: - if cop == 'not': - cop = 'not in' - else: - cop = 'is not' - try: - cop = cls.get_op(cop) - except KeyError: - raise SyntaxError(repr(cop)+" operator not allowed in environment markers") - return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) - - @classmethod - def get_op(cls, op): - ops = { - symbol.test: cls.test, - symbol.and_test: cls.and_test, - symbol.atom: cls.atom, - symbol.comparison: cls.comparison, - 'not in': lambda x, y: x not in y, - 'in': lambda x, y: x in y, - '==': operator.eq, - '!=': operator.ne, - } - if hasattr(symbol, 'or_test'): - ops[symbol.or_test] = cls.test - return ops[op] - - @classmethod - def evaluate_marker(cls, text, extra=None): - """ - Evaluate a PEP 426 environment marker on CPython 2.4+. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'parser' module, which is not implemented on - Jython and has been superseded by the 'ast' module in Python 2.6 and - later. - """ - return cls.interpret(parser.expr(text).totuple(1)[1]) - - @classmethod - def _markerlib_evaluate(cls, text): - """ - Evaluate a PEP 426 environment marker using markerlib. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. 
- """ - import _markerlib - # markerlib implements Metadata 1.2 (PEP 345) environment markers. - # Translate the variables to Metadata 2.0 (PEP 426). - env = _markerlib.default_environment() - for key in env.keys(): - new_key = key.replace('.', '_') - env[new_key] = env.pop(key) - try: - result = _markerlib.interpret(text, env) - except NameError: - e = sys.exc_info()[1] - raise SyntaxError(e.args[0]) - return result - - if 'parser' not in globals(): - # Fall back to less-complete _markerlib implementation if 'parser' module - # is not available. - evaluate_marker = _markerlib_evaluate - - @classmethod - def interpret(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - try: - op = cls.get_op(nodelist[0]) - except KeyError: - raise SyntaxError("Comparison or logical expression expected") - return op(nodelist) - - @classmethod - def evaluate(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - kind = nodelist[0] - name = nodelist[1] - if kind==token.NAME: - try: - op = cls.values[name] - except KeyError: - raise SyntaxError("Unknown name %r" % name) - return op() - if kind==token.STRING: - s = nodelist[1] - if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \ - or '\\' in s: - raise SyntaxError( - "Only plain strings allowed in environment markers") - return s[1:-1] - raise SyntaxError("Language feature not supported in environment markers") - -invalid_marker = MarkerEvaluation.is_invalid_marker -evaluate_marker = MarkerEvaluation.evaluate_marker - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - 
return BytesIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec(script_code, namespace, namespace) - - def _has(self, path): - raise NotImplementedError( - "Can't perform this 
operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - -if importlib_bootstrap is not None: - register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider) - - -class EmptyProvider(NullProvider): - 
"""Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - -def build_zipmanifest(path): - """ - This builds a similar dictionary to the zipimport directory - caches. However instead of tuples, ZipInfo objects are stored. - - The translation of the tuple is as follows: - * [0] - zipinfo.filename on stock pythons this needs "/" --> os.sep - on pypy it is the same (one reason why distribute did work - in some cases on pypy and win32). - * [1] - zipinfo.compress_type - * [2] - zipinfo.compress_size - * [3] - zipinfo.file_size - * [4] - len(utf-8 encoding of filename) if zipinfo & 0x800 - len(ascii encoding of filename) otherwise - * [5] - (zipinfo.date_time[0] - 1980) << 9 | - zipinfo.date_time[1] << 5 | zipinfo.date_time[2] - * [6] - (zipinfo.date_time[3] - 1980) << 11 | - zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2) - * [7] - zipinfo.CRC - """ - zipinfo = dict() - zfile = zipfile.ZipFile(path) - #Got ZipFile has not __exit__ on python 3.1 - try: - for zitem in zfile.namelist(): - zpath = zitem.replace('/', os.sep) - zipinfo[zpath] = zfile.getinfo(zitem) - assert zipinfo[zpath] is not None - finally: - zfile.close() - return zipinfo - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = build_zipmanifest(self.loader.archive) - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert 
a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - @staticmethod - def _get_date_and_size(zip_stat): - size = zip_stat.file_size - date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst - #1980 offset already done - timestamp = time.mktime(date_time) - return timestamp, size - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if self._is_current(real_path, zip_path): - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - if self._is_current(real_path, zip_path): - 
# the file became current since it was checked above, - # so proceed. - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return real_path - - def _is_current(self, file_path, zip_path): - """ - Return True if the file_path is current for this zip_path - """ - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - if not os.path.isfile(file_path): - return False - stat = os.stat(file_path) - if stat.st_size!=size or stat.st_mtime!=timestamp: - return False - # check that the contents match - zip_contents = self.loader.get_data(zip_path) - f = open(file_path, 'rb') - file_contents = f.read() - f.close() - return zip_contents == file_contents - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - 
-register_loader_type(zipimport.zipimporter, ZipProvider) - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. - """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() - return metadata - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = build_zipmanifest(importer.archive) - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - -_declare_state('dict', _distribution_finders = {}) - -def 
register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_eggs_in_zip(importer, path_item, only=False): - """ - Find eggs in zip files; possibly multiple nested eggs. - """ - if importer.archive.endswith('.whl'): - # wheels are not supported with this finder - # they don't have PKG-INFO metadata, and won't ever contain eggs - return - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_eggs_in_zip) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - 
) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info') or lower.endswith('.dist-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - entry_file = open(os.path.join(path_item, entry)) - try: - entry_lines = entry_file.readlines() - finally: - entry_file.close() - for line in entry_lines: - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): - yield item - break -register_finder(pkgutil.ImpImporter,find_on_path) - -if importlib_bootstrap is not None: - register_finder(importlib_bootstrap.FileFinder, find_on_path) - -_declare_state('dict', _namespace_handlers={}) -_declare_state('dict', _namespace_packages={}) - - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = imp.new_module(packageName) - module.__path__ = [] - _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer, path_item, packageName, module) - if subpath is not None: - path = module.__path__ - path.append(subpath) - loader.load_module(packageName) - for path_item in path: - if path_item not in module.__path__: - module.__path__.append(path_item) - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' 
in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - if parent not in _namespace_packages: - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(pkgutil.ImpImporter,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) - -if importlib_bootstrap is not None: - register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - 
try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P[^-]+)" - r"( -(?P[^-]+) (-py(?P[^-]+) (-(?P.+))? )? )?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. 
It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. 
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - list(map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer))) - - @classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if 
'[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) - - @classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - @classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urlparse(location) - if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) - return location - - -class Distribution(object): - """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' - - def __init__(self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = 
location - self.precedence = precedence - self._provider = metadata or empty_provider - - @classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in _distributionImpl: - # .dist-info gets much metadata differently - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - cls = _distributionImpl[ext.lower()] - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version, - self.platform - ) - ) - def __hash__(self): return hash(self.hashcmp) - def __lt__(self, other): - return self.hashcmp < other.hashcmp - def __le__(self, other): - return self.hashcmp <= other.hashcmp - def __gt__(self, other): - return self.hashcmp > other.hashcmp - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. 
(i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - @property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - - @property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - @property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata(self.PKG_INFO): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self - ) - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: - if ':' in extra: - extra, marker = extra.split(':',1) - if invalid_marker(marker): - reqs=[] # XXX warn - elif not evaluate_marker(marker): - reqs=[] - extra = safe_extra(extra) or None - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - list(map(declare_namespace, 
self._get_metadata('namespace_packages.txt'))) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError(attr) - return getattr(self._provider, attr) - - @classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc = None): - 
"""Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= [(p and _normalize_cached(p) or p) for p in path] - - for p, item in enumerate(npath): - if item==nloc: - break - elif item==bdir and self.precedence==EGG_DIST: - # if it's an .egg, give it precedence over its directory - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while 1: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - p = np # ha! - - return - - def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', 
self._provider) - return self.__class__(**kw) - - @property - def extras(self): - return [dep for dep in self._dep_map if dep] - - -class DistInfoDistribution(Distribution): - """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" - PKG_INFO = 'METADATA' - EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") - - @property - def _parsed_pkg_info(self): - """Parse and cache metadata""" - try: - return self._pkg_info - except AttributeError: - from email.parser import Parser - self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO)) - return self._pkg_info - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._compute_dependencies() - return self.__dep_map - - def _preparse_requirement(self, requires_dist): - """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') - Split environment marker, add == prefix to version specifiers as - necessary, and remove parenthesis. - """ - parts = requires_dist.split(';', 1) + [''] - distvers = parts[0].strip() - mark = parts[1].strip() - distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) - distvers = distvers.replace('(', '').replace(')', '') - return (distvers, mark) - - def _compute_dependencies(self): - """Recompute this distribution's dependencies.""" - from _markerlib import compile as compile_marker - dm = self.__dep_map = {None: []} - - reqs = [] - # Including any condition expressions - for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - distvers, mark = self._preparse_requirement(req) - parsed = next(parse_requirements(distvers)) - parsed.marker_fn = compile_marker(mark) - reqs.append(parsed) - - def reqs_for_extra(extra): - for req in reqs: - if req.marker_fn(override={'extra':extra}): - yield req - - common = frozenset(reqs_for_extra(None)) - dm[None].extend(common) - - for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - extra = safe_extra(extra.strip()) - dm[extra] = 
list(frozenset(reqs_for_extra(extra)) - common) - - return dm - - -_distributionImpl = { - '.egg': Distribution, - '.egg-info': Distribution, - '.dist-info': DistInfoDistribution, - } - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = next(lines) - p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] 
- yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key != self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': - return False - elif action=='T': - return True - elif action=='+': - last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - @staticmethod - def parse(s): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs)==1: - return reqs[0] - raise 
ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - -state_machine = { - # =>< - '<': '--T', - '<=': 'T-T', - '>': 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. 
- """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager (deliberately not state-saved) -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -_declare_state('object', working_set = WorkingSet()) -try: - # Does the main program list any requirements? 
- from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path - -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script # backward compatibility -# Activate all distributions already on sys.path, and ensure that -# all distributions added to the working set in the future (e.g. by -# calling ``require()``) will get activated as well. 
-add_activation_listener(lambda dist: dist.activate()) -working_set.entries=[] -list(map(working_set.add_entry,sys.path)) # match order diff --git a/libs/setuptools-2.2/build/lib/setuptools/__init__.py b/libs/setuptools-2.2/build/lib/setuptools/__init__.py deleted file mode 100644 index fc9b7b9..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" - -import os -import sys -import distutils.core -import distutils.filelist -from distutils.core import Command as _Command -from distutils.util import convert_path - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched -from setuptools.depends import Require - -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - -# If we run 2to3 on .py files, should we also convert docstrings? -# Default: yes; assume that we can detect doctests reliably -run_2to3_on_doctests = True -# Standard package names for fixer packages -lib2to3_fixer_packages = ['lib2to3.fixes'] - -def find_packages(where='.', exclude=()): - """Return a list all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - looks_like_package = ( - '.' 
not in name - and os.path.isdir(fn) - and os.path.isfile(os.path.join(fn, '__init__.py')) - ) - if looks_like_package: - out.append(prefix+name) - stack.append((fn, prefix+name+'.')) - for pat in list(exclude)+['ez_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). - """ - all_files = [] - for base, dirs, files in os.walk(dir): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -distutils.filelist.findall = findall # fix findall bug in distutils. - -# sys.dont_write_bytecode was introduced in Python 2.6. 
-_dont_write_bytecode = getattr(sys, 'dont_write_bytecode', - bool(os.environ.get("PYTHONDONTWRITEBYTECODE"))) diff --git a/libs/setuptools-2.2/build/lib/setuptools/archive_util.py b/libs/setuptools-2.2/build/lib/setuptools/archive_util.py deleted file mode 100644 index 1109f34..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/archive_util.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile, tarfile, os, shutil, posixpath -from pkg_resources import ensure_directory -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - - - - - - - - - - - - - - - - - - - - - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None -): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. 
The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. - """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - - - - - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) - - paths = {filename:('',extract_dir)} - for base, dirs, files in os.walk(filename): - src,dst = paths[base] - for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) - for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) - if not target: - continue # skip non-files - ensure_directory(target) - f = os.path.join(base,f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - - - - - - - - - - - - - - - - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. 
- """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - z = zipfile.ZipFile(filename) - try: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' in name.split('/'): - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - f = open(target,'wb') - try: - f.write(data) - finally: - f.close() - del data - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) - finally: - z.close() - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - try: - tarobj.chown = lambda *args: None # don't do any chowning! - for member in tarobj: - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name.split('/'): - prelim_dst = os.path.join(extract_dir, *name.split('/')) - - # resolve any links and to extract the link targets as normal files - while member is not None and (member.islnk() or member.issym()): - linkpath = member.linkname - if member.issym(): - linkpath = posixpath.join(posixpath.dirname(member.name), linkpath) - linkpath = posixpath.normpath(linkpath) - member = tarobj._getmember(linkpath) - - if member is not None and (member.isfile() or member.isdir()): - final_dst = progress_filter(name, prelim_dst) - if final_dst: - if final_dst.endswith(os.sep): - final_dst = final_dst[:-1] - try: - tarobj._extract_member(member, final_dst) # XXX Ugh - except tarfile.ExtractError: - pass # chown/chmod/mkfifo/mknode/makedev failed - return True - finally: - tarobj.close() - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/libs/setuptools-2.2/build/lib/setuptools/cli-32.exe b/libs/setuptools-2.2/build/lib/setuptools/cli-32.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/cli-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/cli-64.exe b/libs/setuptools-2.2/build/lib/setuptools/cli-64.exe deleted file mode 100644 index 675e6bf..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/cli-64.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/cli-arm-32.exe b/libs/setuptools-2.2/build/lib/setuptools/cli-arm-32.exe deleted file mode 100644 index 2f40402..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/cli-arm-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/cli.exe b/libs/setuptools-2.2/build/lib/setuptools/cli.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/cli.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/__init__.py 
b/libs/setuptools-2.2/build/lib/setuptools/command/__init__.py deleted file mode 100644 index 29c9d75..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', -] - -from setuptools.command import install_scripts -import sys - -from distutils.command.bdist import bdist - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/alias.py b/libs/setuptools-2.2/build/lib/setuptools/command/alias.py deleted file mode 100644 index 52384e1..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/alias.py +++ /dev/null @@ -1,82 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * -from setuptools.command.setopt import edit_config, option_base, config_file - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) - if arg.split() != [arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - 
option_base.finalize_options(self) - if self.remove and len(self.args) != 1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print("Command Aliases") - print("---------------") - for alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - - elif len(self.args)==1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - else: - print("No alias definition found for %r" % alias) - return - else: - alias = self.args[0] - command = ' '.join(map(shquote,self.args[1:])) - - edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source+name+' '+command - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_egg.py b/libs/setuptools-2.2/build/lib/setuptools/command/bdist_egg.py deleted file mode 100644 index c577615..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,553 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -# This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command -from distutils.dir_util import remove_tree, mkpath -try: - # Python 2.7 or >=3.2 - from sysconfig import get_path, get_python_version - def _get_purelib(): - return get_path("purelib") -except ImportError: - from distutils.sysconfig import get_python_lib, get_python_version - def _get_purelib(): - return get_python_lib(False) - -from distutils import log 
-from distutils.errors import DistutilsSetupError -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from types import CodeType -from setuptools.compat import basestring, next -from setuptools.extension import Library - -def strip_module(filename): - if '.' in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " __loader__ = None; del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' - -class bdist_egg(Command): - - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - - - - - - - - - - - - - - - - - def initialize_options (self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - - def 
finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = get_build_platform() - - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(_get_purelib())) - old, self.distribution.data_files = self.distribution.data_files,[] - - for item in old: - if isinstance(item,tuple) and len(item)==2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized==site_packages or normalized.startswith( - site_packages+os.sep - ): - item = realpath[len(site_packages)+1:], item[1] - # XXX else: raise ??? 
- self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - - def get_outputs(self): - return [self.egg_output] - - - def call_command(self,cmdname,**kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname,self.bdist_dir) - kw.setdefault('skip_build',self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - - def run(self): - # Generate metadata first - self.run_command("egg_info") - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. - log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root; instcmd.root = None - if self.distribution.has_c_libraries() and not self.skip_build: - self.run_command('build_clib') - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p,ext_name) in enumerate(ext_outputs): - filename,ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep,'/') - - to_compile.extend(self.make_init_files()) - if to_compile: - cmd.byte_compile(to_compile) - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root,'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir 
= os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts',install_dir=script_dir,no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root,'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info,'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." - ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution,'dist_files',[]).append( - ('bdist_egg',get_python_version(),self.egg_output)) - - - - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base,dirs,files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base,name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution,'zip_safe',None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def make_init_files(self): - """Create missing package __init__ files""" - init_files = [] - for base,dirs,files in 
walk_egg(self.bdist_dir): - if base==self.bdist_dir: - # don't put an __init__ in the root - continue - for name in files: - if name.endswith('.py'): - if '__init__.py' not in files: - pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') - if self.distribution.has_contents_for(pkg): - log.warn("Creating missing __init__.py for %s",pkg) - filename = os.path.join(base,'__init__.py') - if not self.dry_run: - f = open(filename,'w'); f.write(NS_PKG_STUB) - f.close() - init_files.append(filename) - break - else: - # not a package, don't traverse to subdirectories - dirs[:] = [] - - return init_files - - def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation',{}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. - - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' - - - def copy_metadata_to(self, target_dir): - "Copy metadata (egg info) to the target_dir" - # normalize the path (so that a forward-slash in egg_info will - # match using startswith below) - norm_egg_info = os.path.normpath(self.egg_info) - prefix = 
os.path.join(norm_egg_info,'') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir:''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base]+filename) - for filename in dirs: - paths[os.path.join(base,filename)] = paths[base]+filename+'/' - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext,Library): - continue - fullname = build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir,filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base,dirs,files = next(walker) - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base,dirs,files - for bdf in walker: - yield bdf - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag,fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): - return flag - if not can_scan(): return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and 
safe - return safe - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag,fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe) != flag: - os.unlink(fn) - elif safe is not None and bool(safe)==flag: - f=open(fn,'wt'); f.write('\n'); f.close() - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base,name) - if filename[:-1] in stubs: - return True # Extension module - pkg = base[len(egg_dir)+1:].replace(os.sep,'.') - module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0] - if sys.version_info < (3, 3): - skip = 8 # skip magic & date - else: - skip = 12 # skip magic & date & file size - f = open(filename,'rb'); f.read(skip) - code = marshal.load(f); f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile' - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - if '__name__' in symbols and '__main__' in symbols and '.' 
not in module: - if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5 - log.warn("%s: top-level module may be 'python -m' script", module) - safe = False - return safe - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: yield name - for const in code.co_consts: - if isinstance(const,basestring): - yield const - elif isinstance(const,CodeType): - for name in iter_symbols(const): - yield name - -def can_scan(): - if not sys.platform.startswith('java') and sys.platform != 'cli': - # CPython, PyPy, etc. - return True - log.warn("Unable to analyze compiled code on this platform.") - log.warn("Please ask the author to include a 'zip_safe'" - " setting (either True or False) in the package's setup.py") - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - -def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, - mode='w' -): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. Returns the name of the output zip file. 
- """ - import zipfile - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir)+1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits - - compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in os.walk(base_dir): - visit(z, dirname, files) - z.close() - else: - for dirname, dirs, files in os.walk(base_dir): - visit(None, dirname, files) - return zip_filename -# diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_rpm.py b/libs/setuptools-2.2/build/lib/setuptools/command/bdist_rpm.py deleted file mode 100644 index 8c48da3..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,82 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. 
- -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - - - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_wininst.py b/libs/setuptools-2.2/build/lib/setuptools/command/bdist_wininst.py deleted file mode 100644 index e8521f8..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys - -class bdist_wininst(_bdist_wininst): - _good_upload = _bad_upload = None - - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, arcname, 
fullname, bitmap) - installer_name = self.get_installer_filename(fullname) - if self.target_version: - pyversion = self.target_version - # fix 2.5+ bdist_wininst ignoring --target-version spec - self._bad_upload = ('bdist_wininst', 'any', installer_name) - else: - pyversion = 'any' - self._good_upload = ('bdist_wininst', pyversion, installer_name) - - def _fix_upload_names(self): - good, bad = self._good_upload, self._bad_upload - dist_files = getattr(self.distribution, 'dist_files', []) - if bad in dist_files: - dist_files.remove(bad) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug - return cmd - - def run(self): - self._is_running = True - try: - _bdist_wininst.run(self) - self._fix_upload_names() - finally: - self._is_running = False - - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return installer_name - # get_installer_filename() - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/build_ext.py b/libs/setuptools-2.2/build/lib/setuptools/command/build_ext.py deleted file mode 100644 index 50a039c..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/build_ext.py +++ /dev/null @@ -1,298 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -try: - # Attempt to use Pyrex for building extensions, 
if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler -try: - # Python 2.7 or >=3.2 - from sysconfig import _CONFIG_VARS -except ImportError: - from distutils.sysconfig import get_config_var - get_config_var("LDSHARED") # make sure _config_vars is initialized - del get_config_var - from distutils.sysconfig import _config_vars as _CONFIG_VARS -from distutils import log -from distutils.errors import * - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True - except ImportError: - pass - -def if_dl(s): - if have_rtld: - return s - return '' - - - - - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. 
- copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - - if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) - if fullname in self.ext_map: - ext = self.ext_map[fullname] - if isinstance(ext,Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) - elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return os.path.join(d,'dl-'+fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? 
- self.ext_map[fullname.split('.')[-1]] = ext - - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - - - def get_export_symbols(self, ext): - if isinstance(ext,Library): - return ext.export_symbols - return 
_build_ext.get_export_symbols(self,ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext,Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) - if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False - - def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - if compile: - from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name=='nt': - # Build shared libraries - # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): - # XXX we need to either 
disallow these attrs on Library instances, - # or warn/abort here if set, or something... - #libraries=None, library_dirs=None, runtime_library_dirs=None, - #export_symbols=None, extra_preargs=None, extra_postargs=None, - #build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir,filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/build_py.py b/libs/setuptools-2.2/build/lib/setuptools/command/build_py.py deleted file mode 100644 index 090b44d..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/build_py.py +++ /dev/null @@ -1,221 +0,0 @@ -import os -import sys -import fnmatch -import textwrap -from distutils.command.build_py import build_py as _build_py -from distutils.util import convert_path -from glob import glob - -try: - from setuptools.lib2to3_ex import Mixin2to3 -except ImportError: - class Mixin2to3: - def run_2to3(self, files, doctests=True): - "do nothing" - -class build_py(_build_py, Mixin2to3): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. 
- """ - def finalize_options(self): - _build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] - self.__updated_files = [] - self.__doctests_2to3 = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - self.run_2to3(self.__updated_files, False) - self.run_2to3(self.__updated_files, True) - self.run_2to3(self.__doctests_2to3, True) - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self, attr): - if attr=='data_files': # lazily compute data files - self.data_files = files = self._get_data_files() - return files - return _build_py.__getattr__(self,attr) - - def build_module(self, module, module_file, package): - outfile, copied = _build_py.build_module(self, module, module_file, package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return 
filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - if copied and srcfile in self.distribution.convert_2to3_doctests: - self.__doctests_2to3.append(outf) - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d,f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d!=prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f==oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d],[]).append(path) - - def get_data_files(self): pass # kludge 2.4 for lazy computation - - if sys.version<"2.4": # Python 2.4 already has this code - def get_outputs(self, include_bytecode=1): - """Return complete list of files copied to the build directory - - This includes both '.py' files and data files, as well as '.pyc' - and '.pyo' files if 'include_bytecode' is true. 
(This method is - needed for the 'install_lib' command to do its job properly, and to - generate a correct installation manifest.) - """ - return _build_py.get_outputs(self, include_bytecode) + [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files - for filename in filenames - ] - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = _build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): - break - else: - return init_py - - f = open(init_py,'rbU') - if 'declare_namespace'.encode() not in f.read(): - from distutils import log - log.warn( - "WARNING: %s is a namespace package, but its __init__.py does\n" - "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n", package - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) - - def get_package_dir(self, package): - res = _build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - seen = {} - return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f,1) # ditch 
dupes - ] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - msg = textwrap.dedent(""" - Error: setup script specifies an absolute path: - - %s - - setup() arguments must *always* be /-separated paths relative to the - setup.py directory, *never* absolute paths. - """).lstrip() % path - raise DistutilsSetupError(msg) diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/develop.py b/libs/setuptools-2.2/build/lib/setuptools/command/develop.py deleted file mode 100644 index 1d50004..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/develop.py +++ /dev/null @@ -1,167 +0,0 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path, subst_vars -from pkg_resources import Distribution, PathMetadata, normalize_path -from distutils import log -from distutils.errors import DistutilsError, DistutilsOptionError -import os, sys, setuptools, glob - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' 
# always copy eggs installed in curdir - - - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) - self.args = [ei.egg_name] - - - - - easy_install.finalize_options(self) - self.expand_basedirs() - self.expand_dirs() - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = normalize_path(self.egg_base) - if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to "+target - ) - - # Make a distribution for the package's source - self.dist = Distribution( - target, - PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name - ) - - p = self.egg_base.replace(os.sep,'/') - if p!= os.curdir: - p = '../' * (p.count('/')+1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) - - def install_for_development(self): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - 
self.run_command('build_ext') - - # Fixup egg-link and easy-install.pth - ei_cmd = self.get_finalized_command("egg_info") - self.egg_path = build_path - self.dist.location = build_path - self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - self.install_site_py() # ensure that target dir is site-safe - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link,"w") - f.write(self.egg_path + "\n" + self.setup_path) - f.close() - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - egg_link_file = open(self.egg_link) - contents = [line.rstrip() for line in egg_link_file] - egg_link_file.close() - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! 
- log.warn("Note: you must uninstall or replace scripts manually!") - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path,'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/easy_install.py b/libs/setuptools-2.2/build/lib/setuptools/command/easy_install.py deleted file mode 100644 index 08ebf3e..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/easy_install.py +++ /dev/null @@ -1,1915 +0,0 @@ -#!/usr/bin/env python - -""" -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
- -__ https://pythonhosted.org/setuptools/easy_install.html - -""" - -import sys -import os -import zipimport -import shutil -import tempfile -import zipfile -import re -import stat -import random -import platform -import textwrap -import warnings -import site -import struct -from glob import glob -from distutils import log, dir_util - -import pkg_resources -from setuptools import Command, _dont_write_bytecode -from setuptools.sandbox import run_setup -from setuptools.py31compat import get_path, get_config_vars - -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS -from setuptools.command import setopt -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from setuptools.compat import (iteritems, maxsize, basestring, unicode, - reraise) -from pkg_resources import ( - yield_lines, normalize_path, resource_string, ensure_directory, - get_distribution, find_distributions, Environment, Requirement, - Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, - VersionConflict, DEVELOP_DIST, -) - -sys_executable = os.environ.get('__VENV_LAUNCHER__', - os.path.normpath(sys.executable)) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - -def is_64bit(): - return struct.calcsize("P") == 8 - -def samefile(p1, p2): - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - -if 
sys.version_info <= (3,): - def _to_ascii(s): - return s - def isascii(s): - try: - unicode(s, 'ascii') - return True - except UnicodeError: - return False -else: - def _to_ascii(s): - return s.encode('ascii') - def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=','S',"list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', - "allow building eggs from local checkouts"), - ('version', None, "print version information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed") - ] - boolean_options = [ - 'zip-ok', 'multi-version', 
'exclude-scripts', 'upgrade', 'always-copy', - 'editable', - 'no-deps', 'local-snapshots-ok', 'version' - ] - - if site.ENABLE_USER_SITE: - help_msg = "install in user site-package '%s'" % site.USER_SITE - user_options.append(('user', None, help_msg)) - boolean_options.append('user') - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - if site.ENABLE_USER_SITE: - whereami = os.path.abspath(__file__) - self.user = whereami.startswith(site.USER_SITE) - else: - self.user = 0 - - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. 
This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - for filename in blockers: - if os.path.exists(filename) or os.path.islink(filename): - log.info("Deleting %s", filename) - if not self.dry_run: - if os.path.isdir(filename) and not os.path.islink(filename): - rmtree(filename) - else: - os.unlink(filename) - - def finalize_options(self): - if self.version: - print('setuptools %s' % get_distribution('setuptools').version) - sys.exit() - - py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - - self.config_vars = { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - } - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - # fix the install_dir if "--user" was used - #XXX: duplicate of the code in the setup command - if self.user and site.ENABLE_USER_SITE: - self.create_home_path() - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - if os.name == 'posix': - self.select_scheme("unix_user") - else: - self.select_scheme(os.name + "_user") - - 
self.expand_basedirs() - self.expand_dirs() - - self._expand('install_dir','script_dir','build_directory','site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options('install_lib', - ('install_dir','install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options('install_scripts', - ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? 
It's not used - # elsewhere - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d+" (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "https://pypi.python.org/simple" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path = self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path+sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path+sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize','optimize')) - if not isinstance(self.optimize,int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified 
(see --help)") - - self.outputs = [] - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def run(self): - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except: - pid = random.randint(0, maxsize) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - pass - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir,'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? 
- is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname()+'.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: os.unlink(testfile) - open(testfile,'w').close() - os.unlink(testfile) - except (OSError,IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep) - if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - elif self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - def cant_write_to_target(self): - template = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" - msg = template % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" - else: - msg += """ -Perhaps your account does not have write access to this directory? 
If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. - -For information on other options, you may wish to consult the -documentation at: - - https://pythonhosted.org/setuptools/easy_install.html - -Please make the appropriate changes for your system and try again. -""" - raise DistutilsError(msg) - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - f = open(pth_file,'w') - except (OSError,IOError): - self.cant_write_to_target() - else: - try: - f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,)) - f.close() - f=None - executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: - f.close() - if os.path.exists(ok_file): - os.unlink(ok_file) - if os.path.exists(pth_file): - os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for 
`dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - if dist.metadata_isdir('scripts/' + script_name): - # The "script" is a directory, likely a Python 3 - # __pycache__ directory, so skip it. - continue - self.install_script( - dist, script_name, - dist.get_metadata('scripts/'+script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base,filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." - % (spec,) - ) - - def check_editable(self,spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - if not self.editable: self.install_site_py() - - try: - if not isinstance(spec,Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, not self.always_copy, - self.local_index - ) - if dist is None: - msg = "Could not find suitable 
distribution for %r" % spec - if self.always_copy: - msg+=" (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence==DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. - for dist in self.local_index[spec.project_name]: - if dist.location==download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.egg_distribution(download)] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if (dist.has_metadata('dependency_links.txt') and - not self.no_find_links): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound: - e = sys.exc_info()[1] - raise DistutilsError( - "Could not find required distribution %s" % e.args - ) - except VersionConflict: - e = sys.exc_info()[1] - raise DistutilsError( - "Installed distribution %s conflicts with requirement %s" - % e.args - ) - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if 
dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - msg = "%r already exists in %s; build directory %s will not be kept" - log.warn(msg, spec.key, self.build_directory, setup_base) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename)==setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents)==1: - dist_filename = os.path.join(setup_base,contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst) - shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in get_script_args(dist): - self.write_script(*args) - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = is_python_script(script_text, script_name) - - def get_template(filename): - """ - There are a couple of template scripts in the package. This - function loads one of them and prepares it for use. - - These templates use triple-quotes to escape variable - substitutions so the scripts get the 2to3 treatment when build - on Python 3. The templates cannot use triple-quotes naturally. 
- """ - raw_bytes = resource_string('setuptools', template_name) - template_str = raw_bytes.decode('utf-8') - clean_template = template_str.replace('"""', '') - return clean_template - - if is_script: - template_name = 'script template.py' - if dev_path: - template_name = template_name.replace('.py', ' (dev).py') - script_text = (get_script_header(script_text) + - get_template(template_name) % locals()) - self.write_script(script_name, _to_ascii(script_text), 'b') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - if os.path.exists(target): - os.unlink(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target, 0x1FF-mask) # 0777 - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups 
= glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) - ) - if len(setups)>1: - raise DistutilsError( - "Multiple setup scripts in %s" % os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - (os.path.basename(egg_path),os.path.dirname(destination))) - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = 
extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution( - None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform=get_platform(), - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') - egg_tmp = egg_path + '.tmp' - _egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') - f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): - if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) - f.close() - script_dir = os.path.join(_egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - def process(src,dst): - s = src.lower() - for old,new in prefixes: - if s.startswith(old): - src = new+src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - 
top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile) - stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level','native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') - if not os.path.exists(txt): - f = open(txt,'w') - f.write('\n'.join(locals()[name])+'\n') - f.close() - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += """ - -Because this distribution was installed --multi-version, before you can -import modules from this package in an application, you will need to -'import pkg_resources' and then use a 'require()' call similar to one of -these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on 
sys.path at runtime for -this to work. (e.g. by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) -""" - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. -""" % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose>2: - v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') - if self.dry_run: - args.insert(0,'-n') - log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit: - v = sys.exc_info()[1] - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - self._set_fetcher_options(os.path.dirname(setup_script)) - args.append(dist_dir) - - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - 
rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def _set_fetcher_options(self, base): - """ - When easy_install is about to run bdist_egg on a source dist, that - source dist might have 'setup_requires' directives, requiring - additional fetching. Ensure the fetcher options given to easy_install - are available to that command as well. - """ - # find the fetch options from easy_install and write them out - # to the setup.cfg file. - ei_opts = self.distribution.get_option_dict('easy_install').copy() - fetch_directives = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts', - ) - fetch_options = {} - for key, val in ei_opts.items(): - if key not in fetch_directives: continue - fetch_options[key.replace('_', '-')] = val[1] - # create a settings dictionary suitable for `edit_config` - settings = dict(easy_install=fetch_options) - cfg_filename = os.path.join(base, 'setup.cfg') - setopt.edit_config(cfg_filename, settings) - - def update_pth(self, dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key=='setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
- filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - to_chmod = [] - - def pf(src, dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src,dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0x16D) & 0xFED # 0555, 07755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if _dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - def no_default_version_msg(self): - template = """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. 
The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations - -Please make the appropriate changes for your system and try again.""" - return template % (self.install_dir, os.environ.get('PYTHONPATH','')) - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy,'rb') - current = f.read() - # we want str, not bytes - if sys.version_info >= (3,): - current = current.decode() - - f.close() - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." 
% sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy,'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in iteritems(self.config_vars): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0700)" % path) - os.makedirs(path, 0x1C0) # 0700 - - INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) - - from distutils.util import subst_vars - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = [_f for _f in os.environ.get('PYTHONPATH', - '').split(os.pathsep) if _f] - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, 
"lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - lib_paths = get_path('purelib'), get_path('platlib') - for site_lib in lib_paths: - if site_lib not in sitedirs: sitedirs.append(site_lib) - - if site.ENABLE_USER_SITE: - sitedirs.append(site.USER_SITE) - - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth','setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname,name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a ConfigParser.RawConfigParser, or None - """ - f = open(dist_filename,'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] 
- if prepended < 12: # no wininst data here - return None - f.seek(prepended-12) - - from setuptools.compat import StringIO, ConfigParser - import struct - tag, cfglen, bmlen = struct.unpack("= (2,6): - null_byte = bytes([0]) - else: - null_byte = chr(0) - config = part.split(null_byte, 1)[0] - # Now the config is in bytes, but for RawConfigParser, it should - # be text, so decode it. - config = config.decode(sys.getfilesystemencoding()) - cfg.readfp(StringIO(config)) - except ConfigParser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - - finally: - f.close() - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/'), - ('DATA/lib/site-packages', ''), - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts)==3 and parts[2]=='PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts) != 2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB','PLATLIB'): - contents = z.read(name) - if sys.version_info >= (3,): - contents = contents.decode() - for pth in yield_lines(contents): - pth = pth.strip().replace('\\','/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0],pth)), '')) - finally: - z.close() - prefixes = [(x.lower(),y) for x, y in prefixes] - prefixes.sort() - prefixes.reverse() - return prefixes - - -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = 
False - - def __init__(self, filename, sitedirs=()): - self.filename = filename - self.sitedirs = list(map(normalize_path, sitedirs)) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load() - Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - list(map(self.add, find_distributions(path, True))) - - def _load(self): - self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename,'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir,path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative,self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename,'wt') - f.write(data) - f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - def add(self, dist): - """Add `dist` to 
the distribution map""" - if (dist.location not in self.paths and ( - dist.location not in self.sitedirs or - dist.location == os.getcwd() # account for '.' being in PYTHONPATH - )): - self.paths.append(dist.location) - self.dirty = True - Environment.add(self, dist) - - def remove(self, dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location) - self.dirty = True - Environment.remove(self, dist) - - def make_relative(self,path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep=='/' and '/' or os.sep - while len(npath)>=baselen: - if npath==self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - -def get_script_header(script_text, executable=sys_executable, wininst=False): - """Create a #! line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re - - # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. 
- if not isinstance(first_line_re.pattern, str): - first_line_re = re.compile(first_line_re.pattern.decode()) - - first = (script_text+'\n').splitlines()[0] - match = first_line_re.match(first) - options = '' - if match: - options = match.group(1) or '' - if options: options = ' '+options - if wininst: - executable = "python.exe" - else: - executable = nt_quote_arg(executable) - hdr = "#!%(executable)s%(options)s\n" % locals() - if not isascii(hdr): - # Non-ascii path to sys.executable, use -x to prevent warnings - if options: - if options.strip().startswith('-'): - options = ' -x'+options.strip()[1:] - # else: punt, we can't do it, let the warning happen anyway - else: - options = ' -x' - executable = fix_jython_executable(executable, options) - hdr = "#!%(executable)s%(options)s\n" % locals() - return hdr - -def auto_chmod(func, arg, exc): - if func is os.remove and os.name=='nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - et, ev, _ = sys.exc_info() - reraise(et, (ev[0], ev[1] + (" %s %s" % (func,arg)))) - -def uncache_zipdir(path): - """Ensure that the importer caches dont have stale info for `path`""" - from zipimport import _zip_directory_cache as zdc - _uncache(path, zdc) - _uncache(path, sys.path_importer_cache) - -def _uncache(path, cache): - if path in cache: - del cache[path] - else: - path = normalize_path(path) - for p in cache: - if normalize_path(p)==path: - del cache[p] - return - -def is_python(text, filename=''): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - fp = open(executable) - magic = fp.read(2) - fp.close() - except (OSError,IOError): return executable - return magic == '#!' 
- -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - - result = [] - needquote = False - nb = 0 - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - nb += 1 - elif c == '"': - # double preceding backslashes, then add a \" - result.append('\\' * (nb*2) + '\\"') - nb = 0 - else: - if nb: - result.append('\\' * nb) - nb = 0 - result.append(c) - - if nb: - result.append('\\' * nb) - - if needquote: - result.append('\\' * nb) # double the trailing backslashes - result.append('"') - - return ''.join(result) - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): pass - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error: - e = sys.exc_info()[1] - log.debug("chmod failed: %s", e) - -def fix_jython_executable(executable, options): - if sys.platform.startswith('java') and is_sh(executable): - # Workaround for Jython is not needed on Linux systems. 
- import java - if java.lang.System.getProperty("os.name") == "Linux": - return executable - - # Workaround Jython's sys.executable being a .sh (an invalid - # shebang line interpreter) - if options: - # Can't apply the workaround, leave it broken - log.warn( - "WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - else: - return '/usr/bin/env %s' % executable - return executable - - -class ScriptWriter(object): - """ - Encapsulates behavior around writing entry point scripts for console and - gui apps. - """ - - template = textwrap.dedent(""" - # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r - __requires__ = %(spec)r - import sys - from pkg_resources import load_entry_point - - if __name__ == '__main__': - sys.exit( - load_entry_point(%(spec)r, %(group)r, %(name)r)() - ) - """).lstrip() - - @classmethod - def get_script_args(cls, dist, executable=sys_executable, wininst=False): - """ - Yield write_script() argument tuples for a distribution's entrypoints - """ - gen_class = cls.get_writer(wininst) - spec = str(dist.as_requirement()) - header = get_script_header("", executable, wininst) - for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name, ep in dist.get_entry_map(group).items(): - script_text = gen_class.template % locals() - for res in gen_class._get_script_args(type_, name, header, - script_text): - yield res - - @classmethod - def get_writer(cls, force_windows): - if force_windows or sys.platform=='win32': - return WindowsScriptWriter.get_writer() - return cls - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - # Simply write the stub with no extension. 
- yield (name, header+script_text) - - -class WindowsScriptWriter(ScriptWriter): - @classmethod - def get_writer(cls): - """ - Get a script writer suitable for Windows - """ - writer_lookup = dict( - executable=WindowsExecutableLauncherWriter, - natural=cls, - ) - # for compatibility, use the executable launcher by default - launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') - return writer_lookup[launcher] - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - "For Windows, add a .py extension" - ext = dict(console='.pya', gui='.pyw')[type_] - if ext not in os.environ['PATHEXT'].lower().split(';'): - warnings.warn("%s not listed in PATHEXT; scripts will not be " - "recognized as executables." % ext, UserWarning) - old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] - old.remove(ext) - header = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield name+ext, header+script_text, 't', blockers - - @staticmethod - def _adjust_header(type_, orig_header): - """ - Make sure 'pythonw' is used for gui and and 'python' is used for - console (regardless of what sys.executable is). 
- """ - pattern = 'pythonw.exe' - repl = 'python.exe' - if type_ == 'gui': - pattern, repl = repl, pattern - pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) - new_header = pattern_ob.sub(string=orig_header, repl=repl) - clean_header = new_header[2:-1].strip('"') - if sys.platform == 'win32' and not os.path.exists(clean_header): - # the adjusted version doesn't exist, so return the original - return orig_header - return new_header - - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - """ - For Windows, add a .py extension and an .exe launcher - """ - if type_=='gui': - launcher_type = 'gui' - ext = '-script.pyw' - old = ['.pyw'] - else: - launcher_type = 'cli' - ext = '-script.py' - old = ['.py','.pyc','.pyo'] - hdr = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield (name+ext, hdr+script_text, 't', blockers) - yield ( - name+'.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode - ) - if not is_64bit(): - # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for - # launchers like easy_install.exe). Consider only - # adding a manifest for launchers detected as installers. - # See Distribute #143 for details. - m_name = name + '.exe.manifest' - yield (m_name, load_launcher_manifest(name), 't') - -# for backward-compatibility -get_script_args = ScriptWriter.get_script_args - -def get_win_launcher(type): - """ - Load the Windows launcher (executable) suitable for launching a script. - - `type` should be either 'cli' or 'gui' - - Returns the executable as a byte string. 
- """ - launcher_fn = '%s.exe' % type - if platform.machine().lower()=='arm': - launcher_fn = launcher_fn.replace(".", "-arm.") - if is_64bit(): - launcher_fn = launcher_fn.replace(".", "-64.") - else: - launcher_fn = launcher_fn.replace(".", "-32.") - return resource_string('setuptools', launcher_fn) - -def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if sys.version_info[0] < 3: - return manifest % vars() - else: - return manifest.decode('utf-8') % vars() - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - -def current_umask(): - tmp = os.umask(0x12) # 022 - os.umask(tmp) - return tmp - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools - argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0 - sys.argv.append(argv0) - main() - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - import distutils.core - - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... 
- or: %(script)s --help -""" - - def gen_usage(script_name): - return USAGE % dict( - script=os.path.basename(script_name), - ) - - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/egg_info.py b/libs/setuptools-2.2/build/lib/setuptools/command/egg_info.py deleted file mode 100644 index 5953aad..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/egg_info.py +++ /dev/null @@ -1,392 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -import os -import re -import sys - -from setuptools import Command -import distutils.errors -from distutils import log -from setuptools.command.sdist import sdist -from setuptools.compat import basestring -from setuptools import svn_utils -from distutils.util import convert_path -from distutils.filelist import FileList as _FileList -from pkg_resources import (parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) -from setuptools.command.sdist import walk_revctrl - - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} - - def initialize_options(self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - self.vtags = None - - def save_version_info(self, filename): - from setuptools.command.setopt import edit_config - values = dict( - egg_info=dict( - tag_svn_revision=0, - tag_date=0, - tag_build=self.tags(), - ) - ) - edit_config(filename, values) - - def finalize_options(self): - self.egg_name = safe_name(self.distribution.get_name()) - self.vtags = self.tags() - self.egg_version = self.tagged_version() - - try: - list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) - ) - except ValueError: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) 
- # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. - """ - log.info("writing %s to %s", what, filename) - if sys.version_info >= (3,): - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def tagged_version(self): - version = self.distribution.get_version() - # egg_info may be called more than once for a distribution, - # in which case the version string already contains all tags. 
- if self.vtags and version.endswith(self.vtags): - return safe_version(version) - return safe_version(version + self.vtags) - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def tags(self): - version = '' - if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() - if self.tag_date: - import time - version += time.strftime("-%Y%m%d") - return version - - @staticmethod - def get_svn_revision(): - return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-"*78+'\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - -class FileList(_FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if sys.version_info 
>= (3,): - try: - if os.path.exists(path) or os.path.exists(path.encode('utf-8')): - self.files.append(path) - except UnicodeEncodeError: - # Accept UTF-8 filenames even if LANG=C - if os.path.exists(path.encode('utf-8')): - self.files.append(path) - else: - log.warn("'%s' not %s encodable -- skipping", path, - sys.getfilesystemencoding()) - else: - if os.path.exists(path): - self.files.append(path) - - -class manifest_maker(sdist): - - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - # The manifest must be UTF-8 encodable. See #303. 
- if sys.version_info >= (3,): - files = [] - for file in self.filelist.files: - try: - file.encode("utf-8") - except UnicodeEncodeError: - log.warn("'%s' not UTF-8 encodable -- skipping" % file) - else: - files.append(file) - self.filelist.files = files - - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) - - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): - sdist.warn(self, msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. 
- """ - contents = "\n".join(contents) - if sys.version_info >= (3,): - contents = contents.encode("utf-8") - f = open(filename, "wb") # always write POSIX-style manifest - f.write(contents) - f.close() - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution,'zip_safe',None) - from setuptools.command import bdist_egg - bdist_egg.write_safety_flag(cmd.egg_info, safe) - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [ - k.split('.',1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value, force) - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep,basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - f.close() - return 0 diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/install.py b/libs/setuptools-2.2/build/lib/setuptools/command/install.py deleted file mode 100644 index 459cd3c..0000000 --- 
a/libs/setuptools-2.2/build/lib/setuptools/command/install.py +++ /dev/null @@ -1,103 +0,0 @@ -import setuptools -import sys -import glob -from distutils.command.install import install as _install -from distutils.errors import DistutilsArgError - -class install(_install): - """Use easy_install to install the package, w/dependencies""" - - user_options = _install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = _install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - _install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! - - def finalize_options(self): - _install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return _install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) - - # Attempt to detect whether we were called from setup() or by another - # command. 
If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) - else: - self.do_egg_install() - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in install._nc - ] + install.new_commands diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/install_egg_info.py b/libs/setuptools-2.2/build/lib/setuptools/command/install_egg_info.py deleted file mode 100644 index f44b34b..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,125 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - 
elif os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. - for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wt') - for pkg in nsp: - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' 
in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,types,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() - - def _get_all_ns_packages(self): - nsp = {} - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp['.'.join(pkg)] = 1 - pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/install_lib.py b/libs/setuptools-2.2/build/lib/setuptools/command/install_lib.py deleted file mode 100644 index 82afa14..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/install_lib.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, 
*parts) - for f in '__init__.py', '__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/install_scripts.py b/libs/setuptools-2.2/build/lib/setuptools/command/install_scripts.py deleted file mode 100644 index 105dabc..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/install_scripts.py +++ /dev/null @@ -1,54 +0,0 @@ -from distutils.command.install_scripts import install_scripts \ - as _install_scripts -from pkg_resources import Distribution, PathMetadata, ensure_directory -import os -from distutils import log - -class install_scripts(_install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - from setuptools.command.easy_install import get_script_args - from setuptools.command.easy_install import sys_executable - - self.run_command("egg_info") - if self.distribution.scripts: - 
_install_scripts.run(self) # run first to set up self.outfiles - else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - is_wininst = getattr( - self.get_finalized_command("bdist_wininst"), '_is_running', False - ) - for args in get_script_args(dist, executable, is_wininst): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod, current_umask - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target, 0x1FF-mask) # 0777 - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/launcher manifest.xml b/libs/setuptools-2.2/build/lib/setuptools/command/launcher manifest.xml deleted file mode 100644 index 844d226..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/launcher manifest.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/register.py b/libs/setuptools-2.2/build/lib/setuptools/command/register.py deleted file mode 100644 index 3b2e085..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -from distutils.command.register import register as _register - -class register(_register): - __doc__ = _register.__doc__ - - def run(self): - # Make sure that we are using valid current 
name/version info - self.run_command('egg_info') - _register.run(self) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/rotate.py b/libs/setuptools-2.2/build/lib/setuptools/command/rotate.py deleted file mode 100644 index b10acfb..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/rotate.py +++ /dev/null @@ -1,83 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.compat import basestring -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. 
'.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t,f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/saveopts.py b/libs/setuptools-2.2/build/lib/setuptools/command/saveopts.py deleted file mode 100644 index 7209be4..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/saveopts.py +++ /dev/null @@ -1,24 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.command.setopt import edit_config, option_base - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - settings = {} - - for cmd in dist.command_options: - - if cmd=='saveopts': - continue # don't save our own options! 
- - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val - - edit_config(self.filename, settings, self.dry_run) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/sdist.py b/libs/setuptools-2.2/build/lib/setuptools/command/sdist.py deleted file mode 100644 index 76e1c5f..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/sdist.py +++ /dev/null @@ -1,244 +0,0 @@ -import os -import re -import sys -from glob import glob - -import pkg_resources -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -from distutils import log -from setuptools import svn_utils - -READMES = ('README', 'README.rst', 'README.txt') - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - - -#TODO will need test case -class re_finder(object): - """ - Finder that locates files based on entries in a file matched by a - regular expression. 
- """ - - def __init__(self, path, pattern, postproc=lambda x: x): - self.pattern = pattern - self.postproc = postproc - self.entries_path = convert_path(path) - - def _finder(self, dirname, filename): - f = open(filename,'rU') - try: - data = f.read() - finally: - f.close() - for match in self.pattern.finditer(data): - path = match.group(1) - # postproc was formerly used when the svn finder - # was an re_finder for calling unescape - path = self.postproc(path) - yield svn_utils.joinpath(dirname, path) - - def find(self, dirname=''): - path = svn_utils.joinpath(dirname, self.entries_path) - - if not os.path.isfile(path): - # entries file doesn't exist - return - for path in self._finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in self.find(path): - yield item - __call__ = find - - -def _default_revctrl(dirname=''): - 'Primary svn_cvs entry point' - for finder in finders: - for item in finder(dirname): - yield item - - -finders = [ - re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)), - svn_utils.svn_finder, -] - - -class sdist(_sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) - self.check_readme() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Call check_metadata only if no 'check' command - # (distutils <= 2.6) - import distutils.command - if 'check' not in distutils.command.__all__: - 
self.check_metadata() - - self.make_distribution() - - dist_files = getattr(self.distribution,'dist_files',[]) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def __read_template_hack(self): - # This grody hack closes the template file (MANIFEST.in) if an - # exception occurs during read_template. - # Doing so prevents an error when easy_install attempts to delete the - # file. - try: - _sdist.read_template(self) - except: - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() - raise - # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle - # has been fixed, so only override the method if we're using an earlier - # Python. - has_leaky_handle = ( - sys.version_info < (2,7,2) - or (3,0) <= sys.version_info < (3,1,4) - or (3,2) <= sys.version_info < (3,2,1) - ) - if has_leaky_handle: - read_template = __read_template_hack - - def add_defaults(self): - standards = [READMES, - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if os.path.exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if os.path.exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = list(filter(os.path.isfile, glob(pattern))) - if files: - self.filelist.extend(files) - - # getting python files - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - # This functionality is incompatible with include_package_data, and - # will in fact create an infinite recursion if include_package_data - # is True. 
Use of include_package_data will imply that - # distutils-style automatic handling of package_data is disabled - if not self.distribution.include_package_data: - for _, src_dir, _, filenames in build_py.data_files: - self.filelist.extend([os.path.join(src_dir, filename) - for filename in filenames]) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def check_readme(self): - for f in READMES: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " +', '.join(READMES) - ) - - def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os,'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - def _manifest_is_not_generated(self): - # check for special comment used in 2.7.1 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest, 'rbU') - try: - first_line = fp.readline() - finally: - fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode() - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. 
- """ - log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rbU') - for line in manifest: - # The manifest must contain UTF-8. See #303. - if sys.version_info >= (3,): - try: - line = line.decode('UTF-8') - except UnicodeDecodeError: - log.warn("%r not UTF-8 decodable -- skipping" % line) - continue - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - manifest.close() diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/setopt.py b/libs/setuptools-2.2/build/lib/setuptools/command/setopt.py deleted file mode 100644 index aa468c8..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/setopt.py +++ /dev/null @@ -1,164 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind=='local': - return 'setup.cfg' - if kind=='global': - return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' - ) - if kind=='user': - dot = os.name=='posix' and '.' or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - - - - - - - - - - - - - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. 
- """ - from setuptools.compat import ConfigParser - log.debug("Reading configuration from %s", filename) - opts = ConfigParser.RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option,value in options.items(): - if value is None: - log.debug("Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section,option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section,option,value) - - log.info("Writing %s", filename) - if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames)>1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - - - 
-class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: {self.option.replace('-','_'):self.set_value} - }, - self.dry_run - ) - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/test.py b/libs/setuptools-2.2/build/lib/setuptools/command/test.py deleted file mode 100644 index db2fc7b..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/test.py +++ /dev/null @@ -1,198 +0,0 @@ -from setuptools import Command -from distutils.errors import DistutilsOptionError -import sys -from pkg_resources import * -from pkg_resources import _namespace_packages -from unittest import TestLoader, main - -class ScanningLoader(TestLoader): - - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. 
- """ - tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] - else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests)!=1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 'some_module.test_suite')"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - - - def finalize_options(self): - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module+".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0,'--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - - - - def with_project_on_sys_path(self, func): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', 
inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - self.run_command('build_ext') - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs(self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' '.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.with_project_on_sys_path(self.run_tests) - - - def run_tests(self): - import unittest - - # Purge modules under test from sys.modules. The test loader will - # re-import them from the build location. Required when 2to3 is used - # with namespace packages. - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - module = self.test_args[-1].split('.')[0] - if module in _namespace_packages: - del_modules = [] - if module in sys.modules: - del_modules.append(module) - module += '.' 
- for name in sys.modules: - if name.startswith(module): - del_modules.append(name) - list(map(sys.modules.__delitem__, del_modules)) - - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - cks = loader_class() - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = cks - ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/command/upload_docs.py b/libs/setuptools-2.2/build/lib/setuptools/command/upload_docs.py deleted file mode 100644 index cad7a52..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/command/upload_docs.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -"""upload_docs - -Implements a Distutils 'upload_docs' subcommand (upload documentation to -PyPI's pythonhosted.org). -""" - -import os -import socket -import zipfile -import tempfile -import sys -import shutil - -from base64 import standard_b64encode -from pkg_resources import iter_entry_points - -from distutils import log -from distutils.errors import DistutilsOptionError -from distutils.command.upload import upload - -from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 - -errors = 'surrogateescape' if PY3 else 'strict' - - -# This is not just a replacement for byte literals -# but works as a general purpose encoder -def b(s, encoding='utf-8'): - if isinstance(s, unicode): - return s.encode(encoding, errors) - return s - - -class upload_docs(upload): - - description = 'Upload documentation to PyPI' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('upload-dir=', None, 'directory to upload'), - ] - boolean_options = upload.boolean_options - - def has_sphinx(self): - if self.upload_dir is None: - for ep in iter_entry_points('distutils.commands', 'build_sphinx'): - return True - - sub_commands 
= [('build_sphinx', has_sphinx)] - - def initialize_options(self): - upload.initialize_options(self) - self.upload_dir = None - self.target_dir = None - - def finalize_options(self): - upload.finalize_options(self) - if self.upload_dir is None: - if self.has_sphinx(): - build_sphinx = self.get_finalized_command('build_sphinx') - self.target_dir = build_sphinx.builder_target_dir - else: - build = self.get_finalized_command('build') - self.target_dir = os.path.join(build.build_base, 'docs') - else: - self.ensure_dirname('upload_dir') - self.target_dir = self.upload_dir - self.announce('Using upload directory %s' % self.target_dir) - - def create_zipfile(self, filename): - zip_file = zipfile.ZipFile(filename, "w") - try: - self.mkpath(self.target_dir) # just in case - for root, dirs, files in os.walk(self.target_dir): - if root == self.target_dir and not files: - raise DistutilsOptionError( - "no files found in upload directory '%s'" - % self.target_dir) - for name in files: - full = os.path.join(root, name) - relative = root[len(self.target_dir):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - finally: - zip_file.close() - - def run(self): - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - tmp_dir = tempfile.mkdtemp() - name = self.distribution.metadata.get_name() - zip_file = os.path.join(tmp_dir, "%s.zip" % name) - try: - self.create_zipfile(zip_file) - self.upload_file(zip_file) - finally: - shutil.rmtree(tmp_dir) - - def upload_file(self, filename): - f = open(filename, 'rb') - content = f.read() - f.close() - meta = self.distribution.metadata - data = { - ':action': 'doc_upload', - 'name': meta.get_name(), - 'content': (os.path.basename(filename), content), - } - # set up the authentication - credentials = b(self.username + ':' + self.password) - credentials = standard_b64encode(credentials) - if PY3: - credentials = credentials.decode('ascii') - auth = "Basic " + credentials 
- - # Build up the MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b('\n--') + b(boundary) - end_boundary = sep_boundary + b('--') - body = [] - for key, values in iteritems(data): - title = '\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(values, list): - values = [values] - for value in values: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = b(value) - body.append(sep_boundary) - body.append(b(title)) - body.append(b("\n\n")) - body.append(value) - if value and value[-1:] == b('\r'): - body.append(b('\n')) # write an extra newline (lurve Macs) - body.append(end_boundary) - body.append(b("\n")) - body = b('').join(body) - - self.announce("Submitting documentation to %s" % (self.repository), - log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - conn = httplib.HTTPConnection(netloc) - elif schema == 'https': - conn = httplib.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported schema "+schema) - - data = '' - try: - conn.connect() - conn.putrequest("POST", url) - content_type = 'multipart/form-data; boundary=%s' % boundary - conn.putheader('Content-type', content_type) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - except socket.error: - e = sys.exc_info()[1] - self.announce(str(e), log.ERROR) - return - - r = conn.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 
'https://pythonhosted.org/%s/' % meta.get_name() - self.announce('Upload successful. Visit %s' % location, - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print('-'*75, r.read(), '-'*75) diff --git a/libs/setuptools-2.2/build/lib/setuptools/compat.py b/libs/setuptools-2.2/build/lib/setuptools/compat.py deleted file mode 100644 index 7b824ba..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/compat.py +++ /dev/null @@ -1,83 +0,0 @@ -import sys -import itertools - -if sys.version_info[0] < 3: - PY3 = False - - basestring = basestring - import __builtin__ as builtins - import ConfigParser - from StringIO import StringIO - BytesIO = StringIO - execfile = execfile - func_code = lambda o: o.func_code - func_globals = lambda o: o.func_globals - im_func = lambda o: o.im_func - from htmlentitydefs import name2codepoint - import httplib - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - from BaseHTTPServer import BaseHTTPRequestHandler - iteritems = lambda o: o.iteritems() - long_type = long - maxsize = sys.maxint - next = lambda o: o.next() - numeric_types = (int, long, float) - unichr = unichr - unicode = unicode - bytes = str - from urllib import url2pathname, splittag, pathname2url - import urllib2 - from urllib2 import urlopen, HTTPError, URLError, unquote, splituser - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - filterfalse = itertools.ifilterfalse - - exec("""def reraise(tp, value, tb=None): - raise tp, value, tb""") -else: - PY3 = True - - basestring = str - import builtins - import configparser as ConfigParser - from io import StringIO, BytesIO - func_code = lambda o: o.__code__ - func_globals = lambda o: o.__globals__ - im_func = lambda o: o.__func__ - from html.entities import name2codepoint - import http.client as httplib - from http.server import HTTPServer, SimpleHTTPRequestHandler - from http.server 
import BaseHTTPRequestHandler - iteritems = lambda o: o.items() - long_type = int - maxsize = sys.maxsize - next = next - numeric_types = (int, float) - unichr = chr - unicode = str - bytes = bytes - from urllib.error import HTTPError, URLError - import urllib.request as urllib2 - from urllib.request import urlopen, url2pathname, pathname2url - from urllib.parse import ( - urlparse, urlunparse, unquote, splituser, urljoin, urlsplit, - urlunsplit, splittag, - ) - filterfalse = itertools.filterfalse - - def execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - f = open(fn, 'rb') - try: - source = f.read() - finally: - f.close() - exec(compile(source, fn, 'exec'), globs, locs) - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value diff --git a/libs/setuptools-2.2/build/lib/setuptools/depends.py b/libs/setuptools-2.2/build/lib/setuptools/depends.py deleted file mode 100644 index 8b9d121..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/depends.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import generators -import sys, imp, marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % 
(self.name,self.requested_version) - return self.name - - - def version_ok(self,version): - """Is 'version' sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version) != "unknown" and version >= self.requested_version - - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. - """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module,self.attribute,default,paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - - def is_present(self,paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - - def is_current(self,paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * long_type(65536) - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - - - - - - - - - -def find_module(module, paths=None): - 
"""Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - - - - - - - - - - - - - - - - - - - - - - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix,mode,kind) = find_module(module,paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) - - finally: - if f: - f.close() - - return extract_constant(code,symbol,default) - - - - - - - - -def extract_constant(code,symbol,default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. 
- """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default - -if sys.platform.startswith('java') or sys.platform == 'cli': - # XXX it'd be better to test assertions about bytecode instead... - del extract_constant, get_module_constant - __all__.remove('extract_constant') - __all__.remove('get_module_constant') - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/dist.py b/libs/setuptools-2.2/build/lib/setuptools/dist.py deleted file mode 100644 index 0801ae7..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/dist.py +++ /dev/null @@ -1,797 +0,0 @@ -__all__ = ['Distribution'] - -import re -import os -import sys -import warnings -import distutils.log -import distutils.core -import distutils.cmd -from distutils.core import Distribution as _Distribution -from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, - DistutilsSetupError) - -from setuptools.depends import Require -from setuptools.compat import numeric_types, basestring -import pkg_resources - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. 
- """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - if ':' in k: - k,m = k.split(':',1) - if pkg_resources.invalid_marker(m): - raise DistutilsSetupError("Invalid environment marker: "+m) - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
- ) - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError: - e = sys.exc_info()[1] - raise DistutilsSetupError(e) - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only" - ".-separated package names in setup.py", pkgname - ) - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. 
They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' **deprecated** -- a dictionary mapping option names to - 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. - - 'test_suite' -- the name of a test suite to run for the 'test' command. 
- If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. 
- # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__(self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - _attrs_dict = attrs or {} - if 'features' in _attrs_dict or 'require_features' in _attrs_dict: - Feature.warn_deprecated() - self.require_features = [] - self.features = {} - self.dist_files = [] - self.src_root = attrs and attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs is not None: - self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, numeric_types): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - parse_requirements(requires), 
installer=self.fetch_build_egg, - replace_conflicting=True - ): - working_set.add(dist, replace=True) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - if getattr(self, 'convert_2to3_doctests', None): - # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] - else: - self.convert_2to3_doctests = [] - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - - try: - cmd = self._egg_fetcher - cmd.package_index.to_scan = [] - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in list(opts): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report=True, user=False - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if 
feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 
if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' - if self.packages: - self.packages = [ - p for p in self.packages - if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' 
- - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. 
The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - list(map(self.exclude_package, packages)) - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! - import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. 
- """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - import sys - - if sys.version_info < (3,) or self.help_commands: - return _Distribution.handle_display_options(self, option_order) - - # Stdout may be StringIO (e.g. in tests) - import io - if not isinstance(sys.stdout, io.TextIOWrapper): - return _Distribution.handle_display_options(self, option_order) - - # Don't wrap stdout if utf-8 is already the encoding. Provides - # workaround for #334. 
- if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): - return _Distribution.handle_display_options(self, option_order) - - # Print metadata in UTF-8 no matter the platform - encoding = sys.stdout.encoding - errors = sys.stdout.errors - newline = sys.platform != 'win32' and '\n' or None - line_buffering = sys.stdout.line_buffering - - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) - try: - return _Distribution.handle_display_options(self, option_order) - finally: - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), encoding, errors, newline, line_buffering) - - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - -class Feature: - """ - **deprecated** -- The `Feature` facility was never completely implemented - or supported, `has reported issues - `_ and will be removed in - a future version. - - A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. 
- - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - - @staticmethod - def warn_deprecated(): - warnings.warn( - "Features are deprecated and will be removed in a future " - "version. 
See http://bitbucket.org/pypa/setuptools/65.", - DeprecationWarning, - stacklevel=3, - ) - - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras): - self.warn_deprecated() - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. 
- """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. - """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) diff --git a/libs/setuptools-2.2/build/lib/setuptools/extension.py b/libs/setuptools-2.2/build/lib/setuptools/extension.py deleted file mode 100644 index d7892d3..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/extension.py +++ /dev/null @@ -1,46 +0,0 @@ -import sys -import distutils.core -import distutils.extension - -from setuptools.dist import _get_unpatched - -_Extension = _get_unpatched(distutils.core.Extension) - -def have_pyrex(): - """ - Return True if Cython or Pyrex can be imported. 
- """ - pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' - for pyrex_impl in pyrex_impls: - try: - # from (pyrex_impl) import build_ext - __import__(pyrex_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, *args, **kw): - _Extension.__init__(self, *args, **kw) - if not have_pyrex(): - self._convert_pyx_sources_to_c() - - def _convert_pyx_sources_to_c(self): - "convert .pyx extensions to .c" - def pyx_to_c(source): - if source.endswith('.pyx'): - source = source[:-4] + '.c' - return source - self.sources = list(map(pyx_to_c, self.sources)) - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/libs/setuptools-2.2/build/lib/setuptools/gui-32.exe b/libs/setuptools-2.2/build/lib/setuptools/gui-32.exe deleted file mode 100644 index f8d3509..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/gui-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/gui-64.exe b/libs/setuptools-2.2/build/lib/setuptools/gui-64.exe deleted file mode 100644 index 330c51a..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/gui-64.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/gui-arm-32.exe b/libs/setuptools-2.2/build/lib/setuptools/gui-arm-32.exe deleted file mode 100644 index 537aff3..0000000 Binary files a/libs/setuptools-2.2/build/lib/setuptools/gui-arm-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/gui.exe b/libs/setuptools-2.2/build/lib/setuptools/gui.exe deleted file mode 100644 index f8d3509..0000000 Binary files 
a/libs/setuptools-2.2/build/lib/setuptools/gui.exe and /dev/null differ diff --git a/libs/setuptools-2.2/build/lib/setuptools/lib2to3_ex.py b/libs/setuptools-2.2/build/lib/setuptools/lib2to3_ex.py deleted file mode 100644 index feef591..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/lib2to3_ex.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Customized Mixin2to3 support: - - - adds support for converting doctests - - -This module raises an ImportError on Python 2. -""" - -from distutils.util import Mixin2to3 as _Mixin2to3 -from distutils import log -from lib2to3.refactor import RefactoringTool, get_fixers_from_package -import setuptools - -class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - -class Mixin2to3(_Mixin2to3): - def run_2to3(self, files, doctests = False): - # See of the distribution option has been set, otherwise check the - # setuptools default. 
- if self.distribution.use_2to3 is not True: - return - if not files: - return - log.info("Fixing "+" ".join(files)) - self.__build_fixer_names() - self.__exclude_fixers() - if doctests: - if setuptools.run_2to3_on_doctests: - r = DistutilsRefactoringTool(self.fixer_names) - r.refactor(files, write=True, doctests_only=True) - else: - _Mixin2to3.run_2to3(self, files) - - def __build_fixer_names(self): - if self.fixer_names: return - self.fixer_names = [] - for p in setuptools.lib2to3_fixer_packages: - self.fixer_names.extend(get_fixers_from_package(p)) - if self.distribution.use_2to3_fixers is not None: - for p in self.distribution.use_2to3_fixers: - self.fixer_names.extend(get_fixers_from_package(p)) - - def __exclude_fixers(self): - excluded_fixers = getattr(self, 'exclude_fixers', []) - if self.distribution.use_2to3_exclude_fixers is not None: - excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) - for fixer_name in excluded_fixers: - if fixer_name in self.fixer_names: - self.fixer_names.remove(fixer_name) diff --git a/libs/setuptools-2.2/build/lib/setuptools/package_index.py b/libs/setuptools-2.2/build/lib/setuptools/package_index.py deleted file mode 100644 index 167c34e..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/package_index.py +++ /dev/null @@ -1,1058 +0,0 @@ -"""PyPI and direct package downloading""" -import sys -import os -import re -import shutil -import socket -import base64 -import hashlib -from functools import wraps - -from pkg_resources import ( - CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, - require, Environment, find_distributions, safe_name, safe_version, - to_filename, Requirement, DEVELOP_DIST, -) -from setuptools import ssl_support -from distutils import log -from distutils.errors import DistutilsError -from setuptools.compat import (urllib2, httplib, StringIO, HTTPError, - urlparse, urlunparse, unquote, splituser, - url2pathname, name2codepoint, - unichr, urljoin, urlsplit, urlunsplit, - 
ConfigParser) -from setuptools.compat import filterfalse -from fnmatch import translate -from setuptools.py26compat import strip_fragment -from setuptools.py27compat import get_all_headers - -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -# this is here to fix emacs' cruddy broken syntax highlighting -PYPI_MD5 = re.compile( - '([^<]+)\n\s+\\(md5\\)' -) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() - -__all__ = [ - 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', - 'interpret_distro_name', -] - -_SOCKET_TIMEOUT = 15 - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver, plat = None, None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - plat = 'win32' - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - plat = 'win32' - elif lower.endswith('.win-amd64.exe'): - base = name[:-14] - plat = 'win-amd64' - elif lower.startswith('.win-amd64-py',-20): - py_ver = name[-7:-4] - base = name[:-20] - plat = 'win-amd64' - return base,py_ver,plat - - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse(url) - base = unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck - base = unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist - if fragment: - match = EGG_FRAGMENT.match(fragment) - if match: - for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ): - yield dist - -def 
distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - if basename.endswith('.exe'): - win_base, py_ver, platform = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, platform - ) - # Try source distro extensions (.zip, .tgz, etc.) - # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name( - location, basename, metadata, py_version=None, precedence=SOURCE_DIST, - platform=None - ): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). 
In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). - - parts = basename.split('-') - if not py_version: - for i,p in enumerate(parts[2:]): - if len(p)==5 and p.startswith('py2.'): - return # It's a bdist_dumb, not an sdist -- bail out - - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - -# From Python 2.7 docs -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - -def unique_values(func): - """ - Wrap a function returning an iterable such that the resulting iterable - only ever yields unique items. 
- """ - @wraps(func) - def wrapper(*args, **kwargs): - return unique_everseen(func(*args, **kwargs)) - return wrapper - -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) -# this line is here to fix emacs' cruddy broken syntax highlighting - -@unique_values -def find_external_links(url, page): - """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" - - for match in REL.finditer(page): - tag, rel = match.groups() - rels = set(map(str.strip, rel.lower().split(','))) - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - yield urljoin(url, htmldecode(match.group(1))) - - for tag in ("Home Page", "Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - yield urljoin(url, htmldecode(match.group(1))) - -user_agent = "Python-urllib/%s setuptools/%s" % ( - sys.version[:3], require('setuptools')[0].version -) - -class ContentChecker(object): - """ - A null content checker that defines the interface for checking content - """ - def feed(self, block): - """ - Feed a block of data to the hash. - """ - return - - def is_valid(self): - """ - Check the hash. Return False if validation fails. - """ - return True - - def report(self, reporter, template): - """ - Call reporter with information about the checker (hash name) - substituted into the template. 
- """ - return - -class HashChecker(ContentChecker): - pattern = re.compile( - r'(?Psha1|sha224|sha384|sha256|sha512|md5)=' - r'(?P[a-f0-9]+)' - ) - - def __init__(self, hash_name, expected): - self.hash_name = hash_name - self.hash = hashlib.new(hash_name) - self.expected = expected - - @classmethod - def from_url(cls, url): - "Construct a (possibly null) ContentChecker from a URL" - fragment = urlparse(url)[-1] - if not fragment: - return ContentChecker() - match = cls.pattern.search(fragment) - if not match: - return ContentChecker() - return cls(**match.groupdict()) - - def feed(self, block): - self.hash.update(block) - - def is_valid(self): - return self.hash.hexdigest() == self.expected - - def report(self, reporter, template): - msg = template % self.hash_name - return reporter(msg) - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__( - self, index_url="https://pypi.python.org/simple", hosts=('*',), - ca_bundle=None, verify_ssl=True, *args, **kw - ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): - self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib2.urlopen - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - list(map(self.add, dists)) - return # don't need 
the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - self.fetched_urls[url] = True # prevent multiple fetch attempts - f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) - if f is None: return - self.fetched_urls[f.url] = True - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. - if isinstance(f, HTTPError): - # Errors have no charset, assume latin1: - charset = 'latin-1' - else: - charset = f.headers.get_param('charset') or 'latin-1' - page = page.decode(charset, "ignore") - f.close() - for match in HREF.finditer(page): - link = urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: - page = self.process_index(url, page) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - list(map(self.add, dists)) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]): - return True - msg = ("\nNote: Bypassing %s (disallowed host; see " - "http://bit.ly/1dg9ijs for details).\n") - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) - - def scan_egg_link(self, path, 
entry): - lines = [_f for _f in map(str.strip, - open(os.path.join(path, entry))) if _f] - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = list(map( - unquote, link[len(self.index_url):].split('/') - )) - if len(parts)==2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - # process an index page into the package-page index - for match in HREF.finditer(page): - try: - scan(urljoin(url, htmldecode(match.group(1)))) - except ValueError: - pass - - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '%s' % m.group(1,3,2), page - ) - else: - return "" # no sense double-scanning non-package pages - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, *args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.info( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not 
self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.not_found_in_index(requirement) - - for url in list(self.package_pages.get(requirement.key,())): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan() - self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - def check_hash(self, checker, filename, tfp): - """ - checker is a ContentChecker - """ - checker.report(self.debug, - "Validating %%s checksum for %s" % filename) - if not checker.is_valid(): - tfp.close() - os.unlink(filename) - raise DistutilsError( - "%s validation failed for %s; " - "possible download problem?" % ( - checker.hash.name, os.path.basename(filename)) - ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. 
--find-links)""" - if self.to_scan: - list(map(self.scan_url, self.to_scan)) - self.to_scan = None # from now on, go ahead and process immediately - - def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro - meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled - meth, msg = (self.warn, - "Couldn't find index page for %r (maybe misspelled?)") - meth(msg, requirement.unsafe_name) - self.scan_all() - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. 
- """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - def fetch_distribution( - self, requirement, tmpdir, force_scan=False, source=False, - develop_ok=False, local_index=None - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. 
- """ - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(req, env=None): - if env is None: - env = self - # Find a matching distribution; may be called more than once - - for dist in env[req.key]: - - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - return dist - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(requirement) - - if local_index is not None: - dist = dist or find(requirement, local_index) - - if dist is None: - if self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - else: - self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. 
- """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [ - d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - file.close() - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." 
- ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp, tfp, info = None, None, None - try: - checker = HashChecker.from_url(url) - fp = self.open_url(strip_fragment(url)) - if isinstance(fp, HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - # Some servers return multiple Content-Length headers :( - sizes = get_all_headers(headers, 'Content-Length') - size = max(map(int, sizes)) - self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - checker.feed(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - self.check_hash(checker, filename, tfp) - return headers - finally: - if fp: fp.close() - if tfp: tfp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - def open_url(self, url, warning=None): - if url.startswith('file:'): - return local_open(url) - try: - return open_with_auth(url, self.opener) - except (ValueError, httplib.InvalidURL): - v = sys.exc_info()[1] - msg = ' '.join([str(arg) for arg in v.args]) - if warning: - self.warn(warning, msg) - else: - raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError: - v = sys.exc_info()[1] - return v - except urllib2.URLError: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v.reason) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) - except httplib.BadStatusLine: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v.line) - else: - raise DistutilsError( - '%s returned a bad status line. 
The server might be ' - 'down, %s' % - (url, v.line) - ) - except httplib.HTTPException: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v)) - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name, fragment = egg_info_for_url(url) - if name: - while '..' in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme=='git' or scheme.startswith('git+'): - return self._download_git(url, filename) - elif scheme.startswith('hg+'): - return self._download_hg(url, filename) - elif scheme=='file': - return url2pathname(urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - def scan_url(self, url): - self.process_url(url, True) - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - creds = '' - if url.lower().startswith('svn:') and '@' 
in url: - scheme, netloc, path, p, q, f = urlparse(url) - if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/',1) - auth, host = splituser(netloc) - if auth: - if ':' in auth: - user, pw = auth.split(':',1) - creds = " --username=%s --password=%s" % (user, pw) - else: - creds = " --username="+auth - netloc = host - url = urlunparse((scheme, netloc, url, p, q, f)) - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout%s -q %s %s" % (creds, url, filename)) - return filename - - @staticmethod - def _vcs_split_rev_from_url(url, pop_prefix=False): - scheme, netloc, path, query, frag = urlsplit(url) - - scheme = scheme.split('+', 1)[-1] - - # Some fragment identification fails - path = path.split('#',1)[0] - - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - - # Also, discard fragment - url = urlunsplit((scheme, netloc, path, query, '')) - - return url, rev - - def _download_git(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing git clone from %s to %s", url, filename) - os.system("git clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Checking out %s", rev) - os.system("(cd %s && git checkout --quiet %s)" % ( - filename, - rev, - )) - - return filename - - def _download_hg(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing hg clone from %s to %s", url, filename) - os.system("hg clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Updating to %s", rev) - os.system("(cd %s && hg up -C -r %s >&-)" % ( - filename, - rev, - )) - - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - -# This pattern matches a 
character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). -entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - -def uchr(c): - if not isinstance(c, int): - return c - if c>255: return unichr(c) - return chr(c) - -def decode_entity(match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - what = name2codepoint.get(what, match.group(0)) - return uchr(what) - -def htmldecode(text): - """Decode HTML entities in the given text.""" - return entity_sub(decode_entity, text) - -def socket_timeout(timeout=15): - def _socket_timeout(func): - def _socket_timeout(*args, **kwargs): - old_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(timeout) - try: - return func(*args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - return _socket_timeout - return _socket_timeout - -def _encode_auth(auth): - """ - A function compatible with Python 2.3-3.3 that will encode - auth from a URL suitable for an HTTP header. - >>> str(_encode_auth('username%3Apassword')) - 'dXNlcm5hbWU6cGFzc3dvcmQ=' - - Long auth strings should not cause a newline to be inserted. - >>> long_auth = 'username:' + 'password'*10 - >>> chr(10) in str(_encode_auth(long_auth)) - False - """ - auth_s = unquote(auth) - # convert to bytes - auth_bytes = auth_s.encode() - # use the legacy interface for Python 2.3 support - encoded_bytes = base64.encodestring(auth_bytes) - # convert back to a string - encoded = encoded_bytes.decode() - # strip the trailing carriage return - return encoded.replace('\n','') - -class Credential(object): - """ - A username/password pair. Use like a namedtuple. 
- """ - def __init__(self, username, password): - self.username = username - self.password = password - - def __iter__(self): - yield self.username - yield self.password - - def __str__(self): - return '%(username)s:%(password)s' % vars(self) - -class PyPIConfig(ConfigParser.ConfigParser): - - def __init__(self): - """ - Load from ~/.pypirc - """ - defaults = dict.fromkeys(['username', 'password', 'repository'], '') - ConfigParser.ConfigParser.__init__(self, defaults) - - rc = os.path.join(os.path.expanduser('~'), '.pypirc') - if os.path.exists(rc): - self.read(rc) - - @property - def creds_by_repository(self): - sections_with_repositories = [ - section for section in self.sections() - if self.get(section, 'repository').strip() - ] - - return dict(map(self._get_repo_cred, sections_with_repositories)) - - def _get_repo_cred(self, section): - repo = self.get(section, 'repository').strip() - return repo, Credential( - self.get(section, 'username').strip(), - self.get(section, 'password').strip(), - ) - - def find_credential(self, url): - """ - If the URL indicated appears to be a repository defined in this - config, return the credential for that repository. - """ - for repository, cred in self.creds_by_repository.items(): - if url.startswith(repository): - return cred - - -def open_with_auth(url, opener=urllib2.urlopen): - """Open a urllib2 request, handling HTTP authentication""" - - scheme, netloc, path, params, query, frag = urlparse(url) - - # Double scheme does not raise on Mac OS X as revealed by a - # failing test. We would expect "nonnumeric port". Refs #20. 
- if netloc.endswith(':'): - raise httplib.InvalidURL("nonnumeric port: ''") - - if scheme in ('http', 'https'): - auth, host = splituser(netloc) - else: - auth = None - - if not auth: - cred = PyPIConfig().find_credential(url) - if cred: - auth = str(cred) - info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)' % info) - - if auth: - auth = "Basic " + _encode_auth(auth) - new_url = urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib2.Request(url) - - request.add_header('User-Agent', user_agent) - fp = opener(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse(fp.url) - if s2==scheme and h2==host: - fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2)) - - return fp - -# adding a timeout to avoid freezing package_index -open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) - - -def fix_sf_url(url): - return url # backward compatibility - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse(url) - filename = url2pathname(path) - if os.path.isfile(filename): - return urllib2.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - if f=='index.html': - fp = open(os.path.join(filename,f),'r') - body = fp.read() - fp.close() - break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) - else: - body = ("<html><head><title>%s" % url) + \ - "%s" % '\n'.join(files) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - headers = {'content-type': 'text/html'} - return HTTPError(url, status, message, headers, StringIO(body)) diff --git 
a/libs/setuptools-2.2/build/lib/setuptools/py26compat.py b/libs/setuptools-2.2/build/lib/setuptools/py26compat.py deleted file mode 100644 index 738b0cc..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/py26compat.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Compatibility Support for Python 2.6 and earlier -""" - -import sys - -from setuptools.compat import splittag - -def strip_fragment(url): - """ - In `Python 8280 `_, Python 2.7 and - later was patched to disregard the fragment when making URL requests. - Do the same for Python 2.6 and earlier. - """ - url, fragment = splittag(url) - return url - -if sys.version_info >= (2,7): - strip_fragment = lambda x: x diff --git a/libs/setuptools-2.2/build/lib/setuptools/py27compat.py b/libs/setuptools-2.2/build/lib/setuptools/py27compat.py deleted file mode 100644 index 9d2886d..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/py27compat.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Compatibility Support for Python 2.7 and earlier -""" - -import sys - -def get_all_headers(message, key): - """ - Given an HTTPMessage, return all headers matching a given key. 
- """ - return message.get_all(key) - -if sys.version_info < (3,): - def get_all_headers(message, key): - return message.getheaders(key) diff --git a/libs/setuptools-2.2/build/lib/setuptools/py31compat.py b/libs/setuptools-2.2/build/lib/setuptools/py31compat.py deleted file mode 100644 index dbb324b..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/py31compat.py +++ /dev/null @@ -1,11 +0,0 @@ -__all__ = ['get_config_vars', 'get_path'] - -try: - # Python 2.7 or >=3.2 - from sysconfig import get_config_vars, get_path -except ImportError: - from distutils.sysconfig import get_config_vars, get_python_lib - def get_path(name): - if name not in ('platlib', 'purelib'): - raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') diff --git a/libs/setuptools-2.2/build/lib/setuptools/sandbox.py b/libs/setuptools-2.2/build/lib/setuptools/sandbox.py deleted file mode 100644 index 042c595..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/sandbox.py +++ /dev/null @@ -1,322 +0,0 @@ -import os -import sys -import tempfile -import operator -import functools -import itertools -import re - -import pkg_resources - -if os.name == "java": - import org.python.modules.posix.PosixModule as _os -else: - _os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open -from distutils.errors import DistutilsError -from pkg_resources import working_set - -from setuptools.compat import builtins, execfile - -__all__ = [ - "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", -] - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - save_modules = sys.modules.copy() - pr_state = 
pkg_resources.__getstate__() - try: - tempfile.tempdir = temp_dir - os.chdir(setup_dir) - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit: - v = sys.exc_info()[1] - if v.args and v.args[0]: - raise - # Normal exit, just return - finally: - pkg_resources.__setstate__(pr_state) - sys.modules.update(save_modules) - # remove any modules imported within the sandbox - del_modules = [ - mod_name for mod_name in sys.modules - if mod_name not in save_modules - # exclude any encodings modules. See #285 - and not mod_name.startswith('encodings.') - ] - list(map(sys.modules.__delitem__, del_modules)) - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True - return func() - finally: - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = 
_mk_dual_path_wrapper(name) - - def _mk_single_path_wrapper(name, original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in [ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -if hasattr(os, 'devnull'): - 
_EXCEPTIONS = [os.devnull,] -else: - _EXCEPTIONS = [] - -try: - from win32com.client.gencache import GetGeneratePath - _EXCEPTIONS.append(GetGeneratePath()) - del GetGeneratePath -except ImportError: - # it appears pywin32 is not installed, so no need to exclude. - pass - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - _exception_patterns = [ - # Allow lib2to3 to attempt to save a pickled grammar object (#121) - '.*lib2to3.*\.pickle$', - ] - "exempt writing to paths that match the pattern" - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - self._exceptions = [ - os.path.normcase(os.path.realpath(path)) - for path in exceptions - ] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - if _file: - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self, path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - return ( - self._exempted(realpath) - or realpath == self._sandbox - or realpath.startswith(self._prefix) - ) - finally: - self._active = active - - def _exempted(self, filepath): - start_matches = ( - filepath.startswith(exception) - for exception in self._exceptions 
- ) - pattern_matches = ( - re.match(pattern, filepath) - for pattern in self._exception_patterns - ) - candidates = itertools.chain(start_matches, pattern_matches) - return any(candidates) - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777 - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file,flags,mode, *args, **kw) - -WRITE_FLAGS = functools.reduce( - operator.or_, [getattr(_os, a, 0) for a in - "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] -) - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. 
Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# diff --git a/libs/setuptools-2.2/build/lib/setuptools/script template (dev).py b/libs/setuptools-2.2/build/lib/setuptools/script template (dev).py deleted file mode 100644 index b3fe209..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/script template (dev).py +++ /dev/null @@ -1,11 +0,0 @@ -# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import sys -from pkg_resources import require -require("""%(spec)r""") -del require -__file__ = """%(dev_path)r""" -if sys.version_info < (3, 0): - execfile(__file__) -else: - exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/libs/setuptools-2.2/build/lib/setuptools/script template.py b/libs/setuptools-2.2/build/lib/setuptools/script template.py deleted file mode 100644 index 8dd5d51..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/script template.py +++ /dev/null @@ -1,4 +0,0 @@ -# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import pkg_resources -pkg_resources.run_script("""%(spec)r""", """%(script_name)r""") diff --git a/libs/setuptools-2.2/build/lib/setuptools/site-patch.py b/libs/setuptools-2.2/build/lib/setuptools/site-patch.py deleted file mode 100644 index a7166f1..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/site-patch.py +++ /dev/null @@ -1,83 +0,0 @@ -def __boot(): - import sys, os, os.path - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. 
on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - import imp # Avoid import loop in Python >= 3.3 - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p,np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot - - - - - - - - diff --git a/libs/setuptools-2.2/build/lib/setuptools/ssl_support.py b/libs/setuptools-2.2/build/lib/setuptools/ssl_support.py deleted file mode 100644 index 7b5f429..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/ssl_support.py +++ /dev/null @@ -1,234 +0,0 @@ -import os -import socket -import atexit -import re - -import pkg_resources -from pkg_resources import ResolutionError, ExtractionError -from setuptools.compat import urllib2 - -try: - import ssl -except ImportError: - ssl = None - 
-__all__ = [ - 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', - 'opener_for' -] - -cert_paths = """ -/etc/pki/tls/certs/ca-bundle.crt -/etc/ssl/certs/ca-certificates.crt -/usr/share/ssl/certs/ca-bundle.crt -/usr/local/share/certs/ca-root.crt -/etc/ssl/cert.pem -/System/Library/OpenSSL/certs/cert.pem -""".strip().split() - - -HTTPSHandler = HTTPSConnection = object - -for what, where in ( - ('HTTPSHandler', ['urllib2','urllib.request']), - ('HTTPSConnection', ['httplib', 'http.client']), -): - for module in where: - try: - exec("from %s import %s" % (module, what)) - except ImportError: - pass - -is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) - - -try: - from ssl import CertificateError, match_hostname -except ImportError: - try: - from backports.ssl_match_hostname import CertificateError - from backports.ssl_match_hostname import match_hostname - except ImportError: - CertificateError = None - match_hostname = None - -if not CertificateError: - class CertificateError(ValueError): - pass - -if not match_hostname: - def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - # Ported from python3-syntax: - # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') - leftmost = parts[0] - remainder = parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. 
- # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. 
- if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") - - -class VerifyingHTTPSHandler(HTTPSHandler): - """Simple verifying handler: no auth, subclasses, timeouts, etc.""" - - def __init__(self, ca_bundle): - self.ca_bundle = ca_bundle - HTTPSHandler.__init__(self) - - def https_open(self, req): - return self.do_open( - lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req - ) - - -class VerifyingHTTPSConn(HTTPSConnection): - """Simple verifying connection: no auth, subclasses, timeouts, etc.""" - def __init__(self, host, ca_bundle, **kw): - HTTPSConnection.__init__(self, host, **kw) - self.ca_bundle = ca_bundle - - def connect(self): - sock = socket.create_connection( - (self.host, self.port), getattr(self, 'source_address', None) - ) - - # Handle the socket if a (proxy) tunnel is present - if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): - self.sock = sock - self._tunnel() - - self.sock = ssl.wrap_socket( - sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle - ) - try: - match_hostname(self.sock.getpeercert(), self.host) - except CertificateError: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - -def opener_for(ca_bundle=None): - """Get a urlopen() replacement that uses ca_bundle for verification""" - return urllib2.build_opener( - VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) - ).open - - -_wincerts = None - -def get_win_certfile(): - global _wincerts - if _wincerts is not None: - return _wincerts.name - - try: - from wincertstore import CertFile - except ImportError: - return None - - class 
MyCertFile(CertFile): - def __init__(self, stores=(), certs=()): - CertFile.__init__(self) - for store in stores: - self.addstore(store) - self.addcerts(certs) - atexit.register(self.close) - - _wincerts = MyCertFile(stores=['CA', 'ROOT']) - return _wincerts.name - - -def find_ca_bundle(): - """Return an existing CA bundle path, or None""" - if os.name=='nt': - return get_win_certfile() - else: - for cert_path in cert_paths: - if os.path.isfile(cert_path): - return cert_path - try: - return pkg_resources.resource_filename('certifi', 'cacert.pem') - except (ImportError, ResolutionError, ExtractionError): - return None diff --git a/libs/setuptools-2.2/build/lib/setuptools/svn_utils.py b/libs/setuptools-2.2/build/lib/setuptools/svn_utils.py deleted file mode 100644 index e6d09d1..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/svn_utils.py +++ /dev/null @@ -1,564 +0,0 @@ -import os -import re -import sys -from distutils import log -import xml.dom.pulldom -import shlex -import locale -import codecs -import unicodedata -import warnings -from setuptools.compat import unicode -from xml.sax.saxutils import unescape - -try: - import urlparse -except ImportError: - import urllib.parse as urlparse - -from subprocess import Popen as _Popen, PIPE as _PIPE - -#NOTE: Use of the command line options require SVN 1.3 or newer (December 2005) -# and SVN 1.3 hasn't been supported by the developers since mid 2008. 
- -#subprocess is called several times with shell=(sys.platform=='win32') -#see the follow for more information: -# http://bugs.python.org/issue8557 -# http://stackoverflow.com/questions/5658622/ -# python-subprocess-popen-environment-path - - -def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0): - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(args, stdout=stdout, stderr=stderr, - shell=(sys.platform == 'win32')) - - data = proc.communicate()[stream] - except OSError: - return 1, '' - - #doubled checked and - data = decode_as_string(data, encoding) - - #communciate calls wait() - return proc.returncode, data - - -def _get_entry_schedule(entry): - schedule = entry.getElementsByTagName('schedule')[0] - return "".join([t.nodeValue - for t in schedule.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_target_property(target): - property_text = target.getElementsByTagName('property')[0] - return "".join([t.nodeValue - for t in property_text.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_xml_data(decoded_str): - if sys.version_info < (3, 0): - #old versions want an encoded string - data = decoded_str.encode('utf-8') - else: - data = decoded_str - return data - - -def joinpath(prefix, *suffix): - if not prefix or prefix == '.': - return os.path.join(*suffix) - return os.path.join(prefix, *suffix) - -def determine_console_encoding(): - try: - #try for the preferred encoding - encoding = locale.getpreferredencoding() - - #see if the locale.getdefaultlocale returns null - #some versions of python\platforms return US-ASCII - #when it cannot determine an encoding - if not encoding or encoding == "US-ASCII": - encoding = locale.getdefaultlocale()[1] - - if encoding: - codecs.lookup(encoding) # make sure a lookup error is not made - - except (locale.Error, LookupError): - encoding = None - - is_osx = sys.platform == "darwin" - if not encoding: - return ["US-ASCII", "utf-8"][is_osx] - elif 
encoding.startswith("mac-") and is_osx: - #certain versions of python would return mac-roman as default - #OSX as a left over of earlier mac versions. - return "utf-8" - else: - return encoding - -_console_encoding = determine_console_encoding() - -def decode_as_string(text, encoding=None): - """ - Decode the console or file output explicitly using getpreferredencoding. - The text paraemeter should be a encoded string, if not no decode occurs - If no encoding is given, getpreferredencoding is used. If encoding is - specified, that is used instead. This would be needed for SVN --xml - output. Unicode is explicitly put in composed NFC form. - - --xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion - DEV List from 2007 seems to indicate the same. - """ - #text should be a byte string - - if encoding is None: - encoding = _console_encoding - - if not isinstance(text, unicode): - text = text.decode(encoding) - - text = unicodedata.normalize('NFC', text) - - return text - - -def parse_dir_entries(decoded_str): - '''Parse the entries from a recursive info xml''' - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - entries = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'entry': - doc.expandNode(node) - if not _get_entry_schedule(node).startswith('delete'): - entries.append((node.getAttribute('path'), - node.getAttribute('kind'))) - - return entries[1:] # do not want the root directory - - -def parse_externals_xml(decoded_str, prefix=''): - '''Parse a propget svn:externals xml''' - prefix = os.path.normpath(prefix) - prefix = os.path.normcase(prefix) - - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - externals = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'target': - doc.expandNode(node) - path = os.path.normpath(node.getAttribute('path')) - - if os.path.normcase(path).startswith(prefix): - path = path[len(prefix)+1:] - - data = 
_get_target_property(node) - #data should be decoded already - for external in parse_external_prop(data): - externals.append(joinpath(path, external)) - - return externals # do not want the root directory - - -def parse_external_prop(lines): - """ - Parse the value of a retrieved svn:externals entry. - - possible token setups (with quotng and backscaping in laters versions) - URL[@#] EXT_FOLDERNAME - [-r#] URL EXT_FOLDERNAME - EXT_FOLDERNAME [-r#] URL - """ - externals = [] - for line in lines.splitlines(): - line = line.lstrip() # there might be a "\ " - if not line: - continue - - if sys.version_info < (3, 0): - #shlex handles NULLs just fine and shlex in 2.7 tries to encode - #as ascii automatiically - line = line.encode('utf-8') - line = shlex.split(line) - if sys.version_info < (3, 0): - line = [x.decode('utf-8') for x in line] - - #EXT_FOLDERNAME is either the first or last depending on where - #the URL falls - if urlparse.urlsplit(line[-1])[0]: - external = line[0] - else: - external = line[-1] - - external = decode_as_string(external, encoding="utf-8") - externals.append(os.path.normpath(external)) - - return externals - - -def parse_prop_file(filename, key): - found = False - f = open(filename, 'rt') - data = '' - try: - for line in iter(f.readline, ''): # can't use direct iter! - parts = line.split() - if len(parts) == 2: - kind, length = parts - data = f.read(int(length)) - if kind == 'K' and data == key: - found = True - elif kind == 'V' and found: - break - finally: - f.close() - - return data - - -class SvnInfo(object): - ''' - Generic svn_info object. No has little knowledge of how to extract - information. Use cls.load to instatiate according svn version. - - Paths are not filesystem encoded. 
- ''' - - @staticmethod - def get_svn_version(): - code, data = _run_command(['svn', '--version', '--quiet']) - if code == 0 and data: - return data.strip() - else: - return '' - - #svnversion return values (previous implementations return max revision) - # 4123:4168 mixed revision working copy - # 4168M modified working copy - # 4123S switched working copy - # 4123:4168MS mixed revision, modified, switched working copy - revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I) - - @classmethod - def load(cls, dirname=''): - normdir = os.path.normpath(dirname) - code, data = _run_command(['svn', 'info', normdir]) - # Must check for some contents, as some use empty directories - # in testcases - svn_dir = os.path.join(normdir, '.svn') - has_svn = (os.path.isfile(os.path.join(svn_dir, 'entries')) or - os.path.isfile(os.path.join(svn_dir, 'dir-props')) or - os.path.isfile(os.path.join(svn_dir, 'dir-prop-base'))) - - svn_version = tuple(cls.get_svn_version().split('.')) - - try: - base_svn_version = tuple(int(x) for x in svn_version[:2]) - except ValueError: - base_svn_version = tuple() - - if not has_svn: - return SvnInfo(dirname) - - if code or not base_svn_version or base_svn_version < (1, 3): - warnings.warn(("No SVN 1.3+ command found: falling back " - "on pre 1.7 .svn parsing"), DeprecationWarning) - return SvnFileInfo(dirname) - - if base_svn_version < (1, 5): - return Svn13Info(dirname) - - return Svn15Info(dirname) - - def __init__(self, path=''): - self.path = path - self._entries = None - self._externals = None - - def get_revision(self): - 'Retrieve the directory revision informatino using svnversion' - code, data = _run_command(['svnversion', '-c', self.path]) - if code: - log.warn("svnversion failed") - return 0 - - parsed = self.revision_re.match(data) - if parsed: - return int(parsed.group(2)) - else: - return 0 - - @property - def entries(self): - if self._entries is None: - self._entries = self.get_entries() - return self._entries - - 
@property - def externals(self): - if self._externals is None: - self._externals = self.get_externals() - return self._externals - - def iter_externals(self): - ''' - Iterate over the svn:external references in the repository path. - ''' - for item in self.externals: - yield item - - def iter_files(self): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - for item, kind in self.entries: - if kind.lower() == 'file': - yield item - - def iter_dirs(self, include_root=True): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - if include_root: - yield self.path - for item, kind in self.entries: - if kind.lower() == 'dir': - yield item - - def get_entries(self): - return [] - - def get_externals(self): - return [] - - -class Svn13Info(SvnInfo): - def get_entries(self): - code, data = _run_command(['svn', 'info', '-R', '--xml', self.path], - encoding="utf-8") - - if code: - log.debug("svn info failed") - return [] - - return parse_dir_entries(data) - - def get_externals(self): - #Previous to 1.5 --xml was not supported for svn propget and the -R - #output format breaks the shlex compatible semantics. 
- cmd = ['svn', 'propget', 'svn:externals'] - result = [] - for folder in self.iter_dirs(): - code, lines = _run_command(cmd + [folder], encoding="utf-8") - if code != 0: - log.warn("svn propget failed") - return [] - #lines should a str - for external in parse_external_prop(lines): - if folder: - external = os.path.join(folder, external) - result.append(os.path.normpath(external)) - - return result - - -class Svn15Info(Svn13Info): - def get_externals(self): - cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml'] - code, lines = _run_command(cmd, encoding="utf-8") - if code: - log.debug("svn propget failed") - return [] - return parse_externals_xml(lines, prefix=os.path.abspath(self.path)) - - -class SvnFileInfo(SvnInfo): - - def __init__(self, path=''): - super(SvnFileInfo, self).__init__(path) - self._directories = None - self._revision = None - - def _walk_svn(self, base): - entry_file = joinpath(base, '.svn', 'entries') - if os.path.isfile(entry_file): - entries = SVNEntriesFile.load(base) - yield (base, False, entries.parse_revision()) - for path in entries.get_undeleted_records(): - path = decode_as_string(path) - path = joinpath(base, path) - if os.path.isfile(path): - yield (path, True, None) - elif os.path.isdir(path): - for item in self._walk_svn(path): - yield item - - def _build_entries(self): - entries = list() - - rev = 0 - for path, isfile, dir_rev in self._walk_svn(self.path): - if isfile: - entries.append((path, 'file')) - else: - entries.append((path, 'dir')) - rev = max(rev, dir_rev) - - self._entries = entries - self._revision = rev - - def get_entries(self): - if self._entries is None: - self._build_entries() - return self._entries - - def get_revision(self): - if self._revision is None: - self._build_entries() - return self._revision - - def get_externals(self): - prop_files = [['.svn', 'dir-prop-base'], - ['.svn', 'dir-props']] - externals = [] - - for dirname in self.iter_dirs(): - prop_file = None - for rel_parts in 
prop_files: - filename = joinpath(dirname, *rel_parts) - if os.path.isfile(filename): - prop_file = filename - - if prop_file is not None: - ext_prop = parse_prop_file(prop_file, 'svn:externals') - #ext_prop should be utf-8 coming from svn:externals - ext_prop = decode_as_string(ext_prop, encoding="utf-8") - externals.extend(parse_external_prop(ext_prop)) - - return externals - - -def svn_finder(dirname=''): - #combined externals due to common interface - #combined externals and entries due to lack of dir_props in 1.7 - info = SvnInfo.load(dirname) - for path in info.iter_files(): - yield path - - for path in info.iter_externals(): - sub_info = SvnInfo.load(path) - for sub_path in sub_info.iter_files(): - yield sub_path - - -class SVNEntriesFile(object): - def __init__(self, data): - self.data = data - - @classmethod - def load(class_, base): - filename = os.path.join(base, '.svn', 'entries') - f = open(filename) - try: - result = SVNEntriesFile.read(f) - finally: - f.close() - return result - - @classmethod - def read(class_, fileobj): - data = fileobj.read() - is_xml = data.startswith(' revision_line_number - and section[revision_line_number]) - ] - return rev_numbers - - def get_undeleted_records(self): - undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete') - result = [ - section[0] - for section in self.get_sections() - if undeleted(section) - ] - return result - - -class SVNEntriesFileXML(SVNEntriesFile): - def is_valid(self): - return True - - def get_url(self): - "Get repository URL" - urlre = re.compile('url="([^"]+)"') - return urlre.search(self.data).group(1) - - def parse_revision_numbers(self): - revre = re.compile(r'committed-rev="(\d+)"') - return [ - int(m.group(1)) - for m in revre.finditer(self.data) - ] - - def get_undeleted_records(self): - entries_pattern = \ - re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) - results = [ - unescape(match.group(1)) - for match in entries_pattern.finditer(self.data) - ] - return 
results - - -if __name__ == '__main__': - for name in svn_finder(sys.argv[1]): - print(name) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/__init__.py b/libs/setuptools-2.2/build/lib/setuptools/tests/__init__.py deleted file mode 100644 index b5328ce..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/__init__.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Tests for the 'setuptools' package""" -import sys -import os -import unittest -from setuptools.tests import doctest -import distutils.core -import distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -from distutils.core import Extension -from distutils.version import LooseVersion -from setuptools.compat import func_code - -from setuptools.compat import func_code -import setuptools.dist -import setuptools.depends as dep -from setuptools import Feature -from setuptools.depends import Require - -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite( - os.path.join('tests', 'api_tests.txt'), - optionflags=doctest.ELLIPSIS, package='pkg_resources', - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! 
- args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core._setup_stop_after = None - - -class DependsTests(unittest.TestCase): - - def testExtractConst(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platforms - return - - def f1(): - global x, y, z - x = "test" - y = z - - fc = func_code(f1) - # unrecognized name - self.assertEqual(dep.extract_constant(fc,'q', -1), None) - - # constant assigned - self.assertEqual(dep.extract_constant(fc,'x', -1), "test") - - # expression assigned - self.assertEqual(dep.extract_constant(fc,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(dep.extract_constant(fc,'z', -1), None) - - def testFindModule(self): - self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') - self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') - f,p,i = dep.find_module('setuptools.tests') - f.close() - - def testModuleExtract(self): - if not hasattr(dep, 'get_module_constant'): - # skip on non-bytecode platforms - return - - from email import __version__ - self.assertEqual( - dep.get_module_constant('email','__version__'), __version__ - ) - self.assertEqual( - dep.get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - dep.get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platformsh - return - - req = Require('Email','1.0.3','email') - - self.assertEqual(req.name, 'Email') - self.assertEqual(req.module, 'email') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Email-1.0.3') - - from email import __version__ - self.assertEqual(req.get_version(), __version__) - self.assertTrue(req.version_ok('1.0.9')) - self.assertTrue(not req.version_ok('0.9.1')) - self.assertTrue(not req.version_ok('unknown')) - - 
self.assertTrue(req.is_present()) - self.assertTrue(req.is_current()) - - req = Require('Email 3000','03000','email',format=LooseVersion) - self.assertTrue(req.is_present()) - self.assertTrue(not req.is_current()) - self.assertTrue(not req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.assertTrue(not req.is_present()) - self.assertTrue(not req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.assertTrue(req.is_present(paths)) - self.assertTrue(req.is_current(paths)) - - -class DistroTests(unittest.TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - def testDistroType(self): - self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - 
self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.assertTrue(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.assertTrue(not self.dist.has_contents_for('a')) - - self.assertTrue(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.assertTrue(not self.dist.has_contents_for('b')) - - self.assertTrue(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.assertTrue(not self.dist.has_contents_for('c')) - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - -class FeatureTests(unittest.TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = 
makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.assertTrue(not - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.assertTrue( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.assertTrue( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.assertTrue( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options - ) - self.assertTrue( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - self.assertTrue( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.assertTrue(not 'without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.assertTrue(not 'bar_et' in dist.py_modules) - self.assertTrue(not 'pkg.bar' in dist.packages) - 
self.assertTrue('pkg.baz' in dist.packages) - self.assertTrue('scripts/baz_it' in dist.scripts) - self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(unittest.TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/doctest.py b/libs/setuptools-2.2/build/lib/setuptools/tests/doctest.py deleted file mode 100644 index 47293c3..0000000 --- 
a/libs/setuptools-2.2/build/lib/setuptools/tests/doctest.py +++ /dev/null @@ -1,2683 +0,0 @@ -# Module doctest. -# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org). -# Major enhancements and refactoring by: -# Jim Fulton -# Edward Loper - -# Provided as-is; use at your own risk; no warranty; no promises; enjoy! - -try: - basestring -except NameError: - basestring = str - -try: - enumerate -except NameError: - def enumerate(seq): - return zip(range(len(seq)),seq) - -r"""Module doctest -- a framework for running examples in docstrings. - -In simplest use, end each module M to be tested with: - -def _test(): - import doctest - doctest.testmod() - -if __name__ == "__main__": - _test() - -Then running the module as a script will cause the examples in the -docstrings to get executed and verified: - -python M.py - -This won't display anything unless an example fails, in which case the -failing example(s) and the cause(s) of the failure(s) are printed to stdout -(why not stderr? because stderr is a lame hack <0.2 wink>), and the final -line of output is "Test failed.". - -Run it with the -v switch instead: - -python M.py -v - -and a detailed report of all examples tried is printed to stdout, along -with assorted summaries at the end. - -You can force verbose mode by passing "verbose=True" to testmod, or prohibit -it by passing "verbose=False". In either of those cases, sys.argv is not -examined by testmod. - -There are a variety of other ways to run doctests, including integration -with the unittest framework, and support for running non-Python text -files containing doctests. There are also many ways to override parts -of doctest's default behaviors. See the Library Reference Manual for -details. 
-""" - -__docformat__ = 'reStructuredText en' - -__all__ = [ - # 0, Option Flags - 'register_optionflag', - 'DONT_ACCEPT_TRUE_FOR_1', - 'DONT_ACCEPT_BLANKLINE', - 'NORMALIZE_WHITESPACE', - 'ELLIPSIS', - 'IGNORE_EXCEPTION_DETAIL', - 'COMPARISON_FLAGS', - 'REPORT_UDIFF', - 'REPORT_CDIFF', - 'REPORT_NDIFF', - 'REPORT_ONLY_FIRST_FAILURE', - 'REPORTING_FLAGS', - # 1. Utility Functions - 'is_private', - # 2. Example & DocTest - 'Example', - 'DocTest', - # 3. Doctest Parser - 'DocTestParser', - # 4. Doctest Finder - 'DocTestFinder', - # 5. Doctest Runner - 'DocTestRunner', - 'OutputChecker', - 'DocTestFailure', - 'UnexpectedException', - 'DebugRunner', - # 6. Test Functions - 'testmod', - 'testfile', - 'run_docstring_examples', - # 7. Tester - 'Tester', - # 8. Unittest Support - 'DocTestSuite', - 'DocFileSuite', - 'set_unittest_reportflags', - # 9. Debugging Support - 'script_from_examples', - 'testsource', - 'debug_src', - 'debug', -] - -import __future__ - -import sys, traceback, inspect, linecache, os, re, types -import unittest, difflib, pdb, tempfile -import warnings -from setuptools.compat import StringIO, execfile, func_code, im_func - -# Don't whine about the deprecated is_private function in this -# module's tests. -warnings.filterwarnings("ignore", "is_private", DeprecationWarning, - __name__, 0) - -# There are 4 basic classes: -# - Example: a pair, plus an intra-docstring line number. -# - DocTest: a collection of examples, parsed from a docstring, plus -# info about where the docstring came from (name, filename, lineno). -# - DocTestFinder: extracts DocTests from a given object's docstring and -# its contained objects' docstrings. -# - DocTestRunner: runs DocTest cases, and accumulates statistics. -# -# So the basic picture is: -# -# list of: -# +------+ +---------+ +-------+ -# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results| -# +------+ +---------+ +-------+ -# | Example | -# | ... | -# | Example | -# +---------+ - -# Option constants. 
- -OPTIONFLAGS_BY_NAME = {} -def register_optionflag(name): - flag = 1 << len(OPTIONFLAGS_BY_NAME) - OPTIONFLAGS_BY_NAME[name] = flag - return flag - -DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1') -DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE') -NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE') -ELLIPSIS = register_optionflag('ELLIPSIS') -IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL') - -COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 | - DONT_ACCEPT_BLANKLINE | - NORMALIZE_WHITESPACE | - ELLIPSIS | - IGNORE_EXCEPTION_DETAIL) - -REPORT_UDIFF = register_optionflag('REPORT_UDIFF') -REPORT_CDIFF = register_optionflag('REPORT_CDIFF') -REPORT_NDIFF = register_optionflag('REPORT_NDIFF') -REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE') - -REPORTING_FLAGS = (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF | - REPORT_ONLY_FIRST_FAILURE) - -# Special string markers for use in `want` strings: -BLANKLINE_MARKER = '' -ELLIPSIS_MARKER = '...' - -###################################################################### -## Table of Contents -###################################################################### -# 1. Utility Functions -# 2. Example & DocTest -- store test cases -# 3. DocTest Parser -- extracts examples from strings -# 4. DocTest Finder -- extracts test cases from objects -# 5. DocTest Runner -- runs test cases -# 6. Test Functions -- convenient wrappers for testing -# 7. Tester Class -- for backwards compatibility -# 8. Unittest Support -# 9. Debugging Support -# 10. Example Usage - -###################################################################### -## 1. Utility Functions -###################################################################### - -def is_private(prefix, base): - """prefix, base -> true iff name prefix + "." + base is "private". - - Prefix may be an empty string, and base does not contain a period. 
- Prefix is ignored (although functions you write conforming to this - protocol may make use of it). - Return true iff base begins with an (at least one) underscore, but - does not both begin and end with (at least) two underscores. - - >>> is_private("a.b", "my_func") - False - >>> is_private("____", "_my_func") - True - >>> is_private("someclass", "__init__") - False - >>> is_private("sometypo", "__init_") - True - >>> is_private("x.y.z", "_") - True - >>> is_private("_x.y.z", "__") - False - >>> is_private("", "") # senseless but consistent - False - """ - warnings.warn("is_private is deprecated; it wasn't useful; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning, stacklevel=2) - return base[:1] == "_" and not base[:2] == "__" == base[-2:] - -def _extract_future_flags(globs): - """ - Return the compiler-flags associated with the future features that - have been imported into the given namespace (globs). - """ - flags = 0 - for fname in __future__.all_feature_names: - feature = globs.get(fname, None) - if feature is getattr(__future__, fname): - flags |= feature.compiler_flag - return flags - -def _normalize_module(module, depth=2): - """ - Return the module specified by `module`. In particular: - - If `module` is a module, then return module. - - If `module` is a string, then import and return the - module with that name. - - If `module` is None, then return the calling module. - The calling module is assumed to be the module of - the stack frame at the given depth in the call stack. - """ - if inspect.ismodule(module): - return module - elif isinstance(module, basestring): - return __import__(module, globals(), locals(), ["*"]) - elif module is None: - return sys.modules[sys._getframe(depth).f_globals['__name__']] - else: - raise TypeError("Expected a module, string, or None") - -def _indent(s, indent=4): - """ - Add the given number of space characters to the beginning every - non-blank line in `s`, and return the result. 
- """ - # This regexp matches the start of non-blank lines: - return re.sub('(?m)^(?!$)', indent*' ', s) - -def _exception_traceback(exc_info): - """ - Return a string containing a traceback message for the given - exc_info tuple (as returned by sys.exc_info()). - """ - # Get a traceback message. - excout = StringIO() - exc_type, exc_val, exc_tb = exc_info - traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) - return excout.getvalue() - -# Override some StringIO methods. -class _SpoofOut(StringIO): - def getvalue(self): - result = StringIO.getvalue(self) - # If anything at all was written, make sure there's a trailing - # newline. There's no way for the expected output to indicate - # that a trailing newline is missing. - if result and not result.endswith("\n"): - result += "\n" - # Prevent softspace from screwing up the next test case, in - # case they used print with a trailing comma in an example. - if hasattr(self, "softspace"): - del self.softspace - return result - - def truncate(self, size=None): - StringIO.truncate(self, size) - if hasattr(self, "softspace"): - del self.softspace - -# Worst-case linear-time ellipsis matching. -def _ellipsis_match(want, got): - """ - Essentially the only subtle case: - >>> _ellipsis_match('aa...aa', 'aaa') - False - """ - if want.find(ELLIPSIS_MARKER)==-1: - return want == got - - # Find "the real" strings. - ws = want.split(ELLIPSIS_MARKER) - assert len(ws) >= 2 - - # Deal with exact matches possibly needed at one or both ends. 
- startpos, endpos = 0, len(got) - w = ws[0] - if w: # starts with exact match - if got.startswith(w): - startpos = len(w) - del ws[0] - else: - return False - w = ws[-1] - if w: # ends with exact match - if got.endswith(w): - endpos -= len(w) - del ws[-1] - else: - return False - - if startpos > endpos: - # Exact end matches required more characters than we have, as in - # _ellipsis_match('aa...aa', 'aaa') - return False - - # For the rest, we only need to find the leftmost non-overlapping - # match for each piece. If there's no overall match that way alone, - # there's no overall match period. - for w in ws: - # w may be '' at times, if there are consecutive ellipses, or - # due to an ellipsis at the start or end of `want`. That's OK. - # Search for an empty string succeeds, and doesn't change startpos. - startpos = got.find(w, startpos, endpos) - if startpos < 0: - return False - startpos += len(w) - - return True - -def _comment_line(line): - "Return a commented form of the given line" - line = line.rstrip() - if line: - return '# '+line - else: - return '#' - -class _OutputRedirectingPdb(pdb.Pdb): - """ - A specialized version of the python debugger that redirects stdout - to a given stream when interacting with the user. Stdout is *not* - redirected when traced code is executed. - """ - def __init__(self, out): - self.__out = out - pdb.Pdb.__init__(self) - - def trace_dispatch(self, *args): - # Redirect stdout to the given stream. - save_stdout = sys.stdout - sys.stdout = self.__out - # Call Pdb's trace dispatch method. - try: - return pdb.Pdb.trace_dispatch(self, *args) - finally: - sys.stdout = save_stdout - -# [XX] Normalize with respect to os.path.pardir? -def _module_relative_path(module, path): - if not inspect.ismodule(module): - raise TypeError('Expected a module: %r' % module) - if path.startswith('/'): - raise ValueError('Module-relative files may not have absolute paths') - - # Find the base directory for the path. 
- if hasattr(module, '__file__'): - # A normal module/package - basedir = os.path.split(module.__file__)[0] - elif module.__name__ == '__main__': - # An interactive session. - if len(sys.argv)>0 and sys.argv[0] != '': - basedir = os.path.split(sys.argv[0])[0] - else: - basedir = os.curdir - else: - # A module w/o __file__ (this includes builtins) - raise ValueError("Can't resolve paths relative to the module " + - module + " (it has no __file__)") - - # Combine the base directory and the path. - return os.path.join(basedir, *(path.split('/'))) - -###################################################################### -## 2. Example & DocTest -###################################################################### -## - An "example" is a pair, where "source" is a -## fragment of source code, and "want" is the expected output for -## "source." The Example class also includes information about -## where the example was extracted from. -## -## - A "doctest" is a collection of examples, typically extracted from -## a string (such as an object's docstring). The DocTest class also -## includes information about where the string was extracted from. - -class Example: - """ - A single doctest example, consisting of source code and expected - output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. 
`exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. - """ - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Normalize inputs. - if not source.endswith('\n'): - source += '\n' - if want and not want.endswith('\n'): - want += '\n' - if exc_msg is not None and not exc_msg.endswith('\n'): - exc_msg += '\n' - # Store properties. - self.source = source - self.want = want - self.lineno = lineno - self.indent = indent - if options is None: options = {} - self.options = options - self.exc_msg = exc_msg - -class DocTest: - """ - A collection of doctest examples that should be run in a single - namespace. Each `DocTest` defines the following attributes: - - - examples: the list of examples. - - - globs: The namespace (aka globals) that the examples should - be run in. - - - name: A name identifying the DocTest (typically, the name of - the object whose docstring this DocTest was extracted from). - - - filename: The name of the file that this DocTest was extracted - from, or `None` if the filename is unknown. - - - lineno: The line number within filename where this DocTest - begins, or `None` if the line number is unavailable. This - line number is zero-based, with respect to the beginning of - the file. 
- - - docstring: The string that the examples were extracted from, - or `None` if the string is unavailable. - """ - def __init__(self, examples, globs, name, filename, lineno, docstring): - """ - Create a new DocTest containing the given examples. The - DocTest's globals are initialized with a copy of `globs`. - """ - assert not isinstance(examples, basestring), \ - "DocTest no longer accepts str; use DocTestParser instead" - self.examples = examples - self.docstring = docstring - self.globs = globs.copy() - self.name = name - self.filename = filename - self.lineno = lineno - - def __repr__(self): - if len(self.examples) == 0: - examples = 'no examples' - elif len(self.examples) == 1: - examples = '1 example' - else: - examples = '%d examples' % len(self.examples) - return ('' % - (self.name, self.filename, self.lineno, examples)) - - - # This lets us sort tests by name: - def __cmp__(self, other): - if not isinstance(other, DocTest): - return -1 - return cmp((self.name, self.filename, self.lineno, id(self)), - (other.name, other.filename, other.lineno, id(other))) - -###################################################################### -## 3. DocTestParser -###################################################################### - -class DocTestParser: - """ - A class used to parse strings containing doctest examples. - """ - # This regular expression is used to find doctest examples in a - # string. It defines three groups: `source` is the source code - # (including leading indentation and prompts); `indent` is the - # indentation of the first (PS1) line of the source code; and - # `want` is the expected output (including leading indentation). - _EXAMPLE_RE = re.compile(r''' - # Source consists of a PS1 line followed by zero or more PS2 lines. - (?P - (?:^(?P [ ]*) >>> .*) # PS1 line - (?:\n [ ]* \.\.\. .*)*) # PS2 lines - \n? - # Want consists of any non-blank lines that do not start with PS1. 
- (?P (?:(?![ ]*$) # Not a blank line - (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line - )*) - ''', re.MULTILINE | re.VERBOSE) - - # A regular expression for handling `want` strings that contain - # expected exceptions. It divides `want` into three pieces: - # - the traceback header line (`hdr`) - # - the traceback stack (`stack`) - # - the exception message (`msg`), as generated by - # traceback.format_exception_only() - # `msg` may have multiple lines. We assume/require that the - # exception message is the first non-indented line starting with a word - # character following the traceback header line. - _EXCEPTION_RE = re.compile(r""" - # Grab the traceback header. Different versions of Python have - # said different things on the first traceback line. - ^(?P Traceback\ \( - (?: most\ recent\ call\ last - | innermost\ last - ) \) : - ) - \s* $ # toss trailing whitespace on the header. - (?P .*?) # don't blink: absorb stuff until... - ^ (?P \w+ .*) # a line *starts* with alphanum. - """, re.VERBOSE | re.MULTILINE | re.DOTALL) - - # A callable returning a true value iff its argument is a blank line - # or contains a single comment. - _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match - - def parse(self, string, name=''): - """ - Divide the given string into examples and intervening text, - and return them as a list of alternating Examples and strings. - Line numbers for the Examples are 0-based. The optional - argument `name` is a name identifying this string, and is only - used for error messages. - """ - string = string.expandtabs() - # If all lines begin with the same indentation, then strip it. - min_indent = self._min_indent(string) - if min_indent > 0: - string = '\n'.join([l[min_indent:] for l in string.split('\n')]) - - output = [] - charno, lineno = 0, 0 - # Find all doctest examples in the string: - for m in self._EXAMPLE_RE.finditer(string): - # Add the pre-example text to `output`. 
- output.append(string[charno:m.start()]) - # Update lineno (lines before this example) - lineno += string.count('\n', charno, m.start()) - # Extract info from the regexp match. - (source, options, want, exc_msg) = \ - self._parse_example(m, name, lineno) - # Create an Example, and add it to the list. - if not self._IS_BLANK_OR_COMMENT(source): - output.append( Example(source, want, exc_msg, - lineno=lineno, - indent=min_indent+len(m.group('indent')), - options=options) ) - # Update lineno (lines inside this example) - lineno += string.count('\n', m.start(), m.end()) - # Update charno. - charno = m.end() - # Add any remaining post-example text to `output`. - output.append(string[charno:]) - return output - - def get_doctest(self, string, globs, name, filename, lineno): - """ - Extract all doctest examples from the given string, and - collect them into a `DocTest` object. - - `globs`, `name`, `filename`, and `lineno` are attributes for - the new `DocTest` object. See the documentation for `DocTest` - for more information. - """ - return DocTest(self.get_examples(string, name), globs, - name, filename, lineno, string) - - def get_examples(self, string, name=''): - """ - Extract all doctest examples from the given string, and return - them as a list of `Example` objects. Line numbers are - 0-based, because it's most common in doctests that nothing - interesting appears on the same line as opening triple-quote, - and so the first interesting line is called \"line 1\" then. - - The optional argument `name` is a name identifying this - string, and is only used for error messages. 
- """ - return [x for x in self.parse(string, name) - if isinstance(x, Example)] - - def _parse_example(self, m, name, lineno): - """ - Given a regular expression match from `_EXAMPLE_RE` (`m`), - return a pair `(source, want)`, where `source` is the matched - example's source code (with prompts and indentation stripped); - and `want` is the example's expected output (with indentation - stripped). - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - # Get the example's indentation level. - indent = len(m.group('indent')) - - # Divide source into lines; check that they're properly - # indented; and then strip their indentation & prompts. - source_lines = m.group('source').split('\n') - self._check_prompt_blank(source_lines, indent, name, lineno) - self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno) - source = '\n'.join([sl[indent+4:] for sl in source_lines]) - - # Divide want into lines; check that it's properly indented; and - # then strip the indentation. Spaces before the last newline should - # be preserved, so plain rstrip() isn't good enough. - want = m.group('want') - want_lines = want.split('\n') - if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): - del want_lines[-1] # forget final newline & spaces after it - self._check_prefix(want_lines, ' '*indent, name, - lineno + len(source_lines)) - want = '\n'.join([wl[indent:] for wl in want_lines]) - - # If `want` contains a traceback message, then extract it. - m = self._EXCEPTION_RE.match(want) - if m: - exc_msg = m.group('msg') - else: - exc_msg = None - - # Extract options from the source. - options = self._find_options(source, name, lineno) - - return source, options, want, exc_msg - - # This regular expression looks for option directives in the - # source code of an example. Option directives are comments - # starting with "doctest:". 
Warning: this may give false - # positives for string-literals that contain the string - # "#doctest:". Eliminating these false positives would require - # actually parsing the string; but we limit them by ignoring any - # line containing "#doctest:" that is *followed* by a quote mark. - _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$', - re.MULTILINE) - - def _find_options(self, source, name, lineno): - """ - Return a dictionary containing option overrides extracted from - option directives in the given source string. - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - options = {} - # (note: with the current regexp, this will match at most once:) - for m in self._OPTION_DIRECTIVE_RE.finditer(source): - option_strings = m.group(1).replace(',', ' ').split() - for option in option_strings: - if (option[0] not in '+-' or - option[1:] not in OPTIONFLAGS_BY_NAME): - raise ValueError('line %r of the doctest for %s ' - 'has an invalid option: %r' % - (lineno+1, name, option)) - flag = OPTIONFLAGS_BY_NAME[option[1:]] - options[flag] = (option[0] == '+') - if options and self._IS_BLANK_OR_COMMENT(source): - raise ValueError('line %r of the doctest for %s has an option ' - 'directive on a line with no example: %r' % - (lineno, name, source)) - return options - - # This regular expression finds the indentation of every non-blank - # line in a string. - _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE) - - def _min_indent(self, s): - "Return the minimum indentation of any non-blank line in `s`" - indents = [len(indent) for indent in self._INDENT_RE.findall(s)] - if len(indents) > 0: - return min(indents) - else: - return 0 - - def _check_prompt_blank(self, lines, indent, name, lineno): - """ - Given the lines of a source string (including prompts and - leading indentation), check to make sure that every prompt is - followed by a space character. 
If any line is not followed by - a space character, then raise ValueError. - """ - for i, line in enumerate(lines): - if len(line) >= indent+4 and line[indent+3] != ' ': - raise ValueError('line %r of the docstring for %s ' - 'lacks blank after %s: %r' % - (lineno+i+1, name, - line[indent:indent+3], line)) - - def _check_prefix(self, lines, prefix, name, lineno): - """ - Check that every line in the given list starts with the given - prefix; if any line does not, then raise a ValueError. - """ - for i, line in enumerate(lines): - if line and not line.startswith(prefix): - raise ValueError('line %r of the docstring for %s has ' - 'inconsistent leading whitespace: %r' % - (lineno+i+1, name, line)) - - -###################################################################### -## 4. DocTest Finder -###################################################################### - -class DocTestFinder: - """ - A class used to extract the DocTests that are relevant to a given - object, from its docstring and the docstrings of its contained - objects. Doctests can currently be extracted from the following - object types: modules, functions, classes, methods, staticmethods, - classmethods, and properties. - """ - - def __init__(self, verbose=False, parser=DocTestParser(), - recurse=True, _namefilter=None, exclude_empty=True): - """ - Create a new doctest finder. - - The optional argument `parser` specifies a class or - function that should be used to create new DocTest objects (or - objects that implement the same interface as DocTest). The - signature for this factory function should match the signature - of the DocTest constructor. - - If the optional argument `recurse` is false, then `find` will - only examine the given object, and not any contained objects. - - If the optional argument `exclude_empty` is false, then `find` - will include tests for objects with empty docstrings. 
- """ - self._parser = parser - self._verbose = verbose - self._recurse = recurse - self._exclude_empty = exclude_empty - # _namefilter is undocumented, and exists only for temporary backward- - # compatibility support of testmod's deprecated isprivate mess. - self._namefilter = _namefilter - - def find(self, obj, name=None, module=None, globs=None, - extraglobs=None): - """ - Return a list of the DocTests that are defined by the given - object's docstring, or by any of its contained objects' - docstrings. - - The optional parameter `module` is the module that contains - the given object. If the module is not specified or is None, then - the test finder will attempt to automatically determine the - correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. - - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - - Contained objects whose module does not match `module` are ignored. - - If `module` is False, no attempt to find the module will be made. - This is obscure, of use mostly in tests: if `module` is False, or - is None but cannot be found automatically, then all objects are - considered to belong to the (non-existent) module, so all contained - objects will (recursively) be searched for doctests. - - The globals for each DocTest is formed by combining `globs` - and `extraglobs` (bindings in `extraglobs` override bindings - in `globs`). A new copy of the globals dictionary is created - for each DocTest. If `globs` is not specified, then it - defaults to the module's `__dict__`, if specified, or {} - otherwise. If `extraglobs` is not specified, then it defaults - to {}. - - """ - # If name was not specified, then extract it from the object. 
- if name is None: - name = getattr(obj, '__name__', None) - if name is None: - raise ValueError("DocTestFinder.find: name must be given " - "when obj.__name__ doesn't exist: %r" % - (type(obj),)) - - # Find the module that contains the given object (if obj is - # a module, then module=obj.). Note: this may fail, in which - # case module will be None. - if module is False: - module = None - elif module is None: - module = inspect.getmodule(obj) - - # Read the module's source code. This is used by - # DocTestFinder._find_lineno to find the line number for a - # given object's docstring. - try: - file = inspect.getsourcefile(obj) or inspect.getfile(obj) - source_lines = linecache.getlines(file) - if not source_lines: - source_lines = None - except TypeError: - source_lines = None - - # Initialize globals, and merge in extraglobs. - if globs is None: - if module is None: - globs = {} - else: - globs = module.__dict__.copy() - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - # Recursively expore `obj`, extracting DocTests. - tests = [] - self._find(tests, obj, name, module, source_lines, globs, {}) - return tests - - def _filter(self, obj, prefix, base): - """ - Return true if the given object should not be examined. - """ - return (self._namefilter is not None and - self._namefilter(prefix, base)) - - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. - """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is func_globals(object) - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. 
- else: - raise ValueError("object must be a class or function") - - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. - """ - if self._verbose: - print('Finding tests in %s' % name) - - # If we've already processed this object, then ignore it. - if id(obj) in seen: - return - seen[id(obj)] = 1 - - # Find a test for this object, and add it to the list of tests. - test = self._get_test(obj, name, module, globs, source_lines) - if test is not None: - tests.append(test) - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - valname = '%s.%s' % (name, valname) - # Recurse to functions & classes. - if ((inspect.isfunction(val) or inspect.isclass(val)) and - self._from_module(module, val)): - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a module's __test__ dictionary. - if inspect.ismodule(obj) and self._recurse: - for valname, val in getattr(obj, '__test__', {}).items(): - if not isinstance(valname, basestring): - raise ValueError("DocTestFinder.find: __test__ keys " - "must be strings: %r" % - (type(valname),)) - if not (inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or inspect.ismodule(val) or - isinstance(val, basestring)): - raise ValueError("DocTestFinder.find: __test__ values " - "must be strings, functions, methods, " - "classes, or modules: %r" % - (type(val),)) - valname = '%s.__test__.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. 
- if self._filter(val, name, valname): - continue - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = im_func(getattr(obj, valname)) - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - def _get_test(self, obj, name, module, globs, source_lines): - """ - Return a DocTest for the given object, if it defines a docstring; - otherwise, return None. - """ - # Extract the object's docstring. If it doesn't have one, - # then return None (no test for this object). - if isinstance(obj, basestring): - docstring = obj - else: - try: - if obj.__doc__ is None: - docstring = '' - else: - docstring = obj.__doc__ - if not isinstance(docstring, basestring): - docstring = str(docstring) - except (TypeError, AttributeError): - docstring = '' - - # Find the docstring's location in the file. - lineno = self._find_lineno(obj, source_lines) - - # Don't bother if the docstring is empty. - if self._exclude_empty and not docstring: - return None - - # Return a DocTest for this object. - if module is None: - filename = None - else: - filename = getattr(module, '__file__', module.__name__) - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - return self._parser.get_doctest(docstring, globs, name, - filename, lineno) - - def _find_lineno(self, obj, source_lines): - """ - Return a line number of the given object's docstring. Note: - this method assumes that the object has a docstring. - """ - lineno = None - - # Find the line number for modules. - if inspect.ismodule(obj): - lineno = 0 - - # Find the line number for classes. - # Note: this could be fooled if a class is defined multiple - # times in a single file. 
- if inspect.isclass(obj): - if source_lines is None: - return None - pat = re.compile(r'^\s*class\s*%s\b' % - getattr(obj, '__name__', '-')) - for i, line in enumerate(source_lines): - if pat.match(line): - lineno = i - break - - # Find the line number for functions & methods. - if inspect.ismethod(obj): obj = im_func(obj) - if inspect.isfunction(obj): obj = func_code(obj) - if inspect.istraceback(obj): obj = obj.tb_frame - if inspect.isframe(obj): obj = obj.f_code - if inspect.iscode(obj): - lineno = getattr(obj, 'co_firstlineno', None)-1 - - # Find the line number where the docstring starts. Assume - # that it's the first line that begins with a quote mark. - # Note: this could be fooled by a multiline function - # signature, where a continuation line begins with a quote - # mark. - if lineno is not None: - if source_lines is None: - return lineno+1 - pat = re.compile('(^|.*:)\s*\w*("|\')') - for lineno in range(lineno, len(source_lines)): - if pat.match(source_lines[lineno]): - return lineno - - # We couldn't find the line number. - return None - -###################################################################### -## 5. DocTest Runner -###################################################################### - -class DocTestRunner: - """ - A class used to run DocTest test cases, and accumulate statistics. - The `run` method is used to process a single DocTest case. It - returns a tuple `(f, t)`, where `t` is the number of test cases - tried, and `f` is the number of test cases that failed. - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> for test in tests: - ... 
print runner.run(test) - (0, 2) - (0, 1) - (0, 2) - (0, 2) - - The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated `(f, t)` - tuple: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 tests in _TestClass.square - 7 tests in 4 items. - 7 passed and 0 failed. - Test passed. - (0, 7) - - The aggregated number of tried examples and failed examples is - also available via the `tries` and `failures` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - - The comparison between expected outputs and actual outputs is done - by an `OutputChecker`. This comparison may be customized with a - number of option flags; see the documentation for `testmod` for - more information. If the option flags are insufficient, then the - comparison may also be customized by passing a subclass of - `OutputChecker` to the constructor. - - The test runner's display output can be controlled in two ways. - First, an output function (`out) can be passed to - `TestRunner.run`; this function will be called with strings that - should be displayed. It defaults to `sys.stdout.write`. If - capturing the output is not sufficient, then the display output - can be also customized by subclassing DocTestRunner, and - overriding the methods `report_start`, `report_success`, - `report_unexpected_exception`, and `report_failure`. - """ - # This divider string is used to separate failure messages, and to - # separate sections of the summary. - DIVIDER = "*" * 70 - - def __init__(self, checker=None, verbose=None, optionflags=0): - """ - Create a new test runner. - - Optional keyword arg `checker` is the `OutputChecker` that - should be used to compare the expected outputs and actual - outputs of doctest examples. 
- - Optional keyword arg 'verbose' prints lots of stuff if true, - only failures if false; by default, it's true iff '-v' is in - sys.argv. - - Optional argument `optionflags` can be used to control how the - test runner compares expected output to actual output, and how - it displays failures. See the documentation for `testmod` for - more information. - """ - self._checker = checker or OutputChecker() - if verbose is None: - verbose = '-v' in sys.argv - self._verbose = verbose - self.optionflags = optionflags - self.original_optionflags = optionflags - - # Keep track of the examples we've run. - self.tries = 0 - self.failures = 0 - self._name2ft = {} - - # Create a fake output target for capturing doctest output. - self._fakeout = _SpoofOut() - - #///////////////////////////////////////////////////////////////// - # Reporting methods - #///////////////////////////////////////////////////////////////// - - def report_start(self, out, test, example): - """ - Report that the test runner is about to process the given - example. (Only displays a message if verbose=True) - """ - if self._verbose: - if example.want: - out('Trying:\n' + _indent(example.source) + - 'Expecting:\n' + _indent(example.want)) - else: - out('Trying:\n' + _indent(example.source) + - 'Expecting nothing\n') - - def report_success(self, out, test, example, got): - """ - Report that the given example ran successfully. (Only - displays a message if verbose=True) - """ - if self._verbose: - out("ok\n") - - def report_failure(self, out, test, example, got): - """ - Report that the given example failed. - """ - out(self._failure_header(test, example) + - self._checker.output_difference(example, got, self.optionflags)) - - def report_unexpected_exception(self, out, test, example, exc_info): - """ - Report that the given example raised an unexpected exception. 
- """ - out(self._failure_header(test, example) + - 'Exception raised:\n' + _indent(_exception_traceback(exc_info))) - - def _failure_header(self, test, example): - out = [self.DIVIDER] - if test.filename: - if test.lineno is not None and example.lineno is not None: - lineno = test.lineno + example.lineno + 1 - else: - lineno = '?' - out.append('File "%s", line %s, in %s' % - (test.filename, lineno, test.name)) - else: - out.append('Line %s, in %s' % (example.lineno+1, test.name)) - out.append('Failed example:') - source = example.source - out.append(_indent(source)) - return '\n'.join(out) - - #///////////////////////////////////////////////////////////////// - # DocTest Running - #///////////////////////////////////////////////////////////////// - - def __run(self, test, compileflags, out): - """ - Run the examples in `test`. Write the outcome of each example - with one of the `DocTestRunner.report_*` methods, using the - writer function `out`. `compileflags` is the set of compiler - flags that should be used to execute examples. Return a tuple - `(f, t)`, where `t` is the number of examples tried, and `f` - is the number of examples that failed. The examples are run - in the namespace `test.globs`. - """ - # Keep track of the number of failures and tries. - failures = tries = 0 - - # Save the option flags (since option directives can be used - # to modify them). - original_optionflags = self.optionflags - - SUCCESS, FAILURE, BOOM = range(3) # `outcome` state - - check = self._checker.check_output - - # Process each example. - for examplenum, example in enumerate(test.examples): - - # If REPORT_ONLY_FIRST_FAILURE is set, then supress - # reporting after the first failure. - quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and - failures > 0) - - # Merge in the example's options. 
- self.optionflags = original_optionflags - if example.options: - for (optionflag, val) in example.options.items(): - if val: - self.optionflags |= optionflag - else: - self.optionflags &= ~optionflag - - # Record that we started this example. - tries += 1 - if not quiet: - self.report_start(out, test, example) - - # Use a special filename for compile(), so we can retrieve - # the source code during interactive debugging (see - # __patched_linecache_getlines). - filename = '' % (test.name, examplenum) - - # Run the example in the given context (globs), and record - # any exception that gets raised. (But don't intercept - # keyboard interrupts.) - try: - # Don't blink! This is where the user's code gets run. - exec(compile(example.source, filename, "single", - compileflags, 1), test.globs) - self.debugger.set_continue() # ==== Example Finished ==== - exception = None - except KeyboardInterrupt: - raise - except: - exception = sys.exc_info() - self.debugger.set_continue() # ==== Example Finished ==== - - got = self._fakeout.getvalue() # the actual output - self._fakeout.truncate(0) - outcome = FAILURE # guilty until proved innocent or insane - - # If the example executed without raising any exceptions, - # verify its output. - if exception is None: - if check(example.want, got, self.optionflags): - outcome = SUCCESS - - # The example raised an exception: check if it was expected. - else: - exc_info = sys.exc_info() - exc_msg = traceback.format_exception_only(*exc_info[:2])[-1] - if not quiet: - got += _exception_traceback(exc_info) - - # If `example.exc_msg` is None, then we weren't expecting - # an exception. - if example.exc_msg is None: - outcome = BOOM - - # We expected an exception: see whether it matches. - elif check(example.exc_msg, exc_msg, self.optionflags): - outcome = SUCCESS - - # Another chance if they didn't care about the detail. 
- elif self.optionflags & IGNORE_EXCEPTION_DETAIL: - m1 = re.match(r'[^:]*:', example.exc_msg) - m2 = re.match(r'[^:]*:', exc_msg) - if m1 and m2 and check(m1.group(0), m2.group(0), - self.optionflags): - outcome = SUCCESS - - # Report the outcome. - if outcome is SUCCESS: - if not quiet: - self.report_success(out, test, example, got) - elif outcome is FAILURE: - if not quiet: - self.report_failure(out, test, example, got) - failures += 1 - elif outcome is BOOM: - if not quiet: - self.report_unexpected_exception(out, test, example, - exc_info) - failures += 1 - else: - assert False, ("unknown outcome", outcome) - - # Restore the option flags (in case they were modified) - self.optionflags = original_optionflags - - # Record and return the number of failures and tries. - self.__record_outcome(test, failures, tries) - return failures, tries - - def __record_outcome(self, test, f, t): - """ - Record the fact that the given DocTest (`test`) generated `f` - failures out of `t` tried examples. - """ - f2, t2 = self._name2ft.get(test.name, (0,0)) - self._name2ft[test.name] = (f+f2, t+t2) - self.failures += f - self.tries += t - - __LINECACHE_FILENAME_RE = re.compile(r'[\w\.]+)' - r'\[(?P\d+)\]>$') - def __patched_linecache_getlines(self, filename, module_globals=None): - m = self.__LINECACHE_FILENAME_RE.match(filename) - if m and m.group('name') == self.test.name: - example = self.test.examples[int(m.group('examplenum'))] - return example.source.splitlines(True) - elif func_code(self.save_linecache_getlines).co_argcount > 1: - return self.save_linecache_getlines(filename, module_globals) - else: - return self.save_linecache_getlines(filename) - - def run(self, test, compileflags=None, out=None, clear_globs=True): - """ - Run the examples in `test`, and display the results using the - writer function `out`. - - The examples are run in the namespace `test.globs`. 
If - `clear_globs` is true (the default), then this namespace will - be cleared after the test runs, to help with garbage - collection. If you would like to examine the namespace after - the test completes, then use `clear_globs=False`. - - `compileflags` gives the set of flags that should be used by - the Python compiler when running the examples. If not - specified, then it will default to the set of future-import - flags that apply to `globs`. - - The output of each example is checked using - `DocTestRunner.check_output`, and the results are formatted by - the `DocTestRunner.report_*` methods. - """ - self.test = test - - if compileflags is None: - compileflags = _extract_future_flags(test.globs) - - save_stdout = sys.stdout - if out is None: - out = save_stdout.write - sys.stdout = self._fakeout - - # Patch pdb.set_trace to restore sys.stdout during interactive - # debugging (so it's not still redirected to self._fakeout). - # Note that the interactive output will go to *our* - # save_stdout, even if that's not the real sys.stdout; this - # allows us to write test cases for the set_trace behavior. - save_set_trace = pdb.set_trace - self.debugger = _OutputRedirectingPdb(save_stdout) - self.debugger.reset() - pdb.set_trace = self.debugger.set_trace - - # Patch linecache.getlines, so we can see the example's source - # when we're inside the debugger. 
- self.save_linecache_getlines = linecache.getlines - linecache.getlines = self.__patched_linecache_getlines - - try: - return self.__run(test, compileflags, out) - finally: - sys.stdout = save_stdout - pdb.set_trace = save_set_trace - linecache.getlines = self.save_linecache_getlines - if clear_globs: - test.globs.clear() - - #///////////////////////////////////////////////////////////////// - # Summarization - #///////////////////////////////////////////////////////////////// - def summarize(self, verbose=None): - """ - Print a summary of all the test cases that have been run by - this DocTestRunner, and return a tuple `(f, t)`, where `f` is - the total number of failed examples, and `t` is the total - number of tried examples. - - The optional `verbose` argument controls how detailed the - summary is. If the verbosity is not specified, then the - DocTestRunner's verbosity is used. - """ - if verbose is None: - verbose = self._verbose - notests = [] - passed = [] - failed = [] - totalt = totalf = 0 - for x in self._name2ft.items(): - name, (f, t) = x - assert f <= t - totalt += t - totalf += f - if t == 0: - notests.append(name) - elif f == 0: - passed.append( (name, t) ) - else: - failed.append(x) - if verbose: - if notests: - print(len(notests), "items had no tests:") - notests.sort() - for thing in notests: - print(" ", thing) - if passed: - print(len(passed), "items passed all tests:") - passed.sort() - for thing, count in passed: - print(" %3d tests in %s" % (count, thing)) - if failed: - print(self.DIVIDER) - print(len(failed), "items had failures:") - failed.sort() - for thing, (f, t) in failed: - print(" %3d of %3d in %s" % (f, t, thing)) - if verbose: - print(totalt, "tests in", len(self._name2ft), "items.") - print(totalt - totalf, "passed and", totalf, "failed.") - if totalf: - print("***Test Failed***", totalf, "failures.") - elif verbose: - print("Test passed.") - return totalf, totalt - - 
#///////////////////////////////////////////////////////////////// - # Backward compatibility cruft to maintain doctest.master. - #///////////////////////////////////////////////////////////////// - def merge(self, other): - d = self._name2ft - for name, (f, t) in other._name2ft.items(): - if name in d: - print("*** DocTestRunner.merge: '" + name + "' in both" \ - " testers; summing outcomes.") - f2, t2 = d[name] - f = f + f2 - t = t + t2 - d[name] = f, t - -class OutputChecker: - """ - A class used to check the whether the actual output from a doctest - example matches the expected output. `OutputChecker` defines two - methods: `check_output`, which compares a given pair of outputs, - and returns true if they match; and `output_difference`, which - returns a string describing the differences between two outputs. - """ - def check_output(self, want, got, optionflags): - """ - Return True iff the actual output from an example (`got`) - matches the expected output (`want`). These strings are - always considered to match if they are identical; but - depending on what option flags the test runner is using, - several non-exact match types are also possible. See the - documentation for `TestRunner` for more information about - option flags. - """ - # Handle the common case first, for efficiency: - # if they're string-identical, always return true. - if got == want: - return True - - # The values True and False replaced 1 and 0 as the return - # value for boolean comparisons in Python 2.3. - if not (optionflags & DONT_ACCEPT_TRUE_FOR_1): - if (got,want) == ("True\n", "1\n"): - return True - if (got,want) == ("False\n", "0\n"): - return True - - # can be used as a special sequence to signify a - # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - # Replace in want with a blank line. 
- want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER), - '', want) - # If a line in got contains only spaces, then remove the - # spaces. - got = re.sub('(?m)^\s*?$', '', got) - if got == want: - return True - - # This flag causes doctest to ignore any differences in the - # contents of whitespace strings. Note that this can be used - # in conjunction with the ELLIPSIS flag. - if optionflags & NORMALIZE_WHITESPACE: - got = ' '.join(got.split()) - want = ' '.join(want.split()) - if got == want: - return True - - # The ELLIPSIS flag says to let the sequence "..." in `want` - # match any substring in `got`. - if optionflags & ELLIPSIS: - if _ellipsis_match(want, got): - return True - - # We didn't find any match; return false. - return False - - # Should we do a fancy diff? - def _do_a_fancy_diff(self, want, got, optionflags): - # Not unless they asked for a fancy diff. - if not optionflags & (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF): - return False - - # If expected output uses ellipsis, a meaningful fancy diff is - # too hard ... or maybe not. In two real-life failures Tim saw, - # a diff was a major help anyway, so this is commented out. - # [todo] _ellipsis_match() knows which pieces do and don't match, - # and could be the basis for a kick-ass diff in this case. - ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want: - ## return False - - # ndiff does intraline difference marking, so can be useful even - # for 1-line differences. - if optionflags & REPORT_NDIFF: - return True - - # The other diff types need at least a few lines to be helpful. - return want.count('\n') > 2 and got.count('\n') > 2 - - def output_difference(self, example, got, optionflags): - """ - Return a string describing the differences between the - expected output for a given example (`example`) and the actual - output (`got`). `optionflags` is the set of option flags used - to compare `want` and `got`. 
- """ - want = example.want - # If s are being used, then replace blank lines - # with in the actual output string. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got) - - # Check if we should use diff. - if self._do_a_fancy_diff(want, got, optionflags): - # Split want & got into lines. - want_lines = want.splitlines(True) # True == keep line ends - got_lines = got.splitlines(True) - # Use difflib to find their differences. - if optionflags & REPORT_UDIFF: - diff = difflib.unified_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'unified diff with -expected +actual' - elif optionflags & REPORT_CDIFF: - diff = difflib.context_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'context diff with expected followed by actual' - elif optionflags & REPORT_NDIFF: - engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK) - diff = list(engine.compare(want_lines, got_lines)) - kind = 'ndiff with -expected +actual' - else: - assert 0, 'Bad diff option' - # Remove trailing whitespace on diff output. - diff = [line.rstrip() + '\n' for line in diff] - return 'Differences (%s):\n' % kind + _indent(''.join(diff)) - - # If we're not using diff, then simply list the expected - # output followed by the actual output. - if want and got: - return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got)) - elif want: - return 'Expected:\n%sGot nothing\n' % _indent(want) - elif got: - return 'Expected nothing\nGot:\n%s' % _indent(got) - else: - return 'Expected nothing\nGot nothing\n' - -class DocTestFailure(Exception): - """A DocTest example has failed in debugging mode. 
- - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - got: the actual output - """ - def __init__(self, test, example, got): - self.test = test - self.example = example - self.got = got - - def __str__(self): - return str(self.test) - -class UnexpectedException(Exception): - """A DocTest example has encountered an unexpected exception - - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - exc_info: the exception info - """ - def __init__(self, test, example, exc_info): - self.test = test - self.example = example - self.exc_info = exc_info - - def __str__(self): - return str(self.test) - -class DebugRunner(DocTestRunner): - r"""Run doc tests but raise an exception as soon as there is a failure. - - If an unexpected exception occurs, an UnexpectedException is raised. - It contains the test, the example, and the original exception: - - >>> runner = DebugRunner(verbose=False) - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except UnexpectedException, failure: - ... pass - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... - KeyError - - We wrap the original exception to give the calling application - access to the test and example information. - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> try: - ... runner.run(test) - ... except DocTestFailure, failure: - ... 
pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - If a failure or error occurs, the globals are left intact: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 1} - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... >>> raise KeyError - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - Traceback (most recent call last): - ... - UnexpectedException: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 2} - - But the globals are cleared if there is no error: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - (0, 1) - - >>> test.globs - {} - - """ - - def run(self, test, compileflags=None, out=None, clear_globs=True): - r = DocTestRunner.run(self, test, compileflags, out, False) - if clear_globs: - test.globs.clear() - return r - - def report_unexpected_exception(self, out, test, example, exc_info): - raise UnexpectedException(test, example, exc_info) - - def report_failure(self, out, test, example, got): - raise DocTestFailure(test, example, got) - -###################################################################### -## 6. Test Functions -###################################################################### -# These should be backwards compatible. - -# For backward compatibility, a global instance of a DocTestRunner -# class, updated by testmod. 
-master = None - -def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, - raise_on_error=False, exclude_empty=False): - """m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, raise_on_error=False, - exclude_empty=False - - Test examples in docstrings in functions and classes reachable - from module m (or the current module if m is not supplied), starting - with m.__doc__. Unless isprivate is specified, private names - are not skipped. - - Also test examples reachable from dict m.__test__ if it exists and is - not None. m.__test__ maps names to functions, classes and strings; - function and class docstrings are tested even if the name is private; - strings are tested directly, as if they were docstrings. - - Return (#failures, #tests). - - See doctest.__doc__ for an overview. - - Optional keyword arg "name" gives the name of the module; by default - use m.__name__. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use m.__dict__. A copy of this - dict is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. This is new in 2.4. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. This is new in 2.3. 
Possible values (see the - docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Deprecated in Python 2.4: - Optional keyword arg "isprivate" specifies a function used to - determine whether a name is private. The default function is - treat all functions as public. Optionally, "isprivate" can be - set to doctest.is_private to skip over functions marked as private - using the underscore naming convention; see its docs for details. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - global master - - if isprivate is not None: - warnings.warn("the isprivate argument is deprecated; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning) - - # If no module was given, then use __main__. - if m is None: - # DWA - m will still be None if this wasn't invoked from the command - # line, in which case the following TypeError is about as good an error - # as we should expect - m = sys.modules.get('__main__') - - # Check that we were actually given a module. - if not inspect.ismodule(m): - raise TypeError("testmod: module required; %r" % (m,)) - - # If no name was given, then use the module's name. - if name is None: - name = m.__name__ - - # Find, parse, and run all tests in the given module. 
- finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - for test in finder.find(m, name, globs=globs, extraglobs=extraglobs): - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def testfile(filename, module_relative=True, name=None, package=None, - globs=None, verbose=None, report=True, optionflags=0, - extraglobs=None, raise_on_error=False, parser=DocTestParser()): - """ - Test examples in the given file. Return (#failures, #tests). - - Optional keyword arg "module_relative" specifies how filenames - should be interpreted: - - - If "module_relative" is True (the default), then "filename" - specifies a module-relative path. By default, this path is - relative to the calling module's directory; but if the - "package" argument is specified, then it is relative to that - package. To ensure os-independence, "filename" should use - "/" characters to separate path segments, and should not - be an absolute path (i.e., it may not begin with "/"). - - - If "module_relative" is False, then "filename" specifies an - os-specific path. The path may be absolute or relative (to - the current working directory). - - Optional keyword arg "name" gives the name of the test; by default - use the file's basename. - - Optional keyword argument "package" is a Python package or the - name of a Python package whose directory should be used as the - base directory for a module relative filename. If no package is - specified, then the calling module's directory is used as the base - directory for module relative filenames. It is an error to - specify "package" if "module_relative" is False. 
- - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use {}. A copy of this dict - is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. Possible values (see the docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Optional keyword arg "parser" specifies a DocTestParser (or - subclass) that should be used to extract tests from the files. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. 
- """ - global master - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path - if module_relative: - package = _normalize_module(package) - filename = _module_relative_path(package, filename) - - # If no name was given, then use the file's name. - if name is None: - name = os.path.basename(filename) - - # Assemble the globals. - if globs is None: - globs = {} - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - # Read the file, convert it to a test, and run it. - f = open(filename) - s = f.read() - f.close() - test = parser.get_doctest(s, globs, name, filename, 0) - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def run_docstring_examples(f, globs, verbose=False, name="NoName", - compileflags=None, optionflags=0): - """ - Test examples in the given object's docstring (`f`), using `globs` - as globals. Optional argument `name` is used in failure messages. - If the optional argument `verbose` is true, then generate output - even if there are no failures. - - `compileflags` gives the set of flags that should be used by the - Python compiler when running the examples. If not specified, then - it will default to the set of future-import flags that apply to - `globs`. - - Optional keyword arg `optionflags` specifies options for the - testing and output. See the documentation for `testmod` for more - information. - """ - # Find, parse, and run all tests in the given module. 
- finder = DocTestFinder(verbose=verbose, recurse=False) - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - for test in finder.find(f, name, globs=globs): - runner.run(test, compileflags=compileflags) - -###################################################################### -## 7. Tester -###################################################################### -# This is provided only for backwards compatibility. It's not -# actually used in any way. - -class Tester: - def __init__(self, mod=None, globs=None, verbose=None, - isprivate=None, optionflags=0): - - warnings.warn("class Tester is deprecated; " - "use class doctest.DocTestRunner instead", - DeprecationWarning, stacklevel=2) - if mod is None and globs is None: - raise TypeError("Tester.__init__: must specify mod or globs") - if mod is not None and not inspect.ismodule(mod): - raise TypeError("Tester.__init__: mod must be a module; %r" % - (mod,)) - if globs is None: - globs = mod.__dict__ - self.globs = globs - - self.verbose = verbose - self.isprivate = isprivate - self.optionflags = optionflags - self.testfinder = DocTestFinder(_namefilter=isprivate) - self.testrunner = DocTestRunner(verbose=verbose, - optionflags=optionflags) - - def runstring(self, s, name): - test = DocTestParser().get_doctest(s, self.globs, name, None, None) - if self.verbose: - print("Running string", name) - (f,t) = self.testrunner.run(test) - if self.verbose: - print(f, "of", t, "examples failed in string", name) - return (f,t) - - def rundoc(self, object, name=None, module=None): - f = t = 0 - tests = self.testfinder.find(object, name, module=module, - globs=self.globs) - for test in tests: - (f2, t2) = self.testrunner.run(test) - (f,t) = (f+f2, t+t2) - return (f,t) - - def rundict(self, d, name, module=None): - import types - m = types.ModuleType(name) - m.__dict__.update(d) - if module is None: - module = False - return self.rundoc(m, name, module) - - def run__test__(self, d, name): - import types - m = 
types.ModuleType(name) - m.__test__ = d - return self.rundoc(m, name) - - def summarize(self, verbose=None): - return self.testrunner.summarize(verbose) - - def merge(self, other): - self.testrunner.merge(other.testrunner) - -###################################################################### -## 8. Unittest Support -###################################################################### - -_unittest_reportflags = 0 - -def set_unittest_reportflags(flags): - """Sets the unittest option flags. - - The old flag is returned so that a runner could restore the old - value if it wished to: - - >>> old = _unittest_reportflags - >>> set_unittest_reportflags(REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) == old - True - - >>> import doctest - >>> doctest._unittest_reportflags == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - - Only reporting flags can be set: - - >>> set_unittest_reportflags(ELLIPSIS) - Traceback (most recent call last): - ... - ValueError: ('Only reporting flags allowed', 8) - - >>> set_unittest_reportflags(old) == (REPORT_NDIFF | - ... 
REPORT_ONLY_FIRST_FAILURE) - True - """ - global _unittest_reportflags - - if (flags & REPORTING_FLAGS) != flags: - raise ValueError("Only reporting flags allowed", flags) - old = _unittest_reportflags - _unittest_reportflags = flags - return old - - -class DocTestCase(unittest.TestCase): - - def __init__(self, test, optionflags=0, setUp=None, tearDown=None, - checker=None): - - unittest.TestCase.__init__(self) - self._dt_optionflags = optionflags - self._dt_checker = checker - self._dt_test = test - self._dt_setUp = setUp - self._dt_tearDown = tearDown - - def setUp(self): - test = self._dt_test - - if self._dt_setUp is not None: - self._dt_setUp(test) - - def tearDown(self): - test = self._dt_test - - if self._dt_tearDown is not None: - self._dt_tearDown(test) - - test.globs.clear() - - def runTest(self): - test = self._dt_test - old = sys.stdout - new = StringIO() - optionflags = self._dt_optionflags - - if not (optionflags & REPORTING_FLAGS): - # The option flags don't include any reporting flags, - # so add the default reporting flags - optionflags |= _unittest_reportflags - - runner = DocTestRunner(optionflags=optionflags, - checker=self._dt_checker, verbose=False) - - try: - runner.DIVIDER = "-"*70 - failures, tries = runner.run( - test, out=new.write, clear_globs=False) - finally: - sys.stdout = old - - if failures: - raise self.failureException(self.format_failure(new.getvalue())) - - def format_failure(self, err): - test = self._dt_test - if test.lineno is None: - lineno = 'unknown line number' - else: - lineno = '%s' % test.lineno - lname = '.'.join(test.name.split('.')[-1:]) - return ('Failed doctest test for %s\n' - ' File "%s", line %s, in %s\n\n%s' - % (test.name, test.filename, lineno, lname, err) - ) - - def debug(self): - r"""Run the test case without results and without catching exceptions - - The unit test framework includes a debug method on test cases - and test suites to support post-mortem debugging. 
The test code - is run in such a way that errors are not caught. This way a - caller can catch the errors and initiate post-mortem debugging. - - The DocTestCase provides a debug method that raises - UnexpectedException errors if there is an unexepcted - exception: - - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - >>> try: - ... case.debug() - ... except UnexpectedException, failure: - ... pass - - The UnexpectedException contains the test, the example, and - the original exception: - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... - KeyError - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - - >>> try: - ... case.debug() - ... except DocTestFailure, failure: - ... pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - """ - - self.setUp() - runner = DebugRunner(optionflags=self._dt_optionflags, - checker=self._dt_checker, verbose=False) - runner.run(self._dt_test) - self.tearDown() - - def id(self): - return self._dt_test.name - - def __repr__(self): - name = self._dt_test.name.split('.') - return "%s (%s)" % (name[-1], '.'.join(name[:-1])) - - __str__ = __repr__ - - def shortDescription(self): - return "Doctest: " + self._dt_test.name - -def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, - **options): - """ - Convert doctest tests for a module to a unittest test suite. - - This converts each documentation string in a module that - contains doctest tests to a unittest test case. 
If any of the - tests in a doc string fail, then the test case fails. An exception - is raised showing the name of the file containing the test and a - (sometimes approximate) line number. - - The `module` argument provides the module to be tested. The argument - can be either a module or a module name. - - If no argument is given, the calling module is used. - - A number of options may be provided as keyword arguments: - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - """ - - if test_finder is None: - test_finder = DocTestFinder() - - module = _normalize_module(module) - tests = test_finder.find(module, globs=globs, extraglobs=extraglobs) - if globs is None: - globs = module.__dict__ - if not tests: - # Why do we want to do this? Because it reveals a bug that might - # otherwise be hidden. 
- raise ValueError(module, "has no tests") - - tests.sort() - suite = unittest.TestSuite() - for test in tests: - if len(test.examples) == 0: - continue - if not test.filename: - filename = module.__file__ - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - test.filename = filename - suite.addTest(DocTestCase(test, **options)) - - return suite - -class DocFileCase(DocTestCase): - - def id(self): - return '_'.join(self._dt_test.name.split('.')) - - def __repr__(self): - return self._dt_test.filename - __str__ = __repr__ - - def format_failure(self, err): - return ('Failed doctest test for %s\n File "%s", line 0\n\n%s' - % (self._dt_test.name, self._dt_test.filename, err) - ) - -def DocFileTest(path, module_relative=True, package=None, - globs=None, parser=DocTestParser(), **options): - if globs is None: - globs = {} - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path. - if module_relative: - package = _normalize_module(package) - path = _module_relative_path(package, path) - - # Find the file and read it. - name = os.path.basename(path) - f = open(path) - doc = f.read() - f.close() - - # Convert it to a test, and wrap it in a DocFileCase. - test = parser.get_doctest(doc, globs, name, path, 0) - return DocFileCase(test, **options) - -def DocFileSuite(*paths, **kw): - """A unittest suite for one or more doctest files. - - The path to each doctest file is given as a string; the - interpretation of that string depends on the keyword argument - "module_relative". - - A number of options may be provided as keyword arguments: - - module_relative - If "module_relative" is True, then the given file paths are - interpreted as os-independent module-relative paths. By - default, these paths are relative to the calling module's - directory; but if the "package" argument is specified, then - they are relative to that package. 
To ensure os-independence, - "filename" should use "/" characters to separate path - segments, and may not be an absolute path (i.e., it may not - begin with "/"). - - If "module_relative" is False, then the given file paths are - interpreted as os-specific paths. These paths may be absolute - or relative (to the current working directory). - - package - A Python package or the name of a Python package whose directory - should be used as the base directory for module relative paths. - If "package" is not specified, then the calling module's - directory is used as the base directory for module relative - filenames. It is an error to specify "package" if - "module_relative" is False. - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - - parser - A DocTestParser (or subclass) that should be used to extract - tests from the files. - """ - suite = unittest.TestSuite() - - # We do this here so that _normalize_module is called at the right - # level. If it were called in DocFileTest, then this function - # would be the caller and we might guess the package incorrectly. - if kw.get('module_relative', True): - kw['package'] = _normalize_module(kw.get('package')) - - for path in paths: - suite.addTest(DocFileTest(path, **kw)) - - return suite - -###################################################################### -## 9. 
Debugging Support -###################################################################### - -def script_from_examples(s): - r"""Extract script from text with examples. - - Converts text with examples to a Python script. Example input is - converted to regular code. Example output and all other words - are converted to comments: - - >>> text = ''' - ... Here are examples of simple math. - ... - ... Python has super accurate integer addition - ... - ... >>> 2 + 2 - ... 5 - ... - ... And very friendly error messages: - ... - ... >>> 1/0 - ... To Infinity - ... And - ... Beyond - ... - ... You can use logic if you want: - ... - ... >>> if 0: - ... ... blah - ... ... blah - ... ... - ... - ... Ho hum - ... ''' - - >>> print script_from_examples(text) - # Here are examples of simple math. - # - # Python has super accurate integer addition - # - 2 + 2 - # Expected: - ## 5 - # - # And very friendly error messages: - # - 1/0 - # Expected: - ## To Infinity - ## And - ## Beyond - # - # You can use logic if you want: - # - if 0: - blah - blah - # - # Ho hum - """ - output = [] - for piece in DocTestParser().parse(s): - if isinstance(piece, Example): - # Add the example's source code (strip trailing NL) - output.append(piece.source[:-1]) - # Add the expected output: - want = piece.want - if want: - output.append('# Expected:') - output += ['## '+l for l in want.split('\n')[:-1]] - else: - # Add non-example text. - output += [_comment_line(l) - for l in piece.split('\n')[:-1]] - - # Trim junk on both ends. - while output and output[-1] == '#': - output.pop() - while output and output[0] == '#': - output.pop(0) - # Combine the output, and return it. - return '\n'.join(output) - -def testsource(module, name): - """Extract the test sources from a doctest docstring as a script. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the doc string with tests to be debugged. 
- """ - module = _normalize_module(module) - tests = DocTestFinder().find(module) - test = [t for t in tests if t.name == name] - if not test: - raise ValueError(name, "not found in tests") - test = test[0] - testsrc = script_from_examples(test.docstring) - return testsrc - -def debug_src(src, pm=False, globs=None): - """Debug a single doctest docstring, in argument `src`'""" - testsrc = script_from_examples(src) - debug_script(testsrc, pm, globs) - -def debug_script(src, pm=False, globs=None): - "Debug a test script. `src` is the script, as a string." - import pdb - - # Note that tempfile.NameTemporaryFile() cannot be used. As the - # docs say, a file so created cannot be opened by name a second time - # on modern Windows boxes, and execfile() needs to open it. - srcfilename = tempfile.mktemp(".py", "doctestdebug") - f = open(srcfilename, 'w') - f.write(src) - f.close() - - try: - if globs: - globs = globs.copy() - else: - globs = {} - - if pm: - try: - execfile(srcfilename, globs, globs) - except: - print(sys.exc_info()[1]) - pdb.post_mortem(sys.exc_info()[2]) - else: - # Note that %r is vital here. '%s' instead can, e.g., cause - # backslashes to get treated as metacharacters on Windows. - pdb.run("execfile(%r)" % srcfilename, globs, globs) - - finally: - os.remove(srcfilename) - -def debug(module, name, pm=False): - """Debug a single doctest docstring. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the docstring with tests to be debugged. - """ - module = _normalize_module(module) - testsrc = testsource(module, name) - debug_script(testsrc, pm, module.__dict__) - -###################################################################### -## 10. Example Usage -###################################################################### -class _TestClass: - """ - A pointless class, for sanity-checking of docstring testing. 
- - Methods: - square() - get() - - >>> _TestClass(13).get() + _TestClass(-12).get() - 1 - >>> hex(_TestClass(13).square().get()) - '0xa9' - """ - - def __init__(self, val): - """val -> _TestClass object with associated value val. - - >>> t = _TestClass(123) - >>> print t.get() - 123 - """ - - self.val = val - - def square(self): - """square() -> square TestClass's associated value - - >>> _TestClass(13).square().get() - 169 - """ - - self.val = self.val ** 2 - return self - - def get(self): - """get() -> return TestClass's associated value. - - >>> x = _TestClass(-42) - >>> print x.get() - -42 - """ - - return self.val - -__test__ = {"_TestClass": _TestClass, - "string": r""" - Example of a string object, searched as-is. - >>> x = 1; y = 2 - >>> x + y, x * y - (3, 2) - """, - - "bool-int equivalence": r""" - In 2.2, boolean expressions displayed - 0 or 1. By default, we still accept - them. This can be disabled by passing - DONT_ACCEPT_TRUE_FOR_1 to the new - optionflags argument. - >>> 4 == 4 - 1 - >>> 4 == 4 - True - >>> 4 > 4 - 0 - >>> 4 > 4 - False - """, - - "blank lines": r""" - Blank lines can be marked with : - >>> print 'foo\n\nbar\n' - foo - - bar - - """, - - "ellipsis": r""" - If the ellipsis flag is used, then '...' can be used to - elide substrings in the desired output: - >>> print range(1000) #doctest: +ELLIPSIS - [0, 1, 2, ..., 999] - """, - - "whitespace normalization": r""" - If the whitespace normalization flag is used, then - differences in whitespace are ignored. 
- >>> print range(30) #doctest: +NORMALIZE_WHITESPACE - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29] - """, - } - -def _test(): - r = unittest.TextTestRunner() - r.run(DocTestSuite()) - -if __name__ == "__main__": - _test() - diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/environment.py b/libs/setuptools-2.2/build/lib/setuptools/tests/environment.py deleted file mode 100644 index 476d280..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/environment.py +++ /dev/null @@ -1,165 +0,0 @@ -import os -import zipfile -import sys -import tempfile -import unittest -import shutil -import stat -import unicodedata - -from subprocess import Popen as _Popen, PIPE as _PIPE - - -def _extract(self, member, path=None, pwd=None): - """for zipfile py2.5 borrowed from cpython""" - if not isinstance(member, zipfile.ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return _extract_member(self, member, path, pwd) - - -def _extract_from_zip(self, name, dest_path): - dest_file = open(dest_path, 'wb') - try: - dest_file.write(self.read(name)) - finally: - dest_file.close() - - -def _extract_member(self, member, targetpath, pwd): - """for zipfile py2.5 borrowed from cpython""" - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. 
- upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - _extract_from_zip(self, member.filename, targetpath) - - return targetpath - - -def _remove_dir(target): - - #on windows this seems to a problem - for dir_path, dirs, files in os.walk(target): - os.chmod(dir_path, stat.S_IWRITE) - for filename in files: - os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE) - shutil.rmtree(target) - - -class ZippedEnvironment(unittest.TestCase): - - datafile = None - dataname = None - old_cwd = None - - def setUp(self): - if self.datafile is None or self.dataname is None: - return - - if not os.path.isfile(self.datafile): - self.old_cwd = None - return - - self.old_cwd = os.getcwd() - - self.temp_dir = tempfile.mkdtemp() - zip_file, source, target = [None, None, None] - try: - zip_file = zipfile.ZipFile(self.datafile) - for files in zip_file.namelist(): - _extract(zip_file, files, self.temp_dir) - finally: - if zip_file: - zip_file.close() - del zip_file - - os.chdir(os.path.join(self.temp_dir, self.dataname)) - - def tearDown(self): - #Assume setUp was never completed - if self.dataname is None or self.datafile is None: - return - - try: - if self.old_cwd: - os.chdir(self.old_cwd) - _remove_dir(self.temp_dir) - except OSError: - #sigh? 
- pass - - -def _which_dirs(cmd): - result = set() - for path in os.environ.get('PATH', '').split(os.pathsep): - filename = os.path.join(path, cmd) - if os.access(filename, os.X_OK): - result.add(path) - return result - - -def run_setup_py(cmd, pypath=None, path=None, - data_stream=0, env=None): - """ - Execution command for tests, separate from those used by the - code directly to prevent accidental behavior issues - """ - if env is None: - env = dict() - for envname in os.environ: - env[envname] = os.environ[envname] - - #override the python path if needed - if pypath is not None: - env["PYTHONPATH"] = pypath - - #overide the execution path if needed - if path is not None: - env["PATH"] = path - if not env.get("PATH", ""): - env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip")) - env["PATH"] = os.pathsep.join(env["PATH"]) - - cmd = [sys.executable, "setup.py"] + list(cmd) - - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE, - shell=(sys.platform == 'win32'), env=env) - - data = proc.communicate()[data_stream] - except OSError: - return 1, '' - - #decode the console string if needed - if hasattr(data, "decode"): - data = data.decode() # should use the preffered encoding - data = unicodedata.normalize('NFC', data) - - #communciate calls wait() - return proc.returncode, data diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/py26compat.py b/libs/setuptools-2.2/build/lib/setuptools/tests/py26compat.py deleted file mode 100644 index d4fb891..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/py26compat.py +++ /dev/null @@ -1,14 +0,0 @@ -import unittest - -try: - # provide skipIf for Python 2.4-2.6 - skipIf = unittest.skipIf -except AttributeError: - def skipIf(condition, reason): - def skipper(func): - def skip(*args, **kwargs): - return - if condition: - return skip - return func - return skipper diff --git 
a/libs/setuptools-2.2/build/lib/setuptools/tests/script-with-bom.py b/libs/setuptools-2.2/build/lib/setuptools/tests/script-with-bom.py deleted file mode 100644 index 22dee0d..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/script-with-bom.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -result = 'passed' diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/server.py b/libs/setuptools-2.2/build/lib/setuptools/tests/server.py deleted file mode 100644 index ae2381e..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/server.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Basic http server for tests to simulate PyPI or custom indexes -""" -import sys -import time -import threading -from setuptools.compat import BaseHTTPRequestHandler -from setuptools.compat import (urllib2, URLError, HTTPServer, - SimpleHTTPRequestHandler) - -class IndexServer(HTTPServer): - """Basic single-threaded http server simulating a package index - - You can use this server in unittest like this:: - s = IndexServer() - s.start() - index_url = s.base_url() + 'mytestindex' - # do some test requests to the index - # The index files should be located in setuptools/tests/indexes - s.stop() - """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=SimpleHTTPRequestHandler): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - self._run = True - - def serve(self): - while self._run: - self.handle_request() - - def start(self): - self.thread = threading.Thread(target=self.serve) - self.thread.start() - - def stop(self): - "Stop the server" - - # Let the server finish the last request and wait for a new one. - time.sleep(0.1) - - # self.shutdown is not supported on python < 2.6, so just - # set _run to false, and make a request, causing it to - # terminate. 
- self._run = False - url = 'http://127.0.0.1:%(server_port)s/' % vars(self) - try: - if sys.version_info >= (2, 6): - urllib2.urlopen(url, timeout=5) - else: - urllib2.urlopen(url) - except URLError: - # ignore any errors; all that's important is the request - pass - self.thread.join() - self.socket.close() - - def base_url(self): - port = self.server_port - return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port - -class RequestRecorder(BaseHTTPRequestHandler): - def do_GET(self): - requests = vars(self.server).setdefault('requests', []) - requests.append(self) - self.send_response(200, 'OK') - -class MockServer(HTTPServer, threading.Thread): - """ - A simple HTTP Server that records the requests made to it. - """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=RequestRecorder): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - threading.Thread.__init__(self) - self.setDaemon(True) - self.requests = [] - - def run(self): - self.serve_forever() - - def url(self): - return 'http://localhost:%(server_port)s/' % vars(self) - url = property(url) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_bdist_egg.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_bdist_egg.py deleted file mode 100644 index 1a12218..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_bdist_egg.py +++ /dev/null @@ -1,69 +0,0 @@ -"""develop tests -""" -import sys -import os, re, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.bdist_egg import bdist_egg -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', py_modules=['hi']) -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - f = 
open('setup.py', 'w') - f.write(SETUP_PY) - f.close() - f = open('hi.py', 'w') - f.write('1\n') - f.close() - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_bdist_egg(self): - dist = Distribution(dict( - script_name='setup.py', - script_args=['bdist_egg'], - name='foo', - py_modules=['hi'] - )) - os.makedirs(os.path.join('build', 'src')) - old_stdout = sys.stdout - sys.stdout = o = StringIO() - try: - dist.parse_command_line() - dist.run_commands() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - [content] = os.listdir('dist') - self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) - -def test_suite(): - return unittest.makeSuite(TestDevelopTest) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_build_ext.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_build_ext.py deleted file mode 100644 index a520ced..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_build_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -"""build_ext tests -""" -import os, shutil, tempfile, unittest -from distutils.command.build_ext import build_ext as distutils_build_ext -from setuptools.command.build_ext import build_ext -from setuptools.dist import Distribution - -class TestBuildExtTest(unittest.TestCase): - - def test_get_ext_filename(self): - # setuptools needs to give back the same - # result than distutils, even if the fullname - # is not in ext_map - dist = Distribution() - cmd = build_ext(dist) - cmd.ext_map['foo/bar'] = '' - res = cmd.get_ext_filename('foo') - wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') - assert res == wanted 
- diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_develop.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_develop.py deleted file mode 100644 index 7b90161..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_develop.py +++ /dev/null @@ -1,122 +0,0 @@ -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.command.develop import develop -from setuptools.command import easy_install as easy_install_pkg -from setuptools.compat import StringIO -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['foo'], - use_2to3=True, -) -""" - -INIT_PY = """print "foo" -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'foo')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # foo/__init__.py - init = os.path.join(self.dir, 'foo', '__init__.py') - f = open(init, 'w') - f.write(INIT_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_develop(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - dist = Distribution( - dict(name='foo', - packages=['foo'], - use_2to3=True, - version='0.0', - )) - 
dist.script_name = 'setup.py' - cmd = develop(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - #sys.stdout = StringIO() - try: - cmd.run() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - content = os.listdir(site.USER_SITE) - content.sort() - self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) - - # Check that we are using the right code. - egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') - try: - path = egg_link_file.read().split()[0].strip() - finally: - egg_link_file.close() - init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') - try: - init = init_file.read().strip() - finally: - init_file.close() - if sys.version < "3": - self.assertEqual(init, 'print "foo"') - else: - self.assertEqual(init, 'print("foo")') - - def notest_develop_with_setup_requires(self): - - wanted = ("Could not find suitable distribution for " - "Requirement.parse('I-DONT-EXIST')") - old_dir = os.getcwd() - os.chdir(self.dir) - try: - try: - dist = Distribution({'setup_requires': ['I_DONT_EXIST']}) - except DistutilsError: - e = sys.exc_info()[1] - error = str(e) - if error == wanted: - pass - finally: - os.chdir(old_dir) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_dist_info.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_dist_info.py deleted file mode 100644 index a8adb68..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_dist_info.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Test .dist-info style distributions. 
-""" -import os -import shutil -import tempfile -import unittest -import textwrap - -try: - import ast -except: - pass - -import pkg_resources - -from setuptools.tests.py26compat import skipIf - -def DALS(s): - "dedent and left-strip" - return textwrap.dedent(s).lstrip() - -class TestDistInfo(unittest.TestCase): - - def test_distinfo(self): - dists = {} - for d in pkg_resources.find_distributions(self.tmpdir): - dists[d.project_name] = d - - assert len(dists) == 2, dists - - unversioned = dists['UnversionedDistribution'] - versioned = dists['VersionedDistribution'] - - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA - - @skipIf('ast' not in globals(), - "ast is used to test conditional dependencies (Python >= 2.6)") - def test_conditional_dependencies(self): - requires = [pkg_resources.Requirement.parse('splort==4'), - pkg_resources.Requirement.parse('quux>=1.1')] - - for d in pkg_resources.find_distributions(self.tmpdir): - self.assertEqual(d.requires(), requires[:1]) - self.assertEqual(d.requires(extras=('baz',)), requires) - self.assertEqual(d.extras, ['baz']) - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - versioned = os.path.join(self.tmpdir, - 'VersionedDistribution-2.718.dist-info') - os.mkdir(versioned) - metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: VersionedDistribution - Requires-Dist: splort (4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() - unversioned = os.path.join(self.tmpdir, - 'UnversionedDistribution.dist-info') - os.mkdir(unversioned) - metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: UnversionedDistribution - Version: 0.3 - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - 
finally: - metadata_file.close() - - def tearDown(self): - shutil.rmtree(self.tmpdir) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_easy_install.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_easy_install.py deleted file mode 100644 index d2cc7a0..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_easy_install.py +++ /dev/null @@ -1,456 +0,0 @@ -"""Easy install Tests -""" -import sys -import os -import shutil -import tempfile -import unittest -import site -import contextlib -import textwrap -import tarfile -import logging -import distutils.core - -from setuptools.compat import StringIO, BytesIO, next, urlparse -from setuptools.sandbox import run_setup, SandboxViolation -from setuptools.command.easy_install import ( - easy_install, fix_jython_executable, get_script_args, nt_quote_arg) -from setuptools.command.easy_install import PthDistributions -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution -from pkg_resources import working_set, VersionConflict -from pkg_resources import Distribution as PRDistribution -import setuptools.tests.server -import pkg_resources - -class FakeDist(object): - def get_entry_map(self, group): - if group != 'console_scripts': - return {} - return {'name': 'ep'} - - def as_requirement(self): - return 'spec' - -WANTED = """\ -#!%s -# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' -__requires__ = 'spec' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('spec', 'console_scripts', 'name')() - ) -""" % nt_quote_arg(fix_jython_executable(sys.executable, "")) - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestEasyInstallTest(unittest.TestCase): - - def test_install_site_py(self): - dist = Distribution() - cmd = easy_install(dist) - cmd.sitepy_installed = False - cmd.install_dir = tempfile.mkdtemp() - try: - cmd.install_site_py() - 
sitepy = os.path.join(cmd.install_dir, 'site.py') - self.assertTrue(os.path.exists(sitepy)) - finally: - shutil.rmtree(cmd.install_dir) - - def test_get_script_args(self): - dist = FakeDist() - - old_platform = sys.platform - try: - name, script = [i for i in next(get_script_args(dist))][0:2] - finally: - sys.platform = old_platform - - self.assertEqual(script, WANTED) - - def test_no_find_links(self): - # new option '--no-find-links', that blocks find-links added at - # the project level - dist = Distribution() - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.no_find_links = True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - self.assertEqual(cmd.package_index.scanned_urls, {}) - - # let's try without it (default behavior) - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - keys = sorted(cmd.package_index.scanned_urls.keys()) - self.assertEqual(keys, ['link1', 'link2']) - - -class TestPTHFileWriter(unittest.TestCase): - def test_add_from_cwd_site_sets_dirty(self): - '''a pth file manager should set dirty - if a distribution is in site but also the cwd - ''' - pth = PthDistributions('does-not_exist', [os.getcwd()]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(os.getcwd())) - self.assertTrue(pth.dirty) - - def test_add_from_site_is_ignored(self): - if os.name != 'nt': - location = '/test/location/does-not-have-to-exist' - else: - location = 'c:\\does_not_exist' - pth = PthDistributions('does-not_exist', [location, ]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(location)) - self.assertTrue(not pth.dirty) - - -class TestUserInstallTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = 
open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.old_enable_site = site.ENABLE_USER_SITE - self.old_file = easy_install_pkg.__file__ - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - easy_install_pkg.__file__ = site.USER_SITE - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - site.ENABLE_USER_SITE = self.old_enable_site - easy_install_pkg.__file__ = self.old_file - - def test_user_install_implied(self): - site.ENABLE_USER_SITE = True # disabled sometimes - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.ensure_finalized() - self.assertTrue(cmd.user, 'user should be implied') - - def test_multiproc_atexit(self): - try: - __import__('multiprocessing') - except ImportError: - # skip the test if multiprocessing is not available - return - - log = logging.getLogger('test_easy_install') - logging.basicConfig(level=logging.INFO, stream=sys.stderr) - log.info('this should not break') - - def test_user_install_not_implied_without_usersite_enabled(self): - site.ENABLE_USER_SITE = False # usually enabled - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.initialize_options() - self.assertFalse(cmd.user, 'NOT user should be implied') - - def test_local_index(self): - # make sure the local index is used - # when easy_install looks for installed - # packages - new_location = tempfile.mkdtemp() - target = tempfile.mkdtemp() - egg_file = os.path.join(new_location, 'foo-1.0.egg-info') - f = open(egg_file, 'w') - try: - f.write('Name: foo\n') - finally: - f.close() - - 
sys.path.append(target) - old_ppath = os.environ.get('PYTHONPATH') - os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path) - try: - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.install_dir = target - cmd.args = ['foo'] - cmd.ensure_finalized() - cmd.local_index.scan([new_location]) - res = cmd.easy_install('foo') - self.assertEqual(os.path.realpath(res.location), - os.path.realpath(new_location)) - finally: - sys.path.remove(target) - for basedir in [new_location, target, ]: - if not os.path.exists(basedir) or not os.path.isdir(basedir): - continue - try: - shutil.rmtree(basedir) - except: - pass - if old_ppath is not None: - os.environ['PYTHONPATH'] = old_ppath - else: - del os.environ['PYTHONPATH'] - - def test_setup_requires(self): - """Regression test for Distribute issue #318 - - Ensure that a package with setup_requires can be installed when - setuptools is installed in the user site-packages without causing a - SandboxViolation. - """ - - test_pkg = create_setup_requires_package(self.dir) - test_setup_py = os.path.join(test_pkg, 'setup.py') - - try: - with quiet_context(): - with reset_setup_stop_context(): - run_setup(test_setup_py, ['install']) - except SandboxViolation: - self.fail('Installation caused SandboxViolation') - - -class TestSetupRequires(unittest.TestCase): - - def test_setup_requires_honors_fetch_params(self): - """ - When easy_install installs a source distribution which specifies - setup_requires, it should honor the fetch parameters (such as - allow-hosts, index-url, and find-links). - """ - # set up a server which will simulate an alternate package index. - p_index = setuptools.tests.server.MockServer() - p_index.start() - netloc = 1 - p_index_loc = urlparse(p_index.url)[netloc] - if p_index_loc.endswith(':0'): - # Some platforms (Jython) don't find a port to which to bind, - # so skip this test for them. - return - with quiet_context(): - # create an sdist that has a build-time dependency. 
- with TestSetupRequires.create_sdist() as dist_file: - with tempdir_context() as temp_install_dir: - with environment_context(PYTHONPATH=temp_install_dir): - ei_params = ['--index-url', p_index.url, - '--allow-hosts', p_index_loc, - '--exclude-scripts', '--install-dir', temp_install_dir, - dist_file] - with reset_setup_stop_context(): - with argv_context(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - self.assertRaises(SystemExit, - easy_install_pkg.main, ei_params) - # there should have been two or three requests to the server - # (three happens on Python 3.3a) - self.assertTrue(2 <= len(p_index.requests) <= 3) - self.assertEqual(p_index.requests[0].path, '/does-not-exist/') - - @staticmethod - @contextlib.contextmanager - def create_sdist(): - """ - Return an sdist with a setup_requires dependency (of something that - doesn't exist) - """ - with tempdir_context() as dir: - dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_trivial_sdist( - dist_path, - textwrap.dedent(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """).lstrip()) - yield dist_path - - def test_setup_requires_overrides_version_conflict(self): - """ - Regression test for issue #323. - - Ensures that a distribution's setup_requires requirements can still be - installed and used locally even if a conflicting version of that - requirement is already on the path. 
- """ - - pr_state = pkg_resources.__getstate__() - fake_dist = PRDistribution('does-not-matter', project_name='foobar', - version='0.0') - working_set.add(fake_dist) - - try: - with tempdir_context() as temp_dir: - test_pkg = create_setup_requires_package(temp_dir) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with quiet_context() as (stdout, stderr): - with reset_setup_stop_context(): - try: - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, ['--name']) - except VersionConflict: - self.fail('Installing setup.py requirements ' - 'caused a VersionConflict') - - lines = stdout.readlines() - self.assertTrue(len(lines) > 0) - self.assertTrue(lines[-1].strip(), 'test_pkg') - finally: - pkg_resources.__setstate__(pr_state) - - -def create_setup_requires_package(path): - """Creates a source tree under path for a trivial test package that has a - single requirement in setup_requires--a tarball for that requirement is - also created and added to the dependency_links argument. - """ - - test_setup_attrs = { - 'name': 'test_pkg', 'version': '0.0', - 'setup_requires': ['foobar==0.1'], - 'dependency_links': [os.path.abspath(path)] - } - - test_pkg = os.path.join(path, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - os.mkdir(test_pkg) - - f = open(test_setup_py, 'w') - f.write(textwrap.dedent("""\ - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - f.close() - - foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') - make_trivial_sdist( - foobar_path, - textwrap.dedent("""\ - import setuptools - setuptools.setup( - name='foobar', - version='0.1' - ) - """)) - - return test_pkg - - -def make_trivial_sdist(dist_path, setup_py): - """Create a simple sdist tarball at dist_path, containing just a - setup.py, the contents of which are provided by the setup_py string. 
- """ - - setup_py_file = tarfile.TarInfo(name='setup.py') - try: - # Python 3 (StringIO gets converted to io module) - MemFile = BytesIO - except AttributeError: - MemFile = StringIO - setup_py_bytes = MemFile(setup_py.encode('utf-8')) - setup_py_file.size = len(setup_py_bytes.getvalue()) - dist = tarfile.open(dist_path, 'w:gz') - try: - dist.addfile(setup_py_file, fileobj=setup_py_bytes) - finally: - dist.close() - - -@contextlib.contextmanager -def tempdir_context(cd=lambda dir:None): - temp_dir = tempfile.mkdtemp() - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) - -@contextlib.contextmanager -def environment_context(**updates): - old_env = os.environ.copy() - os.environ.update(updates) - try: - yield - finally: - for key in updates: - del os.environ[key] - os.environ.update(old_env) - -@contextlib.contextmanager -def argv_context(repl): - old_argv = sys.argv[:] - sys.argv[:] = repl - yield - sys.argv[:] = old_argv - -@contextlib.contextmanager -def reset_setup_stop_context(): - """ - When the setuptools tests are run using setup.py test, and then - one wants to invoke another setup() command (such as easy_install) - within those tests, it's necessary to reset the global variable - in distutils.core so that the setup() command will run naturally. - """ - setup_stop_after = distutils.core._setup_stop_after - distutils.core._setup_stop_after = None - yield - distutils.core._setup_stop_after = setup_stop_after - - -@contextlib.contextmanager -def quiet_context(): - """ - Redirect stdout/stderr to StringIO objects to prevent console output from - distutils commands. 
- """ - - old_stdout = sys.stdout - old_stderr = sys.stderr - new_stdout = sys.stdout = StringIO() - new_stderr = sys.stderr = StringIO() - try: - yield new_stdout, new_stderr - finally: - new_stdout.seek(0) - new_stderr.seek(0) - sys.stdout = old_stdout - sys.stderr = old_stderr diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_egg_info.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_egg_info.py deleted file mode 100644 index 2785436..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_egg_info.py +++ /dev/null @@ -1,173 +0,0 @@ - -import os -import sys -import tempfile -import shutil -import unittest - -import pkg_resources -import warnings -from setuptools.command import egg_info -from setuptools import svn_utils -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') -"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" - - -class TestEggInfo(unittest.TestCase): - - def setUp(self): - self.test_dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.test_dir, '.svn')) - - self.old_cwd = os.getcwd() - os.chdir(self.test_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.test_dir) - - def _write_entries(self, entries): - fn = os.path.join(self.test_dir, '.svn', 'entries') - entries_f = open(fn, 'wb') - entries_f.write(entries) - entries_f.close() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_version_10_format(self): - """ - """ - #keeping this set for 1.6 is a good check on the get_svn_revision - #to ensure I return using svnversion what would had been returned - version_str = svn_utils.SvnInfo.get_svn_version() - version = [int(x) for x in version_str.split('.')[:2]] - if version != [1, 6]: - if hasattr(self, 'skipTest'): - self.skipTest('') - else: - sys.stderr.write('\n Skipping due to SVN Version\n') - return - - 
self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - self.assertEqual(rev, '89000') - - def test_version_10_format_legacy_parser(self): - """ - """ - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable: - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - #catch_warnings not available until py26 - warning_filters = warnings.filters - warnings.filters = warning_filters[:] - try: - warnings.simplefilter("ignore", DeprecationWarning) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - finally: - #restore the warning filters - warnings.filters = warning_filters - #restore the os path - if path_variable: - os.environ[path_variable] = old_path - - self.assertEqual(rev, '89000') - -DUMMY_SOURCE_TXT = """CHANGES.txt -CONTRIBUTORS.txt -HISTORY.txt -LICENSE -MANIFEST.in -README.txt -setup.py -dummy/__init__.py -dummy/test.txt -dummy.egg-info/PKG-INFO -dummy.egg-info/SOURCES.txt -dummy.egg-info/dependency_links.txt -dummy.egg-info/top_level.txt""" - - -class TestSvnDummy(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return None - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummy, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=1) - if code: - raise 
AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -class TestSvnDummyLegacy(environment.ZippedEnvironment): - - def setUp(self): - self.base_version = (1, 6) - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummyLegacy, self).setUp() - - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - path="", - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_markerlib.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_markerlib.py deleted file mode 100644 index dae71cb..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_markerlib.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import unittest -from setuptools.tests.py26compat import skipIf - -try: - import ast -except ImportError: - pass - -class TestMarkerlib(unittest.TestCase): - - @skipIf('ast' not in globals(), - "ast not available (Python < 2.6?)") - def test_markers(self): - from _markerlib import interpret, default_environment, compile - - os_name = os.name - - self.assertTrue(interpret("")) - - self.assertTrue(interpret("os.name != 'buuuu'")) - self.assertTrue(interpret("os_name != 'buuuu'")) - self.assertTrue(interpret("python_version > '1.0'")) - self.assertTrue(interpret("python_version < '5.0'")) - self.assertTrue(interpret("python_version 
<= '5.0'")) - self.assertTrue(interpret("python_version >= '1.0'")) - self.assertTrue(interpret("'%s' in os.name" % os_name)) - self.assertTrue(interpret("'%s' in os_name" % os_name)) - self.assertTrue(interpret("'buuuu' not in os.name")) - - self.assertFalse(interpret("os.name == 'buuuu'")) - self.assertFalse(interpret("os_name == 'buuuu'")) - self.assertFalse(interpret("python_version < '1.0'")) - self.assertFalse(interpret("python_version > '5.0'")) - self.assertFalse(interpret("python_version >= '5.0'")) - self.assertFalse(interpret("python_version <= '1.0'")) - self.assertFalse(interpret("'%s' not in os.name" % os_name)) - self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) - self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) - - environment = default_environment() - environment['extra'] = 'test' - self.assertTrue(interpret("extra == 'test'", environment)) - self.assertFalse(interpret("extra == 'doc'", environment)) - - def raises_nameError(): - try: - interpret("python.version == '42'") - except NameError: - pass - else: - raise Exception("Expected NameError") - - raises_nameError() - - def raises_syntaxError(): - try: - interpret("(x for x in (4,))") - except SyntaxError: - pass - else: - raise Exception("Expected SyntaxError") - - raises_syntaxError() - - statement = "python_version == '5'" - self.assertEqual(compile(statement).__doc__, statement) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_packageindex.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_packageindex.py deleted file mode 100644 index 664566a..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_packageindex.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Package Index Tests -""" -import sys -import os -import unittest -import pkg_resources -from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url -import distutils.errors -import setuptools.package_index -from setuptools.tests.server 
import IndexServer - -class TestPackageIndex(unittest.TestCase): - - def test_bad_url_bad_port(self): - index = setuptools.package_index.PackageIndex() - url = 'http://127.0.0.1:0/nonesuch/test_package_index' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_typo(self): - # issue 16 - # easy_install inquant.contentmirror.plone breaks because of a typo - # in its home URL - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_bad_status_line(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - def _urlopen(*args): - raise httplib.BadStatusLine('line') - - index.opener = _urlopen - url = 'http://example.com' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue('line' in str(v)) - else: - raise AssertionError('Should have raise here!') - - def test_bad_url_double_scheme(self): - """ - A bad URL with a double scheme should raise a DistutilsError. 
- """ - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue 20 - url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' - try: - index.open_url(url) - except distutils.errors.DistutilsError: - error = sys.exc_info()[1] - msg = unicode(error) - assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg - return - raise RuntimeError("Did not raise") - - def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue #160 - if sys.version_info[0] == 2 and sys.version_info[1] == 7: - # this should not fail - url = 'http://example.com' - page = ('') - index.process_index(url, page) - - def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - url = 'file:///tmp/test_package_index' - self.assertTrue(index.url_ok(url, True)) - - def test_links_priority(self): - """ - Download links from the pypi simple index should be used before - external download links. - https://bitbucket.org/tarek/distribute/issue/163 - - Usecase : - - someone uploads a package on pypi, a md5 is generated - - someone manually copies this link (with the md5 in the url) onto an - external page accessible from the package page. - - someone reuploads the package (with a different md5) - - while easy_installing, an MD5 error occurs because the external link - is used - -> Setuptools should use the link from pypi, not the external one. 
- """ - if sys.platform.startswith('java'): - # Skip this test on jython because binding to :0 fails - return - - # start an index server - server = IndexServer() - server.start() - index_url = server.base_url() + 'test_links_priority/simple/' - - # scan a test index - pi = setuptools.package_index.PackageIndex(index_url) - requirement = pkg_resources.Requirement.parse('foobar') - pi.find_packages(requirement) - server.stop() - - # the distribution has been found - self.assertTrue('foobar' in pi) - # we have only one link, because links are compared without md5 - self.assertTrue(len(pi['foobar'])==1) - # the link should be from the index - self.assertTrue('correct_md5' in pi['foobar'][0].location) - - def test_parse_bdist_wininst(self): - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) - - def test__vcs_split_rev_from_url(self): - """ - Test the basic usage of _vcs_split_rev_from_url - """ - vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url - url, rev = vsrfu('https://example.com/bar@2995') - self.assertEqual(url, 'https://example.com/bar') - self.assertEqual(rev, '2995') - - def test_local_index(self): - """ - local_open should be able to read an index from the file system. - """ - f = open('index.html', 'w') - f.write('
content
') - f.close() - try: - url = 'file:' + pathname2url(os.getcwd()) + '/' - res = setuptools.package_index.local_open(url) - finally: - os.remove('index.html') - assert 'content' in res.read() - - -class TestContentCheckers(unittest.TestCase): - - def test_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - checker.feed('You should probably not be using MD5'.encode('ascii')) - self.assertEqual(checker.hash.hexdigest(), - 'f12895fdffbd45007040d2e44df98478') - self.assertTrue(checker.is_valid()) - - def test_other_fragment(self): - "Content checks should succeed silently if no hash is present" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_blank_md5(self): - "Content checks should succeed if a hash is empty" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_get_hash_name_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - self.assertEqual(checker.hash_name, 'md5') - - def test_report(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - rep = checker.report(lambda x: x, 'My message about %s') - self.assertEqual(rep, 'My message about md5') diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_resources.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_resources.py deleted file mode 100644 index c9fcf76..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_resources.py +++ /dev/null @@ -1,620 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove - -import os 
-import sys -import tempfile -import shutil -from unittest import TestCase - -import pkg_resources -from pkg_resources import (parse_requirements, VersionConflict, parse_version, - Distribution, EntryPoint, Requirement, safe_version, safe_name, - WorkingSet) - -from setuptools.command.easy_install import (get_script_header, is_sh, - nt_quote_arg) -from setuptools.compat import StringIO, iteritems - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -def safe_repr(obj, short=False): - """ copied from Python2.7""" - try: - result = repr(obj) - except Exception: - result = object.__repr__(obj) - if not short or len(result) < pkg_resources._MAX_LENGTH: - return result - return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' - -class Metadata(pkg_resources.EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self,*pairs): - self.metadata = dict(pairs) - - def has_metadata(self,name): - return name in self.metadata - - def get_metadata(self,name): - return self.metadata[name] - - def get_metadata_lines(self,name): - return pkg_resources.yield_lines(self.get_metadata(name)) - -dist_from_fn = pkg_resources.Distribution.from_filename - -class DistroTests(TestCase): - - def testCollection(self): - # empty path should produce no distributions - ad = pkg_resources.Environment([], platform=None, python=None) - self.assertEqual(list(ad), []) - self.assertEqual(ad['FooPkg'],[]) - ad.add(dist_from_fn("FooPkg-1.3_1.egg")) - ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) - ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - self.assertTrue(ad['FooPkg']) - # But only 1 package - self.assertEqual(list(ad), ['foopkg']) - - # Distributions sort by version - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] - ) - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - self.assertEqual( - [dist.version for dist in 
ad['FooPkg']], ['1.4','1.2'] - ) - # And inserting adds them in order - ad.add(dist_from_fn("FooPkg-1.9.egg")) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] - ) - - ws = WorkingSet([]) - foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") - foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") - req, = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - self.assertEqual(ad.best_match(req,ws).version, '1.9') - # If a matching distro is already installed, should return only that - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]) - ws.add(foo12) - ws.add(foo14) - self.assertRaises(VersionConflict, ad.best_match, req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]) - ws.add(foo14) - ws.add(foo12) - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - def checkFooPkg(self,d): - self.assertEqual(d.project_name, "FooPkg") - self.assertEqual(d.key, "foopkg") - self.assertEqual(d.version, "1.3-1") - self.assertEqual(d.py_version, "2.4") - self.assertEqual(d.platform, "win32") - self.assertEqual(d.parsed_version, parse_version("1.3-1")) - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - self.assertEqual(d.py_version, sys.version[:3]) - self.assertEqual(d.platform, None) - - def testDistroParse(self): - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg") - self.checkFooPkg(d) - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", - metadata = Metadata( - ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") - ) - 
) - self.checkFooPkg(d) - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - self.assertEqual( - list(dist.requires(extras)), - list(parse_requirements(txt)) - ) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - def testResolve(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - self.assertEqual(list(ws.resolve([],ad)), []) - # Request something not in the collection -> DistributionNotFound - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad - ) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) - ) - ad.add(Foo) - ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - self.assertEqual(targets, [Foo]) - list(map(ws.add,targets)) - self.assertRaises(VersionConflict, ws.resolve, - parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad - ) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - self.assertEqual( - list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] - ) - # Requests for conflicting versions produce VersionConflict - self.assertRaises(VersionConflict, - ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad) - - def testDistroDependsOptions(self): - d = self.distRequires(""" - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - 
docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""") - self.checkRequires(d,"Twisted>=1.5") - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen","fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"] - ) - self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"]) - - -class EntryPointTests(TestCase): - - def assertfields(self, ep): - self.assertEqual(ep.name,"foo") - self.assertEqual(ep.module_name,"setuptools.tests.test_resources") - self.assertEqual(ep.attrs, ("EntryPointTests",)) - self.assertEqual(ep.extras, ("x",)) - self.assertTrue(ep.load() is EntryPointTests) - self.assertEqual( - str(ep), - "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ) - - def setUp(self): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) - - def testBasics(self): - ep = EntryPoint( - "foo", "setuptools.tests.test_resources", ["EntryPointTests"], - ["x"], self.dist - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - self.assertEqual(ep.name,"bar baz") - self.assertEqual(ep.module_name,"spammity") - self.assertEqual(ep.attrs, ()) - self.assertEqual(ep.extras, ("ping",)) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - self.assertEqual(ep.name,"fizzly") - self.assertEqual(ep.module_name,"wocka") - self.assertEqual(ep.attrs, ("foo",)) - self.assertEqual(ep.extras, ()) - - def testRejects(self): - for ep in [ - "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", - ]: - try: EntryPoint.parse(ep) - except ValueError: pass - else: 
raise AssertionError("Should've been bad", ep) - - def checkSubMap(self, m): - self.assertEqual(len(m), len(self.submap_expect)) - for key, ep in iteritems(self.submap_expect): - self.assertEqual(repr(m.get(key)), repr(ep)) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), - feature3=EntryPoint('feature3', 'this.module', extras=['something']) - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = 
Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.assertTrue(parse_version('1.2') in r) - self.assertTrue(parse_version('1.1') not in r) - self.assertTrue('1.2' in r) - self.assertTrue('1.1' not in r) - self.assertTrue(foo_dist not in r) - self.assertTrue(twist11 not in r) - self.assertTrue(twist12 in r) - - def testAdvancedContains(self): - r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.assertTrue(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.assertTrue(v not in r, (v,r)) - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized - self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - frozenset(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - self.assertTrue(d("foo-0.3a4.egg") not in r1) - self.assertTrue(d("foo-0.3a1.egg") not in r1) - self.assertTrue(d("foo-0.3a4.egg") not in r2) - - self.assertTrue(d("foo-0.3a2.egg") in r1) - self.assertTrue(d("foo-0.3a2.egg") in r2) - 
self.assertTrue(d("foo-0.3a3.egg") in r2) - self.assertTrue(d("foo-0.3a5.egg") in r2) - - def testSetuptoolsProjectName(self): - """ - The setuptools project should implement the setuptools package. - """ - - self.assertEqual( - Requirement.parse('setuptools').project_name, 'setuptools') - # setuptools 0.7 and higher means setuptools. - self.assertEqual( - Requirement.parse('setuptools == 0.7').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - sample = """ - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - self.assertEqual(list(pkg_resources.split_sections(sample)), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - 
list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', '0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1') - c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertTrue(p1= (3,) and os.environ.get("LC_CTYPE") - in (None, "C", "POSIX")): - return - - class java: - class lang: - class System: - @staticmethod - def getProperty(property): - return "" - sys.modules["java"] = java - - platform = sys.platform - sys.platform = 'java1.5.0_13' - stdout, stderr = sys.stdout, sys.stderr - try: - # A mock sys.executable that uses a shebang line (this file) - exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py') - self.assertEqual( - get_script_header('#!/usr/local/bin/python', executable=exe), - '#!/usr/bin/env %s\n' % exe) - - # Ensure we generate what is basically a broken shebang line - # when there's options, with a warning emitted - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python -x', - executable=exe), - '#!%s -x\n' % exe) - self.assertTrue('Unable to adapt shebang line' in 
sys.stdout.getvalue()) - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe), - '#!%s -x\n' % self.non_ascii_exe) - self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) - finally: - del sys.modules["java"] - sys.platform = platform - sys.stdout, sys.stderr = stdout, stderr - - -class NamespaceTests(TestCase): - - def setUp(self): - self._ns_pkgs = pkg_resources._namespace_packages.copy() - self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-") - os.makedirs(os.path.join(self._tmpdir, "site-pkgs")) - self._prev_sys_path = sys.path[:] - sys.path.append(os.path.join(self._tmpdir, "site-pkgs")) - - def tearDown(self): - shutil.rmtree(self._tmpdir) - pkg_resources._namespace_packages = self._ns_pkgs.copy() - sys.path = self._prev_sys_path[:] - - def _assertIn(self, member, container): - """ assertIn and assertTrue does not exist in Python2.3""" - if member not in container: - standardMsg = '%s not found in %s' % (safe_repr(member), - safe_repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - - def test_two_levels_deep(self): - """ - Test nested namespace packages - Create namespace packages in the following tree : - site-packages-1/pkg1/pkg2 - site-packages-2/pkg1/pkg2 - Check both are in the _namespace_packages dict and that their __path__ - is correct - """ - sys.path.append(os.path.join(self._tmpdir, "site-pkgs2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")) - ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" - for site in ["site-pkgs", "site-pkgs2"]: - pkg1_init = open(os.path.join(self._tmpdir, site, - "pkg1", "__init__.py"), "w") - pkg1_init.write(ns_str) - pkg1_init.close() - pkg2_init = open(os.path.join(self._tmpdir, site, - "pkg1", "pkg2", "__init__.py"), "w") - pkg2_init.write(ns_str) - pkg2_init.close() - import pkg1 - 
self._assertIn("pkg1", pkg_resources._namespace_packages.keys()) - try: - import pkg1.pkg2 - except ImportError: - self.fail("Setuptools tried to import the parent namespace package") - # check the _namespace_packages dict - self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys()) - self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"]) - # check the __path__ attribute contains both paths - self.assertEqual(pkg1.pkg2.__path__, [ - os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"), - os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")]) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_sandbox.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_sandbox.py deleted file mode 100644 index 3dad137..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,79 +0,0 @@ -"""develop tests -""" -import sys -import os -import shutil -import unittest -import tempfile -import types - -import pkg_resources -import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox, SandboxViolation - -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True - -class TestSandbox(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) - sandbox.run(self._file_writer(os.devnull)) - - def _file_writer(path): - def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() - return do_write - - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. 
- """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) - - def test_setup_py_with_BOM(self): - """ - It should be possible to execute a setup.py with a Byte Order Mark - """ - target = pkg_resources.resource_filename(__name__, - 'script-with-bom.py') - namespace = types.ModuleType('namespace') - setuptools.sandbox.execfile(target, vars(namespace)) - assert namespace.result == 'passed' - -if __name__ == '__main__': - unittest.main() diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_sdist.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_sdist.py deleted file mode 100644 index 71d1075..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_sdist.py +++ /dev/null @@ -1,535 +0,0 @@ -# -*- coding: utf-8 -*- -"""sdist tests""" - -import locale -import os -import shutil -import sys -import tempfile -import unittest -import unicodedata -import re -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -from setuptools.compat import StringIO, unicode -from setuptools.tests.py26compat import skipIf -from setuptools.command.sdist import sdist, walk_revctrl -from setuptools.command.egg_info import manifest_maker -from setuptools.dist import Distribution -from setuptools import svn_utils - -SETUP_ATTRS = { - 'name': 'sdist_test', - 'version': '0.0', - 'packages': ['sdist_test'], - 'package_data': {'sdist_test': ['*.txt']} -} - - -SETUP_PY = """\ -from setuptools import setup - -setup(**%r) -""" % SETUP_ATTRS - - -if sys.version_info >= (3,): - LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1') -else: - LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py' - - -# Cannot use context manager because of Python 2.4 -def quiet(): - 
global old_stdout, old_stderr - old_stdout, old_stderr = sys.stdout, sys.stderr - sys.stdout, sys.stderr = StringIO(), StringIO() - -def unquiet(): - sys.stdout, sys.stderr = old_stdout, old_stderr - - -# Fake byte literals for Python <= 2.5 -def b(s, encoding='utf-8'): - if sys.version_info >= (3,): - return s.encode(encoding) - return s - - -# Convert to POSIX path -def posix(path): - if sys.version_info >= (3,) and not isinstance(path, str): - return path.replace(os.sep.encode('ascii'), b('/')) - else: - return path.replace(os.sep, '/') - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, unicode): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -class TestSdistTest(unittest.TestCase): - - def setUp(self): - self.temp_dir = tempfile.mkdtemp() - f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') - f.write(SETUP_PY) - f.close() - # Set up the rest of the test package - test_pkg = os.path.join(self.temp_dir, 'sdist_test') - os.mkdir(test_pkg) - # *.rst was not included in package_data, so c.rst should not be - # automatically added to the manifest when not under version control - for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: - # Just touch the files; their contents are irrelevant - open(os.path.join(test_pkg, fname), 'w').close() - - self.old_cwd = os.getcwd() - os.chdir(self.temp_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.temp_dir) - - def test_package_data_in_sdist(self): - """Regression test for pull request #4: ensures that files listed in - package_data are included in the manifest even if they're not added to - version control. 
- """ - - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # squelch output - quiet() - try: - cmd.run() - finally: - unquiet() - - manifest = cmd.filelist.files - self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest) - - def test_manifest_is_written_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join('sdist_test', 'smörbröd.py') - - # Add UTF-8 filename and write manifest - quiet() - try: - mm.run() - mm.filelist.files.append(filename) - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - u_contents = contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - if sys.version_info >= (3,): - self.assertTrue(posix(filename) in u_contents) - else: - self.assertTrue(posix(filename) in contents) - - # Python 3 only - if sys.version_info >= (3,): - - def test_write_manifest_allows_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - - # Add filename and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - self.assertTrue(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertTrue(u_filename in mm.filelist.files) - - def test_write_manifest_skips_non_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - - # Add filename with surrogates and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The Latin-1 filename should have been skipped - self.assertFalse(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertFalse(u_filename in mm.filelist.files) - - def test_manifest_is_read_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add UTF-8 filename to manifest - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - cmd.read_manifest() - finally: - unquiet() - - # The filelist should contain the UTF-8 filename - if sys.version_info >= (3,): - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - - # Python 3 only - if sys.version_info >= (3,): - - def test_read_manifest_skips_non_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add Latin-1 filename to manifest - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - try: - cmd.read_manifest() - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - finally: - unquiet() - - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - - @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8', - 'Unittest fails if locale is not utf-8 but the manifests is recorded correctly') - def test_sdist_with_utf8_encoded_filename(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - open(filename, 'w').close() - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.platform == 'darwin': - filename = decompose(filename) - - if sys.version_info >= (3,): - fs_enc = sys.getfilesystemencoding() - - if sys.platform == 'win32': - if fs_enc == 'cp1252': - # Python 3 mangles the UTF-8 filename - filename = filename.decode('cp1252') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('mbcs') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - else: - self.assertTrue(filename in cmd.filelist.files) - - def test_sdist_with_latin1_encoded_filename(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - open(filename, 'w').close() - self.assertTrue(os.path.isfile(filename)) - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.version_info >= (3,): - #not all windows systems have a default FS encoding of cp1252 - if sys.platform == 'win32': - # Latin-1 is similar to Windows-1252 however - # on mbcs filesys it is not in latin-1 encoding - fs_enc = sys.getfilesystemencoding() - if fs_enc == 'mbcs': - filename = filename.decode('mbcs') - else: - filename = filename.decode('latin-1') - - self.assertTrue(filename in cmd.filelist.files) - else: - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - else: - # No conversion takes place under Python 2 and the file - # is included. We shall keep it that way for BBB. 
- self.assertTrue(filename in cmd.filelist.files) - - -class TestDummyOutput(environment.ZippedEnvironment): - - def setUp(self): - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', "dummy.zip") - self.dataname = "dummy" - super(TestDummyOutput, self).setUp() - - def _run(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=0) - if code: - info = "DIR: " + os.path.abspath('.') - info += "\n SDIST RETURNED: %i\n\n" % code - info += data - raise AssertionError(info) - - datalines = data.splitlines() - - possible = ( - "running sdist", - "running egg_info", - "creating dummy\.egg-info", - "writing dummy\.egg-info", - "writing top-level names to dummy\.egg-info", - "writing dependency_links to dummy\.egg-info", - "writing manifest file 'dummy\.egg-info", - "reading manifest file 'dummy\.egg-info", - "reading manifest template 'MANIFEST\.in'", - "writing manifest file 'dummy\.egg-info", - "creating dummy-0.1.1", - "making hard links in dummy-0\.1\.1", - "copying files to dummy-0\.1\.1", - "copying \S+ -> dummy-0\.1\.1", - "copying dummy", - "copying dummy\.egg-info", - "hard linking \S+ -> dummy-0\.1\.1", - "hard linking dummy", - "hard linking dummy\.egg-info", - "Writing dummy-0\.1\.1", - "creating dist", - "creating 'dist", - "Creating tar archive", - "running check", - "adding 'dummy-0\.1\.1", - "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", - "gzip .+ dist/dummy-0\.1\.1\.tar", - "removing 'dummy-0\.1\.1' \\(and everything under it\\)", - ) - - print(" DIR: " + os.path.abspath('.')) - for line in datalines: - found = False - for pattern in possible: - if re.match(pattern, line): - print(" READ: " + line) - found = True - break - if not found: - raise AssertionError("Unexpexected: %s\n-in-\n%s" - % (line, data)) - - return data - - def test_sources(self): - self._run() - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # 
None or Empty - return - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_walksvn(self): - if self.base_version >= (1, 6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - #TODO is this right - expected = set([ - os.path.join('a file'), - os.path.join(folder2, 'Changes.txt'), - os.path.join(folder2, 'MD5SUMS'), - os.path.join(folder2, 'README.txt'), - os.path.join(folder3, 'Changes.txt'), - os.path.join(folder3, 'MD5SUMS'), - os.path.join(folder3, 'README.txt'), - os.path.join(folder3, 'TODO.txt'), - os.path.join(folder3, 'fin'), - os.path.join('third_party', 'README.txt'), - os.path.join('folder', folder2, 'Changes.txt'), - os.path.join('folder', folder2, 'MD5SUMS'), - os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'Changes.txt'), - os.path.join('folder', folder3, 'fin'), - os.path.join('folder', folder3, 'MD5SUMS'), - os.path.join('folder', folder3, 'oops'), - os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'ZuMachen.txt'), - os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), - os.path.join('folder', 'lalala.txt'), - os.path.join('folder', 'quest.txt'), - # The example will have a deleted file - # (or should) but shouldn't return it - ]) - self.assertEqual(set(x for x in walk_revctrl()), expected) - - -def test_suite(): - return 
unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_svn.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_svn.py deleted file mode 100644 index 3340036..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_svn.py +++ /dev/null @@ -1,245 +0,0 @@ -# -*- coding: utf-8 -*- -"""svn tests""" - -import io -import os -import subprocess -import sys -import unittest -from setuptools.tests import environment -from setuptools.compat import unicode, unichr - -from setuptools import svn_utils -from setuptools.tests.py26compat import skipIf - - -def _do_svn_check(): - try: - subprocess.check_call(["svn", "--version"], - shell=(sys.platform == 'win32')) - return True - except (OSError, subprocess.CalledProcessError): - return False -_svn_check = _do_svn_check() - - -class TestSvnVersion(unittest.TestCase): - - def test_no_svn_found(self): - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable is None: - try: - self.skipTest('Cannot figure out how to modify path') - except AttributeError: # PY26 doesn't have this - return - - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - try: - version = svn_utils.SvnInfo.get_svn_version() - self.assertEqual(version, '') - finally: - os.environ[path_variable] = old_path - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_svn_should_exist(self): - version = svn_utils.SvnInfo.get_svn_version() - self.assertNotEqual(version, '') - -def _read_utf8_file(path): - fileobj = None - try: - fileobj = io.open(path, 'r', encoding='utf-8') - data = fileobj.read() - return data - finally: - if fileobj: - fileobj.close() - - -class ParserInfoXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_info.xml') - #Remember these are pre-generated to test XML parsing - # so these 
paths might not valid on your system - example_base = "%s_example" % svn_name - - data = _read_utf8_file(path) - - expected = set([ - ("\\".join((example_base, 'a file')), 'file'), - ("\\".join((example_base, 'folder')), 'dir'), - ("\\".join((example_base, 'folder', 'lalala.txt')), 'file'), - ("\\".join((example_base, 'folder', 'quest.txt')), 'file'), - ]) - self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - -class ParserExternalXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.xml') - example_base = svn_name + '_example' - data = _read_utf8_file(path) - - if ext_spaces: - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.sep.join((example_base, folder2)), - os.sep.join((example_base, folder3)), - # folder is third_party大介 - os.sep.join((example_base, - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - os.sep.join((example_base, 'folder', folder2)), - os.sep.join((example_base, 'folder', folder3)), - os.sep.join((example_base, 'folder', - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - ]) - - expected = set(os.path.normpath(x) for x in expected) - dir_base = os.sep.join(('C:', 'development', 'svn_example')) - self.assertEqual(set(x for x - in svn_utils.parse_externals_xml(data, dir_base)), expected) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - 
self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - - -class ParseExternal(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.txt') - data = _read_utf8_file(path) - - if ext_spaces: - expected = set(['third party2', 'third party3', - 'third party3b', 'third_party']) - else: - expected = set(['third_party2', 'third_party3', 'third_party']) - - self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # empty or null - self.dataname = None - self.datafile = None - return - - self.base_version = tuple([int(x) for x in version.split('.')[:2]]) - - if self.base_version < (1,3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1,9): - #trying the latest version - self.base_version = (1,8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_revision(self): - rev = svn_utils.SvnInfo.load('.').get_revision() - self.assertEqual(rev, 6) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_entries(self): - expected = set([ - (os.path.join('a file'), 'file'), - (os.path.join('folder'), 'dir'), - (os.path.join('folder', 'lalala.txt'), 'file'), - 
(os.path.join('folder', 'quest.txt'), 'file'), - #The example will have a deleted file (or should) - #but shouldn't return it - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set(x for x in info.entries), expected) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_externals(self): - if self.base_version >= (1,6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.path.join(folder2), - os.path.join(folder3), - os.path.join('third_party'), - os.path.join('folder', folder2), - os.path.join('folder', folder3), - os.path.join('folder', 'third_party'), - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set([x for x in info.externals]), expected) - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_test.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_test.py deleted file mode 100644 index 7a06a40..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_test.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: UTF-8 -*- - -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.test import test -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', -) -""" - -NS_INIT = """# -*- coding: Latin-1 -*- -# Söme Arbiträry Ünicode to test Issüé 310 -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) -""" -# Make sure 
this is Latin-1 binary, before writing: -if sys.version_info < (3,): - NS_INIT = NS_INIT.decode('UTF-8') -NS_INIT = NS_INIT.encode('Latin-1') - -TEST_PY = """import unittest - -class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used - -test_suite = unittest.makeSuite(TestTest) -""" - -class TestTestTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'name')) - os.mkdir(os.path.join(self.dir, 'name', 'space')) - os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'wt') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # name/__init__.py - init = os.path.join(self.dir, 'name', '__init__.py') - f = open(init, 'wb') - f.write(NS_INIT) - f.close() - # name/space/__init__.py - init = os.path.join(self.dir, 'name', 'space', '__init__.py') - f = open(init, 'wt') - f.write('#empty\n') - f.close() - # name/space/tests/__init__.py - init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') - f = open(init, 'wt') - f.write(TEST_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_test(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - dist = Distribution(dict( - name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', - use_2to3=True, - )) - 
dist.script_name = 'setup.py' - cmd = test(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - sys.stdout = StringIO() - try: - try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements. - cmd.run() - except SystemExit: # The test runner calls sys.exit, stop that making an error. - pass - finally: - sys.stdout = old_stdout - diff --git a/libs/setuptools-2.2/build/lib/setuptools/tests/test_upload_docs.py b/libs/setuptools-2.2/build/lib/setuptools/tests/test_upload_docs.py deleted file mode 100644 index 769f16c..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) - - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') - f.write("Hello world.") - f.close() - - # An empty folder. 
- os.mkdir(os.path.join(self.upload_dir, 'empty')) - - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_create_zipfile(self): - # Test to make sure zipfile creation handles common cases. - # This explicitly includes a folder containing an empty folder. - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.upload_dir = self.upload_dir - cmd.target_dir = self.upload_dir - tmp_dir = tempfile.mkdtemp() - tmp_file = os.path.join(tmp_dir, 'foo.zip') - try: - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - zip_file = zipfile.ZipFile(tmp_file) # woh... - - assert zip_file.namelist() == ['index.html'] - - zip_file.close() - finally: - shutil.rmtree(tmp_dir) - diff --git a/libs/setuptools-2.2/build/lib/setuptools/version.py b/libs/setuptools-2.2/build/lib/setuptools/version.py deleted file mode 100644 index 2b9ccf1..0000000 --- a/libs/setuptools-2.2/build/lib/setuptools/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '2.2' diff --git a/libs/setuptools-2.2/dist/setuptools-2.2-py3.3.egg b/libs/setuptools-2.2/dist/setuptools-2.2-py3.3.egg deleted file mode 100644 index d0218a1..0000000 Binary files a/libs/setuptools-2.2/dist/setuptools-2.2-py3.3.egg and /dev/null differ diff --git a/libs/setuptools-2.2/docs/Makefile b/libs/setuptools-2.2/docs/Makefile deleted file mode 100644 index 30bf10a..0000000 --- a/libs/setuptools-2.2/docs/Makefile +++ /dev/null @@ -1,75 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. 
-SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . - -.PHONY: help clean html web pickle htmlhelp latex changes linkcheck - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " changes to make an overview over all changed/added/deprecated items" - @echo " linkcheck to check all external links for integrity" - -clean: - -rm -rf build/* - -html: - mkdir -p build/html build/doctrees - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) build/html - @echo - @echo "Build finished. The HTML pages are in build/html." - -pickle: - mkdir -p build/pickle build/doctrees - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) build/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -web: pickle - -json: - mkdir -p build/json build/doctrees - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) build/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - mkdir -p build/htmlhelp build/doctrees - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) build/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in build/htmlhelp." - -latex: - mkdir -p build/latex build/doctrees - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) build/latex - @echo - @echo "Build finished; the LaTeX files are in build/latex." - @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ - "run these through (pdf)latex." - -changes: - mkdir -p build/changes build/doctrees - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) build/changes - @echo - @echo "The overview file is in build/changes." 
- -linkcheck: - mkdir -p build/linkcheck build/doctrees - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) build/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in build/linkcheck/output.txt." diff --git a/libs/setuptools-2.2/docs/_templates/indexsidebar.html b/libs/setuptools-2.2/docs/_templates/indexsidebar.html deleted file mode 100644 index a27c85f..0000000 --- a/libs/setuptools-2.2/docs/_templates/indexsidebar.html +++ /dev/null @@ -1,8 +0,0 @@ -

Download

- -

Current version: {{ version }}

-

Get Setuptools from the Python Package Index - -

Questions? Suggestions? Contributions?

- -

Visit the Setuptools project page

diff --git a/libs/setuptools-2.2/docs/_theme/nature/static/nature.css_t b/libs/setuptools-2.2/docs/_theme/nature/static/nature.css_t deleted file mode 100644 index 1a65426..0000000 --- a/libs/setuptools-2.2/docs/_theme/nature/static/nature.css_t +++ /dev/null @@ -1,237 +0,0 @@ -/** - * Sphinx stylesheet -- default theme - * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - */ - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: Arial, sans-serif; - font-size: 100%; - background-color: #111111; - color: #555555; - margin: 0; - padding: 0; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 300px; -} - -hr{ - border: 1px solid #B1B4B6; -} - -div.document { - background-color: #fafafa; -} - -div.body { - background-color: #ffffff; - color: #3E4349; - padding: 1em 30px 30px 30px; - font-size: 0.9em; -} - -div.footer { - color: #555; - width: 100%; - padding: 13px 0; - text-align: center; - font-size: 75%; -} - -div.footer a { - color: #444444; -} - -div.related { - background-color: #6BA81E; - line-height: 36px; - color: #ffffff; - text-shadow: 0px 1px 0 #444444; - font-size: 1.1em; -} - -div.related a { - color: #E2F3CC; -} - -div.related .right { - font-size: 0.9em; -} - -div.sphinxsidebar { - font-size: 0.9em; - line-height: 1.5em; - width: 300px; -} - -div.sphinxsidebarwrapper{ - padding: 20px 0; -} - -div.sphinxsidebar h3, -div.sphinxsidebar h4 { - font-family: Arial, sans-serif; - color: #222222; - font-size: 1.2em; - font-weight: bold; - margin: 0; - padding: 5px 10px; - text-shadow: 1px 1px 0 white -} - -div.sphinxsidebar h3 a { - color: #444444; -} - -div.sphinxsidebar p { - color: #888888; - padding: 5px 20px; - margin: 0.5em 0px; -} - -div.sphinxsidebar p.topless { -} - -div.sphinxsidebar ul { - margin: 10px 10px 10px 20px; - padding: 0; - color: #000000; -} - -div.sphinxsidebar a { - color: #444444; -} - -div.sphinxsidebar a:hover { - color: 
#E32E00; -} - -div.sphinxsidebar input { - border: 1px solid #cccccc; - font-family: sans-serif; - font-size: 1.1em; - padding: 0.15em 0.3em; -} - -div.sphinxsidebar input[type=text]{ - margin-left: 20px; -} - -/* -- body styles ----------------------------------------------------------- */ - -a { - color: #005B81; - text-decoration: none; -} - -a:hover { - color: #E32E00; -} - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: Arial, sans-serif; - font-weight: normal; - color: #212224; - margin: 30px 0px 10px 0px; - padding: 5px 0 5px 0px; - text-shadow: 0px 1px 0 white; - border-bottom: 1px solid #C8D5E3; -} - -div.body h1 { margin-top: 0; font-size: 200%; } -div.body h2 { font-size: 150%; } -div.body h3 { font-size: 120%; } -div.body h4 { font-size: 110%; } -div.body h5 { font-size: 100%; } -div.body h6 { font-size: 100%; } - -a.headerlink { - color: #c60f0f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - background-color: #c60f0f; - color: white; -} - -div.body p, div.body dd, div.body li { - line-height: 1.8em; -} - -div.admonition p.admonition-title + p { - display: inline; -} - -div.highlight{ - background-color: white; -} - -div.note { - background-color: #eeeeee; - border: 1px solid #cccccc; -} - -div.seealso { - background-color: #ffffcc; - border: 1px solid #ffff66; -} - -div.topic { - background-color: #fafafa; - border-width: 0; -} - -div.warning { - background-color: #ffe4e4; - border: 1px solid #ff6666; -} - -p.admonition-title { - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -pre { - padding: 10px; - background-color: #fafafa; - color: #222222; - line-height: 1.5em; - font-size: 1.1em; - margin: 1.5em 0 1.5em 0; - -webkit-box-shadow: 0px 0px 4px #d8d8d8; - -moz-box-shadow: 0px 0px 4px #d8d8d8; - box-shadow: 0px 0px 4px #d8d8d8; -} - -tt { - color: #222222; - padding: 1px 2px; - font-size: 1.2em; - font-family: monospace; -} - 
-#table-of-contents ul { - padding-left: 2em; -} - diff --git a/libs/setuptools-2.2/docs/_theme/nature/static/pygments.css b/libs/setuptools-2.2/docs/_theme/nature/static/pygments.css deleted file mode 100644 index 652b761..0000000 --- a/libs/setuptools-2.2/docs/_theme/nature/static/pygments.css +++ /dev/null @@ -1,54 +0,0 @@ -.c { color: #999988; font-style: italic } /* Comment */ -.k { font-weight: bold } /* Keyword */ -.o { font-weight: bold } /* Operator */ -.cm { color: #999988; font-style: italic } /* Comment.Multiline */ -.cp { color: #999999; font-weight: bold } /* Comment.preproc */ -.c1 { color: #999988; font-style: italic } /* Comment.Single */ -.gd { color: #000000; background-color: #ffdddd } /* Generic.Deleted */ -.ge { font-style: italic } /* Generic.Emph */ -.gr { color: #aa0000 } /* Generic.Error */ -.gh { color: #999999 } /* Generic.Heading */ -.gi { color: #000000; background-color: #ddffdd } /* Generic.Inserted */ -.go { color: #111 } /* Generic.Output */ -.gp { color: #555555 } /* Generic.Prompt */ -.gs { font-weight: bold } /* Generic.Strong */ -.gu { color: #aaaaaa } /* Generic.Subheading */ -.gt { color: #aa0000 } /* Generic.Traceback */ -.kc { font-weight: bold } /* Keyword.Constant */ -.kd { font-weight: bold } /* Keyword.Declaration */ -.kp { font-weight: bold } /* Keyword.Pseudo */ -.kr { font-weight: bold } /* Keyword.Reserved */ -.kt { color: #445588; font-weight: bold } /* Keyword.Type */ -.m { color: #009999 } /* Literal.Number */ -.s { color: #bb8844 } /* Literal.String */ -.na { color: #008080 } /* Name.Attribute */ -.nb { color: #999999 } /* Name.Builtin */ -.nc { color: #445588; font-weight: bold } /* Name.Class */ -.no { color: #ff99ff } /* Name.Constant */ -.ni { color: #800080 } /* Name.Entity */ -.ne { color: #990000; font-weight: bold } /* Name.Exception */ -.nf { color: #990000; font-weight: bold } /* Name.Function */ -.nn { color: #555555 } /* Name.Namespace */ -.nt { color: #000080 } /* Name.Tag */ -.nv { color: purple } 
/* Name.Variable */ -.ow { font-weight: bold } /* Operator.Word */ -.mf { color: #009999 } /* Literal.Number.Float */ -.mh { color: #009999 } /* Literal.Number.Hex */ -.mi { color: #009999 } /* Literal.Number.Integer */ -.mo { color: #009999 } /* Literal.Number.Oct */ -.sb { color: #bb8844 } /* Literal.String.Backtick */ -.sc { color: #bb8844 } /* Literal.String.Char */ -.sd { color: #bb8844 } /* Literal.String.Doc */ -.s2 { color: #bb8844 } /* Literal.String.Double */ -.se { color: #bb8844 } /* Literal.String.Escape */ -.sh { color: #bb8844 } /* Literal.String.Heredoc */ -.si { color: #bb8844 } /* Literal.String.Interpol */ -.sx { color: #bb8844 } /* Literal.String.Other */ -.sr { color: #808000 } /* Literal.String.Regex */ -.s1 { color: #bb8844 } /* Literal.String.Single */ -.ss { color: #bb8844 } /* Literal.String.Symbol */ -.bp { color: #999999 } /* Name.Builtin.Pseudo */ -.vc { color: #ff99ff } /* Name.Variable.Class */ -.vg { color: #ff99ff } /* Name.Variable.Global */ -.vi { color: #ff99ff } /* Name.Variable.Instance */ -.il { color: #009999 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/libs/setuptools-2.2/docs/_theme/nature/theme.conf b/libs/setuptools-2.2/docs/_theme/nature/theme.conf deleted file mode 100644 index 1cc4004..0000000 --- a/libs/setuptools-2.2/docs/_theme/nature/theme.conf +++ /dev/null @@ -1,4 +0,0 @@ -[theme] -inherit = basic -stylesheet = nature.css -pygments_style = tango diff --git a/libs/setuptools-2.2/docs/conf.py b/libs/setuptools-2.2/docs/conf.py deleted file mode 100644 index 9929aaf..0000000 --- a/libs/setuptools-2.2/docs/conf.py +++ /dev/null @@ -1,197 +0,0 @@ -# -*- coding: utf-8 -*- -# -# Setuptools documentation build configuration file, created by -# sphinx-quickstart on Fri Jul 17 14:22:37 2009. -# -# This file is execfile()d with the current directory set to its containing dir. 
-# -# The contents of this file are pickled, so don't put values in the namespace -# that aren't pickleable (module imports are okay, they're removed automatically). -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import setup as setup_script - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -#sys.path.append(os.path.abspath('.')) - -# -- General configuration ----------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. They can be extensions -# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. -extensions = [] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The suffix of source filenames. -source_suffix = '.txt' - -# The encoding of source files. -#source_encoding = 'utf-8' - -# The master toctree document. -master_doc = 'index' - -# General information about the project. -project = 'Setuptools' -copyright = '2009-2013, The fellowship of the packaging' - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -version = setup_script.setup_params['version'] -# The full version, including alpha/beta/rc tags. -release = setup_script.setup_params['version'] - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. 
-#language = None - -# There are two options for replacing |today|: either, you set today to some -# non-false value, then it is used: -#today = '' -# Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' - -# List of documents that shouldn't be included in the build. -#unused_docs = [] - -# List of directories, relative to source directory, that shouldn't be searched -# for source files. -exclude_trees = [] - -# The reST default role (used for this markup: `text`) to use for all documents. -#default_role = None - -# If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -#add_module_names = True - -# If true, sectionauthor and moduleauthor directives will be shown in the -# output. They are ignored by default. -#show_authors = False - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] - - -# -- Options for HTML output --------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. Major themes that come with -# Sphinx are currently 'default' and 'sphinxdoc'. -html_theme = 'nature' - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. -#html_theme_options = {} - -# Add any paths that contain custom themes here, relative to this directory. -html_theme_path = ['_theme'] - -# The name for this set of Sphinx documents. If None, it defaults to -# " v documentation". -html_title = "Setuptools documentation" - -# A shorter title for the navigation bar. Default is the same as html_title. 
-html_short_title = "Setuptools" - -# The name of an image file (relative to this directory) to place at the top -# of the sidebar. -#html_logo = None - -# The name of an image file (within the static path) to use as favicon of the -# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 -# pixels large. -#html_favicon = None - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -#html_static_path = ['_static'] - -# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, -# using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' - -# If true, SmartyPants will be used to convert quotes and dashes to -# typographically correct entities. -html_use_smartypants = True - -# Custom sidebar templates, maps document names to template names. -html_sidebars = {'index': 'indexsidebar.html'} - -# Additional templates that should be rendered to pages, maps page names to -# template names. -#html_additional_pages = {} - -# If false, no module index is generated. -html_use_modindex = False - -# If false, no index is generated. -html_use_index = False - -# If true, the index is split into individual pages for each letter. -#html_split_index = False - -# If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True - -# If true, an OpenSearch description file will be output, and all pages will -# contain a tag referring to it. The value of this option must be the -# base URL from which the finished HTML is served. -#html_use_opensearch = '' - -# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = '' - -# Output file base name for HTML help builder. 
-htmlhelp_basename = 'Setuptoolsdoc' - - -# -- Options for LaTeX output -------------------------------------------------- - -# The paper size ('letter' or 'a4'). -#latex_paper_size = 'letter' - -# The font size ('10pt', '11pt' or '12pt'). -#latex_font_size = '10pt' - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, author, documentclass [howto/manual]). -latex_documents = [ - ('index', 'Setuptools.tex', 'Setuptools Documentation', - 'The fellowship of the packaging', 'manual'), -] - -# The name of an image file (relative to this directory) to place at the top of -# the title page. -#latex_logo = None - -# For "manual" documents, if this is true, then toplevel headings are parts, -# not chapters. -#latex_use_parts = False - -# Additional stuff for the LaTeX preamble. -#latex_preamble = '' - -# Documents to append as an appendix to all manuals. -#latex_appendices = [] - -# If false, no module index is generated. -#latex_use_modindex = True diff --git a/libs/setuptools-2.2/docs/development.txt b/libs/setuptools-2.2/docs/development.txt deleted file mode 100644 index ba927c7..0000000 --- a/libs/setuptools-2.2/docs/development.txt +++ /dev/null @@ -1,35 +0,0 @@ -------------------------- -Development on Setuptools -------------------------- - -Setuptools is maintained by the Python community under the Python Packaging -Authority (PyPA) and led by Jason R. Coombs. - -This document describes the process by which Setuptools is developed. -This document assumes the reader has some passing familiarity with -*using* setuptools, the ``pkg_resources`` module, and EasyInstall. It -does not attempt to explain basic concepts like inter-project -dependencies, nor does it contain detailed lexical syntax for most -file formats. 
Neither does it explain concepts like "namespace -packages" or "resources" in any detail, as all of these subjects are -covered at length in the setuptools developer's guide and the -``pkg_resources`` reference manual. - -Instead, this is **internal** documentation for how those concepts and -features are *implemented* in concrete terms. It is intended for people -who are working on the setuptools code base, who want to be able to -troubleshoot setuptools problems, want to write code that reads the file -formats involved, or want to otherwise tinker with setuptools-generated -files and directories. - -Note, however, that these are all internal implementation details and -are therefore subject to change; stick to the published API if you don't -want to be responsible for keeping your code from breaking when -setuptools changes. You have been warned. - -.. toctree:: - :maxdepth: 1 - - formats - releases - diff --git a/libs/setuptools-2.2/docs/easy_install.txt b/libs/setuptools-2.2/docs/easy_install.txt deleted file mode 100644 index 6739ba1..0000000 --- a/libs/setuptools-2.2/docs/easy_install.txt +++ /dev/null @@ -1,1625 +0,0 @@ -============ -Easy Install -============ - -Easy Install is a python module (``easy_install``) bundled with ``setuptools`` -that lets you automatically download, build, install, and manage Python -packages. - -Please share your experiences with us! If you encounter difficulty installing -a package, please contact us via the `distutils mailing list -`_. (Note: please DO NOT send -private email directly to the author of setuptools; it will be discarded. The -mailing list is a searchable archive of previously-asked and answered -questions; you should begin your research there before reporting something as a -bug -- and then do so via list discussion first.) 
- -(Also, if you'd like to learn about how you can use ``setuptools`` to make your -own packages work better with EasyInstall, or provide EasyInstall-like features -without requiring your users to use EasyInstall directly, you'll probably want -to check out the full `setuptools`_ documentation as well.) - -.. contents:: **Table of Contents** - - -Using "Easy Install" -==================== - - -.. _installation instructions: - -Installing "Easy Install" -------------------------- - -Please see the `setuptools PyPI page `_ -for download links and basic installation instructions for each of the -supported platforms. - -You will need at least Python 2.6. An ``easy_install`` script will be -installed in the normal location for Python scripts on your platform. - -Note that the instructions on the setuptools PyPI page assume that you are -are installling to Python's primary ``site-packages`` directory. If this is -not the case, you should consult the section below on `Custom Installation -Locations`_ before installing. (And, on Windows, you should not use the -``.exe`` installer when installing to an alternate location.) - -Note that ``easy_install`` normally works by downloading files from the -internet. If you are behind an NTLM-based firewall that prevents Python -programs from accessing the net directly, you may wish to first install and use -the `APS proxy server `_, which lets you get past such -firewalls in the same way that your web browser(s) do. - -(Alternately, if you do not wish easy_install to actually download anything, you -can restrict it from doing so with the ``--allow-hosts`` option; see the -sections on `restricting downloads with --allow-hosts`_ and `command-line -options`_ for more details.) 
- - -Troubleshooting -~~~~~~~~~~~~~~~ - -If EasyInstall/setuptools appears to install correctly, and you can run the -``easy_install`` command but it fails with an ``ImportError``, the most likely -cause is that you installed to a location other than ``site-packages``, -without taking any of the steps described in the `Custom Installation -Locations`_ section below. Please see that section and follow the steps to -make sure that your custom location will work correctly. Then re-install. - -Similarly, if you can run ``easy_install``, and it appears to be installing -packages, but then you can't import them, the most likely issue is that you -installed EasyInstall correctly but are using it to install packages to a -non-standard location that hasn't been properly prepared. Again, see the -section on `Custom Installation Locations`_ for more details. - - -Windows Notes -~~~~~~~~~~~~~ - -Installing setuptools will provide an ``easy_install`` command according to -the techniques described in `Executables and Launchers`_. If the -``easy_install`` command is not available after installation, that section -provides details on how to configure Windows to make the commands available. - - -Downloading and Installing a Package ------------------------------------- - -For basic use of ``easy_install``, you need only supply the filename or URL of -a source distribution or .egg file (`Python Egg`__). - -__ http://peak.telecommunity.com/DevCenter/PythonEggs - -**Example 1**. Install a package by name, searching PyPI for the latest -version, and automatically downloading, building, and installing it:: - - easy_install SQLObject - -**Example 2**. Install or upgrade a package by name and version by finding -links on a given "download page":: - - easy_install -f http://pythonpaste.org/package_index.html SQLObject - -**Example 3**. 
Download a source distribution from a specified URL, -automatically building and installing it:: - - easy_install http://example.com/path/to/MyPackage-1.2.3.tgz - -**Example 4**. Install an already-downloaded .egg file:: - - easy_install /my_downloads/OtherPackage-3.2.1-py2.3.egg - -**Example 5**. Upgrade an already-installed package to the latest version -listed on PyPI:: - - easy_install --upgrade PyProtocols - -**Example 6**. Install a source distribution that's already downloaded and -extracted in the current directory (New in 0.5a9):: - - easy_install . - -**Example 7**. (New in 0.6a1) Find a source distribution or Subversion -checkout URL for a package, and extract it or check it out to -``~/projects/sqlobject`` (the name will always be in all-lowercase), where it -can be examined or edited. (The package will not be installed, but it can -easily be installed with ``easy_install ~/projects/sqlobject``. See `Editing -and Viewing Source Packages`_ below for more info.):: - - easy_install --editable --build-directory ~/projects SQLObject - -**Example 7**. (New in 0.6.11) Install a distribution within your home dir:: - - easy_install --user SQLAlchemy - -Easy Install accepts URLs, filenames, PyPI package names (i.e., ``distutils`` -"distribution" names), and package+version specifiers. In each case, it will -attempt to locate the latest available version that meets your criteria. - -When downloading or processing downloaded files, Easy Install recognizes -distutils source distribution files with extensions of .tgz, .tar, .tar.gz, -.tar.bz2, or .zip. And of course it handles already-built .egg -distributions as well as ``.win32.exe`` installers built using distutils. - -By default, packages are installed to the running Python installation's -``site-packages`` directory, unless you provide the ``-d`` or ``--install-dir`` -option to specify an alternative directory, or specify an alternate location -using distutils configuration files. 
(See `Configuration Files`_, below.) - -By default, any scripts included with the package are installed to the running -Python installation's standard script installation location. However, if you -specify an installation directory via the command line or a config file, then -the default directory for installing scripts will be the same as the package -installation directory, to ensure that the script will have access to the -installed package. You can override this using the ``-s`` or ``--script-dir`` -option. - -Installed packages are added to an ``easy-install.pth`` file in the install -directory, so that Python will always use the most-recently-installed version -of the package. If you would like to be able to select which version to use at -runtime, you should use the ``-m`` or ``--multi-version`` option. - - -Upgrading a Package -------------------- - -You don't need to do anything special to upgrade a package: just install the -new version, either by requesting a specific version, e.g.:: - - easy_install "SomePackage==2.0" - -a version greater than the one you have now:: - - easy_install "SomePackage>2.0" - -using the upgrade flag, to find the latest available version on PyPI:: - - easy_install --upgrade SomePackage - -or by using a download page, direct download URL, or package filename:: - - easy_install -f http://example.com/downloads ExamplePackage - - easy_install http://example.com/downloads/ExamplePackage-2.0-py2.4.egg - - easy_install my_downloads/ExamplePackage-2.0.tgz - -If you're using ``-m`` or ``--multi-version`` , using the ``require()`` -function at runtime automatically selects the newest installed version of a -package that meets your version criteria. So, installing a newer version is -the only step needed to upgrade such packages. 
- -If you're installing to a directory on PYTHONPATH, or a configured "site" -directory (and not using ``-m``), installing a package automatically replaces -any previous version in the ``easy-install.pth`` file, so that Python will -import the most-recently installed version by default. So, again, installing -the newer version is the only upgrade step needed. - -If you haven't suppressed script installation (using ``--exclude-scripts`` or -``-x``), then the upgraded version's scripts will be installed, and they will -be automatically patched to ``require()`` the corresponding version of the -package, so that you can use them even if they are installed in multi-version -mode. - -``easy_install`` never actually deletes packages (unless you're installing a -package with the same name and version number as an existing package), so if -you want to get rid of older versions of a package, please see `Uninstalling -Packages`_, below. - - -Changing the Active Version ---------------------------- - -If you've upgraded a package, but need to revert to a previously-installed -version, you can do so like this:: - - easy_install PackageName==1.2.3 - -Where ``1.2.3`` is replaced by the exact version number you wish to switch to. -If a package matching the requested name and version is not already installed -in a directory on ``sys.path``, it will be located via PyPI and installed. - -If you'd like to switch to the latest installed version of ``PackageName``, you -can do so like this:: - - easy_install PackageName - -This will activate the latest installed version. (Note: if you have set any -``find_links`` via distutils configuration files, those download pages will be -checked for the latest available version of the package, and it will be -downloaded and installed if it is newer than your current version.) - -Note that changing the active version of a package will install the newly -active version's scripts, unless the ``--exclude-scripts`` or ``-x`` option is -specified. 
- - -Uninstalling Packages ---------------------- - -If you have replaced a package with another version, then you can just delete -the package(s) you don't need by deleting the PackageName-versioninfo.egg file -or directory (found in the installation directory). - -If you want to delete the currently installed version of a package (or all -versions of a package), you should first run:: - - easy_install -m PackageName - -This will ensure that Python doesn't continue to search for a package you're -planning to remove. After you've done this, you can safely delete the .egg -files or directories, along with any scripts you wish to remove. - - -Managing Scripts ----------------- - -Whenever you install, upgrade, or change versions of a package, EasyInstall -automatically installs the scripts for the selected package version, unless -you tell it not to with ``-x`` or ``--exclude-scripts``. If any scripts in -the script directory have the same name, they are overwritten. - -Thus, you do not normally need to manually delete scripts for older versions of -a package, unless the newer version of the package does not include a script -of the same name. However, if you are completely uninstalling a package, you -may wish to manually delete its scripts. - -EasyInstall's default behavior means that you can normally only run scripts -from one version of a package at a time. If you want to keep multiple versions -of a script available, however, you can simply use the ``--multi-version`` or -``-m`` option, and rename the scripts that EasyInstall creates. This works -because EasyInstall installs scripts as short code stubs that ``require()`` the -matching version of the package the script came from, so renaming the script -has no effect on what it executes. - -For example, suppose you want to use two versions of the ``rst2html`` tool -provided by the `docutils `_ package. 
You might -first install one version:: - - easy_install -m docutils==0.3.9 - -then rename the ``rst2html.py`` to ``r2h_039``, and install another version:: - - easy_install -m docutils==0.3.10 - -This will create another ``rst2html.py`` script, this one using docutils -version 0.3.10 instead of 0.3.9. You now have two scripts, each using a -different version of the package. (Notice that we used ``-m`` for both -installations, so that Python won't lock us out of using anything but the most -recently-installed version of the package.) - - -Executables and Launchers -------------------------- - -On Unix systems, scripts are installed with as natural files with a "#!" -header and no extension and they launch under the Python version indicated in -the header. - -On Windows, there is no mechanism to "execute" files without extensions, so -EasyInstall provides two techniques to mirror the Unix behavior. The behavior -is indicated by the SETUPTOOLS_LAUNCHER environment variable, which may be -"executable" (default) or "natural". - -Regardless of the technique used, the script(s) will be installed to a Scripts -directory (by default in the Python installation directory). It is recommended -for EasyInstall that you ensure this directory is in the PATH environment -variable. The easiest way to ensure the Scripts directory is in the PATH is -to run ``Tools\Scripts\win_add2path.py`` from the Python directory (requires -Python 2.6 or later). - -Note that instead of changing your ``PATH`` to include the Python scripts -directory, you can also retarget the installation location for scripts so they -go on a directory that's already on the ``PATH``. For more information see -`Command-Line Options`_ and `Configuration Files`_. During installation, -pass command line options (such as ``--script-dir``) to -``ez_setup.py`` to control where ``easy_install.exe`` will be installed. 
- - -Windows Executable Launcher -~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -If the "executable" launcher is used, EasyInstall will create a '.exe' -launcher of the same name beside each installed script (including -``easy_install`` itself). These small .exe files launch the script of the -same name using the Python version indicated in the '#!' header. - -This behavior is currently default. To force -the use of executable launchers, set ``SETUPTOOLS_LAUNCHER`` to "executable". - -Natural Script Launcher -~~~~~~~~~~~~~~~~~~~~~~~ - -EasyInstall also supports deferring to an external launcher such as -`pylauncher `_ for launching scripts. -Enable this experimental functionality by setting the -``SETUPTOOLS_LAUNCHER`` environment variable to "natural". EasyInstall will -then install scripts as simple -scripts with a .pya (or .pyw) extension appended. If these extensions are -associated with the pylauncher and listed in the PATHEXT environment variable, -these scripts can then be invoked simply and directly just like any other -executable. This behavior may become default in a future version. - -EasyInstall uses the .pya extension instead of simply -the typical '.py' extension. This distinct extension is necessary to prevent -Python -from treating the scripts as importable modules (where name conflicts exist). -Current releases of pylauncher do not yet associate with .pya files by -default, but future versions should do so. - - -Tips & Techniques ------------------ - -Multiple Python Versions -~~~~~~~~~~~~~~~~~~~~~~~~ - -EasyInstall installs itself under two names: -``easy_install`` and ``easy_install-N.N``, where ``N.N`` is the Python version -used to install it. Thus, if you install EasyInstall for both Python 3.2 and -2.7, you can use the ``easy_install-3.2`` or ``easy_install-2.7`` scripts to -install packages for the respective Python version. 
- -Setuptools also supplies easy_install as a runnable module which may be -invoked using ``python -m easy_install`` for any Python with Setuptools -installed. - -Restricting Downloads with ``--allow-hosts`` -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -You can use the ``--allow-hosts`` (``-H``) option to restrict what domains -EasyInstall will look for links and downloads on. ``--allow-hosts=None`` -prevents downloading altogether. You can also use wildcards, for example -to restrict downloading to hosts in your own intranet. See the section below -on `Command-Line Options`_ for more details on the ``--allow-hosts`` option. - -By default, there are no host restrictions in effect, but you can change this -default by editing the appropriate `configuration files`_ and adding: - -.. code-block:: ini - - [easy_install] - allow_hosts = *.myintranet.example.com,*.python.org - -The above example would then allow downloads only from hosts in the -``python.org`` and ``myintranet.example.com`` domains, unless overridden on the -command line. - - -Installing on Un-networked Machines -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Just copy the eggs or source packages you need to a directory on the target -machine, then use the ``-f`` or ``--find-links`` option to specify that -directory's location. For example:: - - easy_install -H None -f somedir SomePackage - -will attempt to install SomePackage using only eggs and source packages found -in ``somedir`` and disallowing all remote access. You should of course make -sure you have all of SomePackage's dependencies available in somedir. 
- -If you have another machine of the same operating system and library versions -(or if the packages aren't platform-specific), you can create the directory of -eggs using a command like this:: - - easy_install -zmaxd somedir SomePackage - -This will tell EasyInstall to put zipped eggs or source packages for -SomePackage and all its dependencies into ``somedir``, without creating any -scripts or .pth files. You can then copy the contents of ``somedir`` to the -target machine. (``-z`` means zipped eggs, ``-m`` means multi-version, which -prevents .pth files from being used, ``-a`` means to copy all the eggs needed, -even if they're installed elsewhere on the machine, and ``-d`` indicates the -directory to place the eggs in.) - -You can also build the eggs from local development packages that were installed -with the ``setup.py develop`` command, by including the ``-l`` option, e.g.:: - - easy_install -zmaxld somedir SomePackage - -This will use locally-available source distributions to build the eggs. - - -Packaging Others' Projects As Eggs -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Need to distribute a package that isn't published in egg form? You can use -EasyInstall to build eggs for a project. You'll want to use the ``--zip-ok``, -``--exclude-scripts``, and possibly ``--no-deps`` options (``-z``, ``-x`` and -``-N``, respectively). Use ``-d`` or ``--install-dir`` to specify the location -where you'd like the eggs placed. By placing them in a directory that is -published to the web, you can then make the eggs available for download, either -in an intranet or to the internet at large. - -If someone distributes a package in the form of a single ``.py`` file, you can -wrap it in an egg by tacking an ``#egg=name-version`` suffix on the file's URL. -So, something like this:: - - easy_install -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo - -will install the package as an egg, and this:: - - easy_install -zmaxd. 
\ - -f "http://some.example.com/downloads/foo.py#egg=foo-1.0" foo - -will create a ``.egg`` file in the current directory. - - -Creating your own Package Index -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In addition to local directories and the Python Package Index, EasyInstall can -find download links on most any web page whose URL is given to the ``-f`` -(``--find-links``) option. In the simplest case, you can simply have a web -page with links to eggs or Python source packages, even an automatically -generated directory listing (such as the Apache web server provides). - -If you are setting up an intranet site for package downloads, you may want to -configure the target machines to use your download site by default, adding -something like this to their `configuration files`_: - -.. code-block:: ini - - [easy_install] - find_links = http://mypackages.example.com/somedir/ - http://turbogears.org/download/ - http://peak.telecommunity.com/dist/ - -As you can see, you can list multiple URLs separated by whitespace, continuing -on multiple lines if necessary (as long as the subsequent lines are indented. - -If you are more ambitious, you can also create an entirely custom package index -or PyPI mirror. See the ``--index-url`` option under `Command-Line Options`_, -below, and also the section on `Package Index "API"`_. - - -Password-Protected Sites ------------------------- - -If a site you want to download from is password-protected using HTTP "Basic" -authentication, you can specify your credentials in the URL, like so:: - - http://some_userid:some_password@some.example.com/some_path/ - -You can do this with both index page URLs and direct download URLs. As long -as any HTML pages read by easy_install use *relative* links to point to the -downloads, the same user ID and password will be used to do the downloading. 
- -Using .pypirc Credentials -------------------------- - -In additional to supplying credentials in the URL, ``easy_install`` will also -honor credentials if present in the .pypirc file. Teams maintaining a private -repository of packages may already have defined access credentials for -uploading packages according to the distutils documentation. ``easy_install`` -will attempt to honor those if present. Refer to the distutils documentation -for Python 2.5 or later for details on the syntax. - -Controlling Build Options -~~~~~~~~~~~~~~~~~~~~~~~~~ - -EasyInstall respects standard distutils `Configuration Files`_, so you can use -them to configure build options for packages that it installs from source. For -example, if you are on Windows using the MinGW compiler, you can configure the -default compiler by putting something like this: - -.. code-block:: ini - - [build] - compiler = mingw32 - -into the appropriate distutils configuration file. In fact, since this is just -normal distutils configuration, it will affect any builds using that config -file, not just ones done by EasyInstall. For example, if you add those lines -to ``distutils.cfg`` in the ``distutils`` package directory, it will be the -default compiler for *all* packages you build. See `Configuration Files`_ -below for a list of the standard configuration file locations, and links to -more documentation on using distutils configuration files. - - -Editing and Viewing Source Packages -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -Sometimes a package's source distribution contains additional documentation, -examples, configuration files, etc., that are not part of its actual code. If -you want to be able to examine these files, you can use the ``--editable`` -option to EasyInstall, and EasyInstall will look for a source distribution -or Subversion URL for the package, then download and extract it or check it out -as a subdirectory of the ``--build-directory`` you specify. 
If you then wish -to install the package after editing or configuring it, you can do so by -rerunning EasyInstall with that directory as the target. - -Note that using ``--editable`` stops EasyInstall from actually building or -installing the package; it just finds, obtains, and possibly unpacks it for -you. This allows you to make changes to the package if necessary, and to -either install it in development mode using ``setup.py develop`` (if the -package uses setuptools, that is), or by running ``easy_install projectdir`` -(where ``projectdir`` is the subdirectory EasyInstall created for the -downloaded package. - -In order to use ``--editable`` (``-e`` for short), you *must* also supply a -``--build-directory`` (``-b`` for short). The project will be placed in a -subdirectory of the build directory. The subdirectory will have the same -name as the project itself, but in all-lowercase. If a file or directory of -that name already exists, EasyInstall will print an error message and exit. - -Also, when using ``--editable``, you cannot use URLs or filenames as arguments. -You *must* specify project names (and optional version requirements) so that -EasyInstall knows what directory name(s) to create. If you need to force -EasyInstall to use a particular URL or filename, you should specify it as a -``--find-links`` item (``-f`` for short), and then also specify -the project name, e.g.:: - - easy_install -eb ~/projects \ - -fhttp://prdownloads.sourceforge.net/ctypes/ctypes-0.9.6.tar.gz?download \ - ctypes==0.9.6 - - -Dealing with Installation Conflicts -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -(NOTE: As of 0.6a11, this section is obsolete; it is retained here only so that -people using older versions of EasyInstall can consult it. As of version -0.6a11, installation conflicts are handled automatically without deleting the -old or system-installed packages, and without ignoring the issue. 
Instead, -eggs are automatically shifted to the front of ``sys.path`` using special -code added to the ``easy-install.pth`` file. So, if you are using version -0.6a11 or better of setuptools, you do not need to worry about conflicts, -and the following issues do not apply to you.) - -EasyInstall installs distributions in a "managed" way, such that each -distribution can be independently activated or deactivated on ``sys.path``. -However, packages that were not installed by EasyInstall are "unmanaged", -in that they usually live all in one directory and cannot be independently -activated or deactivated. - -As a result, if you are using EasyInstall to upgrade an existing package, or -to install a package with the same name as an existing package, EasyInstall -will warn you of the conflict. (This is an improvement over ``setup.py -install``, becuase the ``distutils`` just install new packages on top of old -ones, possibly combining two unrelated packages or leaving behind modules that -have been deleted in the newer version of the package.) - -EasyInstall will stop the installation if it detects a conflict -between an existing, "unmanaged" package, and a module or package in any of -the distributions you're installing. It will display a list of all of the -existing files and directories that would need to be deleted for the new -package to be able to function correctly. To proceed, you must manually -delete these conflicting files and directories and re-run EasyInstall. - -Of course, once you've replaced all of your existing "unmanaged" packages with -versions managed by EasyInstall, you won't have any more conflicts to worry -about! - - -Compressed Installation -~~~~~~~~~~~~~~~~~~~~~~~ - -EasyInstall tries to install packages in zipped form, if it can. Zipping -packages can improve Python's overall import performance if you're not using -the ``--multi-version`` option, because Python processes zipfile entries on -``sys.path`` much faster than it does directories. 
- -As of version 0.5a9, EasyInstall analyzes packages to determine whether they -can be safely installed as a zipfile, and then acts on its analysis. (Previous -versions would not install a package as a zipfile unless you used the -``--zip-ok`` option.) - -The current analysis approach is fairly conservative; it currenly looks for: - - * Any use of the ``__file__`` or ``__path__`` variables (which should be - replaced with ``pkg_resources`` API calls) - - * Possible use of ``inspect`` functions that expect to manipulate source files - (e.g. ``inspect.getsource()``) - - * Top-level modules that might be scripts used with ``python -m`` (Python 2.4) - -If any of the above are found in the package being installed, EasyInstall will -assume that the package cannot be safely run from a zipfile, and unzip it to -a directory instead. You can override this analysis with the ``-zip-ok`` flag, -which will tell EasyInstall to install the package as a zipfile anyway. Or, -you can use the ``--always-unzip`` flag, in which case EasyInstall will always -unzip, even if its analysis says the package is safe to run as a zipfile. - -Normally, however, it is simplest to let EasyInstall handle the determination -of whether to zip or unzip, and only specify overrides when needed to work -around a problem. If you find you need to override EasyInstall's guesses, you -may want to contact the package author and the EasyInstall maintainers, so that -they can make appropriate changes in future versions. - -(Note: If a package uses ``setuptools`` in its setup script, the package author -has the option to declare the package safe or unsafe for zipped usage via the -``zip_safe`` argument to ``setup()``. If the package author makes such a -declaration, EasyInstall believes the package's author and does not perform its -own analysis. However, your command-line option, if any, will still override -the package author's choice.) 
- - -Reference Manual -================ - -Configuration Files -------------------- - -(New in 0.4a2) - -You may specify default options for EasyInstall using the standard -distutils configuration files, under the command heading ``easy_install``. -EasyInstall will look first for a ``setup.cfg`` file in the current directory, -then a ``~/.pydistutils.cfg`` or ``$HOME\\pydistutils.cfg`` (on Unix-like OSes -and Windows, respectively), and finally a ``distutils.cfg`` file in the -``distutils`` package directory. Here's a simple example: - -.. code-block:: ini - - [easy_install] - - # set the default location to install packages - install_dir = /home/me/lib/python - - # Notice that indentation can be used to continue an option - # value; this is especially useful for the "--find-links" - # option, which tells easy_install to use download links on - # these pages before consulting PyPI: - # - find_links = http://sqlobject.org/ - http://peak.telecommunity.com/dist/ - -In addition to accepting configuration for its own options under -``[easy_install]``, EasyInstall also respects defaults specified for other -distutils commands. For example, if you don't set an ``install_dir`` for -``[easy_install]``, but *have* set an ``install_lib`` for the ``[install]`` -command, this will become EasyInstall's default installation directory. Thus, -if you are already using distutils configuration files to set default install -locations, build options, etc., EasyInstall will respect your existing settings -until and unless you override them explicitly in an ``[easy_install]`` section. - -For more information, see also the current Python documentation on the `use and -location of distutils configuration files `_. - -Notice that ``easy_install`` will use the ``setup.cfg`` from the current -working directory only if it was triggered from ``setup.py`` through the -``install_requires`` option. The standalone command will not use that file. 
- -Command-Line Options --------------------- - -``--zip-ok, -z`` - Install all packages as zip files, even if they are marked as unsafe for - running as a zipfile. This can be useful when EasyInstall's analysis - of a non-setuptools package is too conservative, but keep in mind that - the package may not work correctly. (Changed in 0.5a9; previously this - option was required in order for zipped installation to happen at all.) - -``--always-unzip, -Z`` - Don't install any packages as zip files, even if the packages are marked - as safe for running as a zipfile. This can be useful if a package does - something unsafe, but not in a way that EasyInstall can easily detect. - EasyInstall's default analysis is currently very conservative, however, so - you should only use this option if you've had problems with a particular - package, and *after* reporting the problem to the package's maintainer and - to the EasyInstall maintainers. - - (Note: the ``-z/-Z`` options only affect the installation of newly-built - or downloaded packages that are not already installed in the target - directory; if you want to convert an existing installed version from - zipped to unzipped or vice versa, you'll need to delete the existing - version first, and re-run EasyInstall.) - -``--multi-version, -m`` - "Multi-version" mode. Specifying this option prevents ``easy_install`` from - adding an ``easy-install.pth`` entry for the package being installed, and - if an entry for any version the package already exists, it will be removed - upon successful installation. In multi-version mode, no specific version of - the package is available for importing, unless you use - ``pkg_resources.require()`` to put it on ``sys.path``. This can be as - simple as:: - - from pkg_resources import require - require("SomePackage", "OtherPackage", "MyPackage") - - which will put the latest installed version of the specified packages on - ``sys.path`` for you. 
(For more advanced uses, like selecting specific - versions and enabling optional dependencies, see the ``pkg_resources`` API - doc.) - - Changed in 0.6a10: this option is no longer silently enabled when - installing to a non-PYTHONPATH, non-"site" directory. You must always - explicitly use this option if you want it to be active. - -``--upgrade, -U`` (New in 0.5a4) - By default, EasyInstall only searches online if a project/version - requirement can't be met by distributions already installed - on sys.path or the installation directory. However, if you supply the - ``--upgrade`` or ``-U`` flag, EasyInstall will always check the package - index and ``--find-links`` URLs before selecting a version to install. In - this way, you can force EasyInstall to use the latest available version of - any package it installs (subject to any version requirements that might - exclude such later versions). - -``--install-dir=DIR, -d DIR`` - Set the installation directory. It is up to you to ensure that this - directory is on ``sys.path`` at runtime, and to use - ``pkg_resources.require()`` to enable the installed package(s) that you - need. - - (New in 0.4a2) If this option is not directly specified on the command line - or in a distutils configuration file, the distutils default installation - location is used. Normally, this would be the ``site-packages`` directory, - but if you are using distutils configuration files, setting things like - ``prefix`` or ``install_lib``, then those settings are taken into - account when computing the default installation directory, as is the - ``--prefix`` option. - -``--script-dir=DIR, -s DIR`` - Set the script installation directory. If you don't supply this option - (via the command line or a configuration file), but you *have* supplied - an ``--install-dir`` (via command line or config file), then this option - defaults to the same directory, so that the scripts will be able to find - their associated package installation. 
Otherwise, this setting defaults - to the location where the distutils would normally install scripts, taking - any distutils configuration file settings into account. - -``--exclude-scripts, -x`` - Don't install scripts. This is useful if you need to install multiple - versions of a package, but do not want to reset the version that will be - run by scripts that are already installed. - -``--user`` (New in 0.6.11) - Use the the user-site-packages as specified in :pep:`370` - instead of the global site-packages. - -``--always-copy, -a`` (New in 0.5a4) - Copy all needed distributions to the installation directory, even if they - are already present in a directory on sys.path. In older versions of - EasyInstall, this was the default behavior, but now you must explicitly - request it. By default, EasyInstall will no longer copy such distributions - from other sys.path directories to the installation directory, unless you - explicitly gave the distribution's filename on the command line. - - Note that as of 0.6a10, using this option excludes "system" and - "development" eggs from consideration because they can't be reliably - copied. This may cause EasyInstall to choose an older version of a package - than what you expected, or it may cause downloading and installation of a - fresh copy of something that's already installed. You will see warning - messages for any eggs that EasyInstall skips, before it falls back to an - older version or attempts to download a fresh copy. - -``--find-links=URLS_OR_FILENAMES, -f URLS_OR_FILENAMES`` - Scan the specified "download pages" or directories for direct links to eggs - or other distributions. Any existing file or directory names or direct - download URLs are immediately added to EasyInstall's search cache, and any - indirect URLs (ones that don't point to eggs or other recognized archive - formats) are added to a list of additional places to search for download - links. 
As soon as EasyInstall has to go online to find a package (either - because it doesn't exist locally, or because ``--upgrade`` or ``-U`` was - used), the specified URLs will be downloaded and scanned for additional - direct links. - - Eggs and archives found by way of ``--find-links`` are only downloaded if - they are needed to meet a requirement specified on the command line; links - to unneeded packages are ignored. - - If all requested packages can be found using links on the specified - download pages, the Python Package Index will not be consulted unless you - also specified the ``--upgrade`` or ``-U`` option. - - (Note: if you want to refer to a local HTML file containing links, you must - use a ``file:`` URL, as filenames that do not refer to a directory, egg, or - archive are ignored.) - - You may specify multiple URLs or file/directory names with this option, - separated by whitespace. Note that on the command line, you will probably - have to surround the URL list with quotes, so that it is recognized as a - single option value. You can also specify URLs in a configuration file; - see `Configuration Files`_, above. - - Changed in 0.6a10: previously all URLs and directories passed to this - option were scanned as early as possible, but from 0.6a10 on, only - directories and direct archive links are scanned immediately; URLs are not - retrieved unless a package search was already going to go online due to a - package not being available locally, or due to the use of the ``--update`` - or ``-U`` option. - -``--no-find-links`` Blocks the addition of any link. - This parameter is useful if you want to avoid adding links defined in a - project easy_install is installing (whether it's a requested project or a - dependency). When used, ``--find-links`` is ignored. - - Added in Distribute 0.6.11 and Setuptools 0.7. - -``--index-url=URL, -i URL`` (New in 0.4a1; default changed in 0.6c7) - Specifies the base URL of the Python Package Index. 
The default is - https://pypi.python.org/simple if not specified. When a package is requested - that is not locally available or linked from a ``--find-links`` download - page, the package index will be searched for download pages for the needed - package, and those download pages will be searched for links to download - an egg or source distribution. - -``--editable, -e`` (New in 0.6a1) - Only find and download source distributions for the specified projects, - unpacking them to subdirectories of the specified ``--build-directory``. - EasyInstall will not actually build or install the requested projects or - their dependencies; it will just find and extract them for you. See - `Editing and Viewing Source Packages`_ above for more details. - -``--build-directory=DIR, -b DIR`` (UPDATED in 0.6a1) - Set the directory used to build source packages. If a package is built - from a source distribution or checkout, it will be extracted to a - subdirectory of the specified directory. The subdirectory will have the - same name as the extracted distribution's project, but in all-lowercase. - If a file or directory of that name already exists in the given directory, - a warning will be printed to the console, and the build will take place in - a temporary directory instead. - - This option is most useful in combination with the ``--editable`` option, - which forces EasyInstall to *only* find and extract (but not build and - install) source distributions. See `Editing and Viewing Source Packages`_, - above, for more information. - -``--verbose, -v, --quiet, -q`` (New in 0.4a4) - Control the level of detail of EasyInstall's progress messages. The - default detail level is "info", which prints information only about - relatively time-consuming operations like running a setup script, unpacking - an archive, or retrieving a URL. Using ``-q`` or ``--quiet`` drops the - detail level to "warn", which will only display installation reports, - warnings, and errors. 
Using ``-v`` or ``--verbose`` increases the detail - level to include individual file-level operations, link analysis messages, - and distutils messages from any setup scripts that get run. If you include - the ``-v`` option more than once, the second and subsequent uses are passed - down to any setup scripts, increasing the verbosity of their reporting as - well. - -``--dry-run, -n`` (New in 0.4a4) - Don't actually install the package or scripts. This option is passed down - to any setup scripts run, so packages should not actually build either. - This does *not* skip downloading, nor does it skip extracting source - distributions to a temporary/build directory. - -``--optimize=LEVEL``, ``-O LEVEL`` (New in 0.4a4) - If you are installing from a source distribution, and are *not* using the - ``--zip-ok`` option, this option controls the optimization level for - compiling installed ``.py`` files to ``.pyo`` files. It does not affect - the compilation of modules contained in ``.egg`` files, only those in - ``.egg`` directories. The optimization level can be set to 0, 1, or 2; - the default is 0 (unless it's set under ``install`` or ``install_lib`` in - one of your distutils configuration files). - -``--record=FILENAME`` (New in 0.5a4) - Write a record of all installed files to FILENAME. This is basically the - same as the same option for the standard distutils "install" command, and - is included for compatibility with tools that expect to pass this option - to "setup.py install". - -``--site-dirs=DIRLIST, -S DIRLIST`` (New in 0.6a1) - Specify one or more custom "site" directories (separated by commas). - "Site" directories are directories where ``.pth`` files are processed, such - as the main Python ``site-packages`` directory. As of 0.6a10, EasyInstall - automatically detects whether a given directory processes ``.pth`` files - (or can be made to do so), so you should not normally need to use this - option. 
It is now only necessary if you want to override EasyInstall's
- judgment and force an installation directory to be treated as if it
- supported ``.pth`` files.
-
-``--no-deps, -N`` (New in 0.6a6)
- Don't install any dependencies. This is intended as a convenience for
- tools that wrap eggs in a platform-specific packaging system. (We don't
- recommend that you use it for anything else.)
-
-``--allow-hosts=PATTERNS, -H PATTERNS`` (New in 0.6a6)
- Restrict downloading and spidering to hosts matching the specified glob
- patterns. E.g. ``-H *.python.org`` restricts web access so that only
- packages listed and downloadable from machines in the ``python.org``
- domain. The glob patterns must match the *entire* user/host/port section of
- the target URL(s). For example, ``*.python.org`` will NOT accept a URL
- like ``http://python.org/foo`` or ``http://www.python.org:8080/``.
- Multiple patterns can be specified by separating them with commas. The
- default pattern is ``*``, which matches anything.
-
- In general, this option is mainly useful for blocking EasyInstall's web
- access altogether (e.g. ``-Hlocalhost``), or to restrict it to an intranet
- or other trusted site. EasyInstall will do the best it can to satisfy
- dependencies given your host restrictions, but of course can fail if it
- can't find suitable packages. EasyInstall displays all blocked URLs, so
- that you can adjust your ``--allow-hosts`` setting if it is more strict
- than you intended. Some sites may wish to define a restrictive default
- setting for this option in their `configuration files`_, and then manually
- override the setting on the command line as needed.
-
-``--prefix=DIR`` (New in 0.6a10)
- Use the specified directory as a base for computing the default
- installation and script directories. 
On Windows, the resulting default - directories will be ``prefix\\Lib\\site-packages`` and ``prefix\\Scripts``, - while on other platforms the defaults will be - ``prefix/lib/python2.X/site-packages`` (with the appropriate version - substituted) for libraries and ``prefix/bin`` for scripts. - - Note that the ``--prefix`` option only sets the *default* installation and - script directories, and does not override the ones set on the command line - or in a configuration file. - -``--local-snapshots-ok, -l`` (New in 0.6c6) - Normally, EasyInstall prefers to only install *released* versions of - projects, not in-development ones, because such projects may not - have a currently-valid version number. So, it usually only installs them - when their ``setup.py`` directory is explicitly passed on the command line. - - However, if this option is used, then any in-development projects that were - installed using the ``setup.py develop`` command, will be used to build - eggs, effectively upgrading the "in-development" project to a snapshot - release. Normally, this option is used only in conjunction with the - ``--always-copy`` option to create a distributable snapshot of every egg - needed to run an application. - - Note that if you use this option, you must make sure that there is a valid - version number (such as an SVN revision number tag) for any in-development - projects that may be used, as otherwise EasyInstall may not be able to tell - what version of the project is "newer" when future installations or - upgrades are attempted. - - -.. _non-root installation: - -Custom Installation Locations ------------------------------ - -By default, EasyInstall installs python packages into Python's main ``site-packages`` directory, -and manages them using a custom ``.pth`` file in that same directory. - -Very often though, a user or developer wants ``easy_install`` to install and manage python packages -in an alternative location, usually for one of 3 reasons: - -1. 
They don't have access to write to the main Python site-packages directory. - -2. They want a user-specific stash of packages, that is not visible to other users. - -3. They want to isolate a set of packages to a specific python application, usually to minimize - the possibility of version conflicts. - -Historically, there have been many approaches to achieve custom installation. -The following section lists only the easiest and most relevant approaches [1]_. - -`Use the "--user" option`_ - -`Use the "--user" option and customize "PYTHONUSERBASE"`_ - -`Use "virtualenv"`_ - -.. [1] There are older ways to achieve custom installation using various ``easy_install`` and ``setup.py install`` options, combined with ``PYTHONPATH`` and/or ``PYTHONUSERBASE`` alterations, but all of these are effectively deprecated by the User scheme brought in by `PEP-370`_ in Python 2.6. - -.. _PEP-370: http://www.python.org/dev/peps/pep-0370/ - - -Use the "--user" option -~~~~~~~~~~~~~~~~~~~~~~~ -With Python 2.6 came the User scheme for installation, which means that all -python distributions support an alternative install location that is specific to a user [2]_ [3]_. -The Default location for each OS is explained in the python documentation -for the ``site.USER_BASE`` variable. This mode of installation can be turned on by -specifying the ``--user`` option to ``setup.py install`` or ``easy_install``. -This approach serves the need to have a user-specific stash of packages. - -.. [2] Prior to Python2.6, Mac OS X offered a form of the User scheme. That is now subsumed into the User scheme introduced in Python 2.6. -.. [3] Prior to the User scheme, there was the Home scheme, which is still available, but requires more effort than the User scheme to get packages recognized. 
-
-Use the "--user" option and customize "PYTHONUSERBASE"
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-The User scheme install location can be customized by setting the ``PYTHONUSERBASE`` environment
-variable, which updates the value of ``site.USER_BASE``. To isolate packages to a specific
-application, simply set the OS environment of that application to a specific value of
-``PYTHONUSERBASE``, that contains just those packages.
-
-Use "virtualenv"
-~~~~~~~~~~~~~~~~
-"virtualenv" is a 3rd-party python package that effectively "clones" a python installation, thereby
-creating an isolated location to install packages. The evolution of "virtualenv" started before the existence
-of the User installation scheme. "virtualenv" provides a version of ``easy_install`` that is
-scoped to the cloned python install and is used in the normal way. "virtualenv" does offer various features
-that the User installation scheme alone does not provide, e.g. the ability to hide the main python site-packages.
-
-Please refer to the `virtualenv`_ documentation for more details.
-
-.. _virtualenv: https://pypi.python.org/pypi/virtualenv
-
-
-
-Package Index "API"
--------------------
-
-Custom package indexes (and PyPI) must follow the following rules for
-EasyInstall to be able to look up and download packages:
-
-1. Except where stated otherwise, "pages" are HTML or XHTML, and "links"
- refer to ``href`` attributes.
-
-2. Individual project version pages' URLs must be of the form
- ``base/projectname/version``, where ``base`` is the package index's base URL.
-
-3. Omitting the ``/version`` part of a project page's URL (but keeping the
- trailing ``/``) should result in a page that is either:
-
- a) The single active version of that project, as though the version had been
- explicitly included, OR
-
- b) A page with links to all of the active version pages for that project.
-
-4. 
Individual project version pages should contain direct links to downloadable - distributions where possible. It is explicitly permitted for a project's - "long_description" to include URLs, and these should be formatted as HTML - links by the package index, as EasyInstall does no special processing to - identify what parts of a page are index-specific and which are part of the - project's supplied description. - -5. Where available, MD5 information should be added to download URLs by - appending a fragment identifier of the form ``#md5=...``, where ``...`` is - the 32-character hex MD5 digest. EasyInstall will verify that the - downloaded file's MD5 digest matches the given value. - -6. Individual project version pages should identify any "homepage" or - "download" URLs using ``rel="homepage"`` and ``rel="download"`` attributes - on the HTML elements linking to those URLs. Use of these attributes will - cause EasyInstall to always follow the provided links, unless it can be - determined by inspection that they are downloadable distributions. If the - links are not to downloadable distributions, they are retrieved, and if they - are HTML, they are scanned for download links. They are *not* scanned for - additional "homepage" or "download" links, as these are only processed for - pages that are part of a package index site. - -7. The root URL of the index, if retrieved with a trailing ``/``, must result - in a page containing links to *all* projects' active version pages. - - (Note: This requirement is a workaround for the absence of case-insensitive - ``safe_name()`` matching of project names in URL paths. If project names are - matched in this fashion (e.g. via the PyPI server, mod_rewrite, or a similar - mechanism), then it is not necessary to include this all-packages listing - page.) - -8. 
If a package index is accessed via a ``file://`` URL, then EasyInstall will - automatically use ``index.html`` files, if present, when trying to read a - directory with a trailing ``/`` on the URL. - - -Backward Compatibility -~~~~~~~~~~~~~~~~~~~~~~ - -Package indexes that wish to support setuptools versions prior to 0.6b4 should -also follow these rules: - -* Homepage and download links must be preceded with ``"Home Page"`` or - ``"Download URL"``, in addition to (or instead of) the ``rel=""`` - attributes on the actual links. These marker strings do not need to be - visible, or uncommented, however! For example, the following is a valid - homepage link that will work with any version of setuptools:: - -
-    <li>
-     <!-- Home Page: -->
-     <a href="http://sqlobject.org">http://sqlobject.org</a>
-    </li>
-
- Even though the marker string is in an HTML comment, older versions of
- EasyInstall will still "see" it and know that the link that follows is the
- project's home page URL.
-
-* The pages described by paragraph 3(b) of the preceding section *must*
- contain the string ``"Index of Packages"`` somewhere in their text.
- This can be inside of an HTML comment, if desired, and it can be anywhere
- in the page. (Note: this string MUST NOT appear on normal project pages, as
- described in paragraphs 2 and 3(a)!)
-
-In addition, for compatibility with PyPI versions that do not use ``#md5=``
-fragment IDs, EasyInstall uses the following regular expression to match PyPI's
-displayed MD5 info (broken onto two lines for readability)::
-
-    <a href="([^"#]+)">([^<]+)</a>\n\s+\(md5\)
-
-History
-=======
-
-0.6c9
- * Fixed ``win32.exe`` support for .pth files, so unnecessary directory nesting
- is flattened out in the resulting egg. (There was a case-sensitivity
- problem that affected some distributions, notably ``pywin32``.)
-
- * Prevent ``--help-commands`` and other junk from showing under Python 2.5
- when running ``easy_install --help``.
-
- * Fixed GUI scripts sometimes not executing on Windows
-
- * Fixed not picking up dependency links from recursive dependencies.
-
- * Only make ``.py``, ``.dll`` and ``.so`` files executable when unpacking eggs
-
- * Changes for Jython compatibility
-
- * Improved error message when a requirement is also a directory name, but the
- specified directory is not a source package.
-
- * Fixed ``--allow-hosts`` option blocking ``file:`` URLs
-
- * Fixed HTTP SVN detection failing when the page title included a project
- name (e.g. on SourceForge-hosted SVN)
-
- * Fix Jython script installation to handle ``#!`` lines better when
- ``sys.executable`` is a script.
-
- * Removed use of deprecated ``md5`` module if ``hashlib`` is available
-
- * Keep site directories (e.g. ``site-packages``) from being included in
- ``.pth`` files. 
-
-0.6c7
- * ``ftp:`` download URLs now work correctly.
-
- * The default ``--index-url`` is now ``https://pypi.python.org/simple``, to use
- the Python Package Index's new simpler (and faster!) REST API.
-
-0.6c6
- * EasyInstall no longer aborts the installation process if a URL it wants to
- retrieve can't be downloaded, unless the URL is an actual package download.
- Instead, it issues a warning and tries to keep going.
-
- * Fixed distutils-style scripts originally built on Windows having their line
- endings doubled when installed on any platform.
-
- * Added ``--local-snapshots-ok`` flag, to allow building eggs from projects
- installed using ``setup.py develop``.
-
- * Fixed not HTML-decoding URLs scraped from web pages
-
-0.6c5
- * Fixed ``.dll`` files on Cygwin not having executable permissions when an egg
- is installed unzipped.
-
-0.6c4
- * Added support for HTTP "Basic" authentication using ``http://user:pass@host``
- URLs. If a password-protected page contains links to the same host (and
- protocol), those links will inherit the credentials used to access the
- original page.
-
- * Removed all special support for Sourceforge mirrors, as Sourceforge's
- mirror system now works well for non-browser downloads.
-
- * Fixed not recognizing ``win32.exe`` installers that included a custom
- bitmap.
-
- * Fixed not allowing ``os.open()`` of paths outside the sandbox, even if they
- are opened read-only (e.g. reading ``/dev/urandom`` for random numbers, as
- is done by ``os.urandom()`` on some platforms).
-
- * Fixed a problem with ``.pth`` testing on Windows when ``sys.executable``
- has a space in it (e.g., the user installed Python to a ``Program Files``
- directory).
-
-0.6c3
- * You can once again use "python -m easy_install" with Python 2.4 and above.
-
- * Python 2.5 compatibility fixes added.
-
-0.6c2
- * Windows script wrappers now support quoted arguments and arguments
- containing spaces. (Patch contributed by Jim Fulton.) 
- - * The ``ez_setup.py`` script now actually works when you put a setuptools - ``.egg`` alongside it for bootstrapping an offline machine. - - * A writable installation directory on ``sys.path`` is no longer required to - download and extract a source distribution using ``--editable``. - - * Generated scripts now use ``-x`` on the ``#!`` line when ``sys.executable`` - contains non-ASCII characters, to prevent deprecation warnings about an - unspecified encoding when the script is run. - -0.6c1 - * EasyInstall now includes setuptools version information in the - ``User-Agent`` string sent to websites it visits. - -0.6b4 - * Fix creating Python wrappers for non-Python scripts - - * Fix ``ftp://`` directory listing URLs from causing a crash when used in the - "Home page" or "Download URL" slots on PyPI. - - * Fix ``sys.path_importer_cache`` not being updated when an existing zipfile - or directory is deleted/overwritten. - - * Fix not recognizing HTML 404 pages from package indexes. - - * Allow ``file://`` URLs to be used as a package index. URLs that refer to - directories will use an internally-generated directory listing if there is - no ``index.html`` file in the directory. - - * Allow external links in a package index to be specified using - ``rel="homepage"`` or ``rel="download"``, without needing the old - PyPI-specific visible markup. - - * Suppressed warning message about possibly-misspelled project name, if an egg - or link for that project name has already been seen. - -0.6b3 - * Fix local ``--find-links`` eggs not being copied except with - ``--always-copy``. - - * Fix sometimes not detecting local packages installed outside of "site" - directories. - - * Fix mysterious errors during initial ``setuptools`` install, caused by - ``ez_setup`` trying to run ``easy_install`` twice, due to a code fallthru - after deleting the egg from which it's running. 
- -0.6b2 - * Don't install or update a ``site.py`` patch when installing to a - ``PYTHONPATH`` directory with ``--multi-version``, unless an - ``easy-install.pth`` file is already in use there. - - * Construct ``.pth`` file paths in such a way that installing an egg whose - name begins with ``import`` doesn't cause a syntax error. - - * Fixed a bogus warning message that wasn't updated since the 0.5 versions. - -0.6b1 - * Better ambiguity management: accept ``#egg`` name/version even if processing - what appears to be a correctly-named distutils file, and ignore ``.egg`` - files with no ``-``, since valid Python ``.egg`` files always have a version - number (but Scheme eggs often don't). - - * Support ``file://`` links to directories in ``--find-links``, so that - easy_install can build packages from local source checkouts. - - * Added automatic retry for Sourceforge mirrors. The new download process is - to first just try dl.sourceforge.net, then randomly select mirror IPs and - remove ones that fail, until something works. The removed IPs stay removed - for the remainder of the run. - - * Ignore bdist_dumb distributions when looking at download URLs. - -0.6a11 - * Process ``dependency_links.txt`` if found in a distribution, by adding the - URLs to the list for scanning. - - * Use relative paths in ``.pth`` files when eggs are being installed to the - same directory as the ``.pth`` file. This maximizes portability of the - target directory when building applications that contain eggs. - - * Added ``easy_install-N.N`` script(s) for convenience when using multiple - Python versions. - - * Added automatic handling of installation conflicts. Eggs are now shifted to - the front of sys.path, in an order consistent with where they came from, - making EasyInstall seamlessly co-operate with system package managers. 
- - The ``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk`` options - are now no longer necessary, and will generate warnings at the end of a - run if you use them. - - * Don't recursively traverse subdirectories given to ``--find-links``. - -0.6a10 - * Added exhaustive testing of the install directory, including a spawn test - for ``.pth`` file support, and directory writability/existence checks. This - should virtually eliminate the need to set or configure ``--site-dirs``. - - * Added ``--prefix`` option for more do-what-I-mean-ishness in the absence of - RTFM-ing. :) - - * Enhanced ``PYTHONPATH`` support so that you don't have to put any eggs on it - manually to make it work. ``--multi-version`` is no longer a silent - default; you must explicitly use it if installing to a non-PYTHONPATH, - non-"site" directory. - - * Expand ``$variables`` used in the ``--site-dirs``, ``--build-directory``, - ``--install-dir``, and ``--script-dir`` options, whether on the command line - or in configuration files. - - * Improved SourceForge mirror processing to work faster and be less affected - by transient HTML changes made by SourceForge. - - * PyPI searches now use the exact spelling of requirements specified on the - command line or in a project's ``install_requires``. Previously, a - normalized form of the name was used, which could lead to unnecessary - full-index searches when a project's name had an underscore (``_``) in it. - - * EasyInstall can now download bare ``.py`` files and wrap them in an egg, - as long as you include an ``#egg=name-version`` suffix on the URL, or if - the ``.py`` file is listed as the "Download URL" on the project's PyPI page. - This allows third parties to "package" trivial Python modules just by - linking to them (e.g. from within their own PyPI page or download links - page). - - * The ``--always-copy`` option now skips "system" and "development" eggs since - they can't be reliably copied. 
Note that this may cause EasyInstall to - choose an older version of a package than what you expected, or it may cause - downloading and installation of a fresh version of what's already installed. - - * The ``--find-links`` option previously scanned all supplied URLs and - directories as early as possible, but now only directories and direct - archive links are scanned immediately. URLs are not retrieved unless a - package search was already going to go online due to a package not being - available locally, or due to the use of the ``--update`` or ``-U`` option. - - * Fixed the annoying ``--help-commands`` wart. - -0.6a9 - * Fixed ``.pth`` file processing picking up nested eggs (i.e. ones inside - "baskets") when they weren't explicitly listed in the ``.pth`` file. - - * If more than one URL appears to describe the exact same distribution, prefer - the shortest one. This helps to avoid "table of contents" CGI URLs like the - ones on effbot.org. - - * Quote arguments to python.exe (including python's path) to avoid problems - when Python (or a script) is installed in a directory whose name contains - spaces on Windows. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. - -0.6a8 - * Update for changed SourceForge mirror format - - * Fixed not installing dependencies for some packages fetched via Subversion - - * Fixed dependency installation with ``--always-copy`` not using the same - dependency resolution procedure as other operations. 
- - * Fixed not fully removing temporary directories on Windows, if a Subversion - checkout left read-only files behind - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - -0.6a7 - * Fixed not being able to install Windows script wrappers using Python 2.3 - -0.6a6 - * Added support for "traditional" PYTHONPATH-based non-root installation, and - also the convenient ``virtual-python.py`` script, based on a contribution - by Ian Bicking. The setuptools egg now contains a hacked ``site`` module - that makes the PYTHONPATH-based approach work with .pth files, so that you - can get the full EasyInstall feature set on such installations. - - * Added ``--no-deps`` and ``--allow-hosts`` options. - - * Improved Windows ``.exe`` script wrappers so that the script can have the - same name as a module without confusing Python. - - * Changed dependency processing so that it's breadth-first, allowing a - depender's preferences to override those of a dependee, to prevent conflicts - when a lower version is acceptable to the dependee, but not the depender. - Also, ensure that currently installed/selected packages aren't given - precedence over ones desired by a package being installed, which could - cause conflict errors. - -0.6a3 - * Improved error message when trying to use old ways of running - ``easy_install``. Removed the ability to run via ``python -m`` or by - running ``easy_install.py``; ``easy_install`` is the command to run on all - supported platforms. - - * Improved wrapper script generation and runtime initialization so that a - VersionConflict doesn't occur if you later install a competing version of a - needed package as the default version of that package. - - * Fixed a problem parsing version numbers in ``#egg=`` links. - -0.6a2 - * EasyInstall can now install "console_scripts" defined by packages that use - ``setuptools`` and define appropriate entry points. 
On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are installed without a file extension. - - * Using ``python -m easy_install`` or running ``easy_install.py`` is now - DEPRECATED, since an ``easy_install`` wrapper is now available on all - platforms. - -0.6a1 - * EasyInstall now does MD5 validation of downloads from PyPI, or from any link - that has an "#md5=..." trailer with a 32-digit lowercase hex md5 digest. - - * EasyInstall now handles symlinks in target directories by removing the link, - rather than attempting to overwrite the link's destination. This makes it - easier to set up an alternate Python "home" directory (as described above in - the `Non-Root Installation`_ section). - - * Added support for handling MacOS platform information in ``.egg`` filenames, - based on a contribution by Kevin Dangoor. You may wish to delete and - reinstall any eggs whose filename includes "darwin" and "Power_Macintosh", - because the format for this platform information has changed so that minor - OS X upgrades (such as 10.4.1 to 10.4.2) do not cause eggs built with a - previous OS version to become obsolete. - - * easy_install's dependency processing algorithms have changed. When using - ``--always-copy``, it now ensures that dependencies are copied too. When - not using ``--always-copy``, it tries to use a single resolution loop, - rather than recursing. - - * Fixed installing extra ``.pyc`` or ``.pyo`` files for scripts with ``.py`` - extensions. - - * Added ``--site-dirs`` option to allow adding custom "site" directories. - Made ``easy-install.pth`` work in platform-specific alternate site - directories (e.g. ``~/Library/Python/2.x/site-packages`` on Mac OS X). - - * If you manually delete the current version of a package, the next run of - EasyInstall against the target directory will now remove the stray entry - from the ``easy-install.pth`` file. 
- - * EasyInstall now recognizes URLs with a ``#egg=project_name`` fragment ID - as pointing to the named project's source checkout. Such URLs have a lower - match precedence than any other kind of distribution, so they'll only be - used if they have a higher version number than any other available - distribution, or if you use the ``--editable`` option. The ``#egg`` - fragment can contain a version if it's formatted as ``#egg=proj-ver``, - where ``proj`` is the project name, and ``ver`` is the version number. You - *must* use the format for these values that the ``bdist_egg`` command uses; - i.e., all non-alphanumeric runs must be condensed to single underscore - characters. - - * Added the ``--editable`` option; see `Editing and Viewing Source Packages`_ - above for more info. Also, slightly changed the behavior of the - ``--build-directory`` option. - - * Fixed the setup script sandbox facility not recognizing certain paths as - valid on case-insensitive platforms. - -0.5a12 - * Fix ``python -m easy_install`` not working due to setuptools being installed - as a zipfile. Update safety scanner to check for modules that might be used - as ``python -m`` scripts. - - * Misc. fixes for win32.exe support, including changes to support Python 2.4's - changed ``bdist_wininst`` format. - -0.5a10 - * Put the ``easy_install`` module back in as a module, as it's needed for - ``python -m`` to run it! - - * Allow ``--find-links/-f`` to accept local directories or filenames as well - as URLs. - -0.5a9 - * EasyInstall now automatically detects when an "unmanaged" package or - module is going to be on ``sys.path`` ahead of a package you're installing, - thereby preventing the newer version from being imported. By default, it - will abort installation to alert you of the problem, but there are also - new options (``--delete-conflicting`` and ``--ignore-conflicts-at-my-risk``) - available to change the default behavior. 
(Note: this new feature doesn't - take effect for egg files that were built with older ``setuptools`` - versions, because they lack the new metadata file required to implement it.) - - * The ``easy_install`` distutils command now uses ``DistutilsError`` as its - base error type for errors that should just issue a message to stderr and - exit the program without a traceback. - - * EasyInstall can now be given a path to a directory containing a setup - script, and it will attempt to build and install the package there. - - * EasyInstall now performs a safety analysis on module contents to determine - whether a package is likely to run in zipped form, and displays - information about what modules may be doing introspection that would break - when running as a zipfile. - - * Added the ``--always-unzip/-Z`` option, to force unzipping of packages that - would ordinarily be considered safe to unzip, and changed the meaning of - ``--zip-ok/-z`` to "always leave everything zipped". - -0.5a8 - * There is now a separate documentation page for `setuptools`_; revision - history that's not specific to EasyInstall has been moved to that page. - - .. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools - -0.5a5 - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - -0.5a4 - * Added ``--always-copy/-a`` option to always copy needed packages to the - installation directory, even if they're already present elsewhere on - sys.path. (In previous versions, this was the default behavior, but now - you must request it.) - - * Added ``--upgrade/-U`` option to force checking PyPI for latest available - version(s) of all packages requested by name and version, even if a matching - version is available locally. 
- - * Added automatic installation of dependencies declared by a distribution - being installed. These dependencies must be listed in the distribution's - ``EGG-INFO`` directory, so the distribution has to have declared its - dependencies by using setuptools. If a package has requirements it didn't - declare, you'll still have to deal with them yourself. (E.g., by asking - EasyInstall to find and install them.) - - * Added the ``--record`` option to ``easy_install`` for the benefit of tools - that run ``setup.py install --record=filename`` on behalf of another - packaging system.) - -0.5a3 - * Fixed not setting script permissions to allow execution. - - * Improved sandboxing so that setup scripts that want a temporary directory - (e.g. pychecker) can still run in the sandbox. - -0.5a2 - * Fix stupid stupid refactoring-at-the-last-minute typos. :( - -0.5a1 - * Added support for converting ``.win32.exe`` installers to eggs on the fly. - EasyInstall will now recognize such files by name and install them. - - * Fixed a problem with picking the "best" version to install (versions were - being sorted as strings, rather than as parsed values) - -0.4a4 - * Added support for the distutils "verbose/quiet" and "dry-run" options, as - well as the "optimize" flag. - - * Support downloading packages that were uploaded to PyPI (by scanning all - links on package pages, not just the homepage/download links). - -0.4a3 - * Add progress messages to the search/download process so that you can tell - what URLs it's reading to find download links. (Hopefully, this will help - people report out-of-date and broken links to package authors, and to tell - when they've asked for a package that doesn't exist.) - -0.4a2 - * Added support for installing scripts - - * Added support for setting options via distutils configuration files, and - using distutils' default options as a basis for EasyInstall's defaults. 
- - * Renamed ``--scan-url/-s`` to ``--find-links/-f`` to free up ``-s`` for the - script installation directory option. - - * Use ``urllib2`` instead of ``urllib``, to allow use of ``https:`` URLs if - Python includes SSL support. - -0.4a1 - * Added ``--scan-url`` and ``--index-url`` options, to scan download pages - and search PyPI for needed packages. - -0.3a4 - * Restrict ``--build-directory=DIR/-b DIR`` option to only be used with single - URL installs, to avoid running the wrong setup.py. - -0.3a3 - * Added ``--build-directory=DIR/-b DIR`` option. - - * Added "installation report" that explains how to use 'require()' when doing - a multiversion install or alternate installation directory. - - * Added SourceForge mirror auto-select (Contributed by Ian Bicking) - - * Added "sandboxing" that stops a setup script from running if it attempts to - write to the filesystem outside of the build area - - * Added more workarounds for packages with quirky ``install_data`` hacks - -0.3a2 - * Added subversion download support for ``svn:`` and ``svn+`` URLs, as well as - automatic recognition of HTTP subversion URLs (Contributed by Ian Bicking) - - * Misc. bug fixes - -0.3a1 - * Initial release. - - -Future Plans -============ - -* Additional utilities to list/remove/verify packages -* Signature checking? SSL? Ability to suppress PyPI search? -* Display byte progress meter when downloading distributions and long pages? -* Redirect stdout/stderr to log during run_setup? - diff --git a/libs/setuptools-2.2/docs/formats.txt b/libs/setuptools-2.2/docs/formats.txt deleted file mode 100644 index ef28353..0000000 --- a/libs/setuptools-2.2/docs/formats.txt +++ /dev/null @@ -1,676 +0,0 @@ -===================================== -The Internal Structure of Python Eggs -===================================== - -STOP! This is not the first document you should read! - - - -.. 
contents:: **Table of Contents** - - ----------------------- -Eggs and their Formats ----------------------- - -A "Python egg" is a logical structure embodying the release of a -specific version of a Python project, comprising its code, resources, -and metadata. There are multiple formats that can be used to physically -encode a Python egg, and others can be developed. However, a key -principle of Python eggs is that they should be discoverable and -importable. That is, it should be possible for a Python application to -easily and efficiently find out what eggs are present on a system, and -to ensure that the desired eggs' contents are importable. - -There are two basic formats currently implemented for Python eggs: - -1. ``.egg`` format: a directory or zipfile *containing* the project's - code and resources, along with an ``EGG-INFO`` subdirectory that - contains the project's metadata - -2. ``.egg-info`` format: a file or directory placed *adjacent* to the - project's code and resources, that directly contains the project's - metadata. - -Both formats can include arbitrary Python code and resources, including -static data files, package and non-package directories, Python -modules, C extension modules, and so on. But each format is optimized -for different purposes. - -The ``.egg`` format is well-suited to distribution and the easy -uninstallation or upgrades of code, since the project is essentially -self-contained within a single directory or file, unmingled with any -other projects' code or resources. It also makes it possible to have -multiple versions of a project simultaneously installed, such that -individual programs can select the versions they wish to use. - -The ``.egg-info`` format, on the other hand, was created to support -backward-compatibility, performance, and ease of installation for system -packaging tools that expect to install all projects' code and resources -to a single directory (e.g. ``site-packages``). 
Placing the metadata -in that same directory simplifies the installation process, since it -isn't necessary to create ``.pth`` files or otherwise modify -``sys.path`` to include each installed egg. - -Its disadvantage, however, is that it provides no support for clean -uninstallation or upgrades, and of course only a single version of a -project can be installed to a given directory. Thus, support from a -package management tool is required. (This is why setuptools' "install" -command refers to this type of egg installation as "single-version, -externally managed".) Also, they lack sufficient data to allow them to -be copied from their installation source. easy_install can "ship" an -application by copying ``.egg`` files or directories to a target -location, but it cannot do this for ``.egg-info`` installs, because -there is no way to tell what code and resources belong to a particular -egg -- there may be several eggs "scrambled" together in a single -installation location, and the ``.egg-info`` format does not currently -include a way to list the files that were installed. (This may change -in a future version.) - - -Code and Resources -================== - -The layout of the code and resources is dictated by Python's normal -import layout, relative to the egg's "base location". - -For the ``.egg`` format, the base location is the ``.egg`` itself. That -is, adding the ``.egg`` filename or directory name to ``sys.path`` -makes its contents importable. - -For the ``.egg-info`` format, however, the base location is the -directory that *contains* the ``.egg-info``, and thus it is the -directory that must be added to ``sys.path`` to make the egg importable. -(Note that this means that the "normal" installation of a package to a -``sys.path`` directory is sufficient to make it an "egg" if it has an -``.egg-info`` file or directory installed alongside of it.) 
- - -Project Metadata -================= - -If eggs contained only code and resources, there would of course be -no difference between them and any other directory or zip file on -``sys.path``. Thus, metadata must also be included, using a metadata -file or directory. - -For the ``.egg`` format, the metadata is placed in an ``EGG-INFO`` -subdirectory, directly within the ``.egg`` file or directory. For the -``.egg-info`` format, metadata is stored directly within the -``.egg-info`` directory itself. - -The minimum project metadata that all eggs must have is a standard -Python ``PKG-INFO`` file, named ``PKG-INFO`` and placed within the -metadata directory appropriate to the format. Because it's possible for -this to be the only metadata file included, ``.egg-info`` format eggs -are not required to be a directory; they can just be a ``.egg-info`` -file that directly contains the ``PKG-INFO`` metadata. This eliminates -the need to create a directory just to store one file. This option is -*not* available for ``.egg`` formats, since setuptools always includes -other metadata. (In fact, setuptools itself never generates -``.egg-info`` files, either; the support for using files was added so -that the requirement could easily be satisfied by other tools, such -as the distutils in Python 2.5). - -In addition to the ``PKG-INFO`` file, an egg's metadata directory may -also include files and directories representing various forms of -optional standard metadata (see the section on `Standard Metadata`_, -below) or user-defined metadata required by the project. For example, -some projects may define a metadata format to describe their application -plugins, and metadata in this format would then be included by plugin -creators in their projects' metadata directories. 
- - -Filename-Embedded Metadata -========================== - -To allow introspection of installed projects and runtime resolution of -inter-project dependencies, a certain amount of information is embedded -in egg filenames. At a minimum, this includes the project name, and -ideally will also include the project version number. Optionally, it -can also include the target Python version and required runtime -platform if platform-specific C code is included. The syntax of an -egg filename is as follows:: - - name ["-" version ["-py" pyver ["-" required_platform]]] "." ext - -The "name" and "version" should be escaped using the ``to_filename()`` -function provided by ``pkg_resources``, after first processing them with -``safe_name()`` and ``safe_version()`` respectively. These latter two -functions can also be used to later "unescape" these parts of the -filename. (For a detailed description of these transformations, please -see the "Parsing Utilities" section of the ``pkg_resources`` manual.) - -The "pyver" string is the Python major version, as found in the first -3 characters of ``sys.version``. "required_platform" is essentially -a distutils ``get_platform()`` string, but with enhancements to properly -distinguish Mac OS versions. (See the ``get_build_platform()`` -documentation in the "Platform Utilities" section of the -``pkg_resources`` manual for more details.) - -Finally, the "ext" is either ``.egg`` or ``.egg-info``, as appropriate -for the egg's format. - -Normally, an egg's filename should include at least the project name and -version, as this allows the runtime system to find desired project -versions without having to read the egg's PKG-INFO to determine its -version number. - -Setuptools, however, only includes the version number in the filename -when an ``.egg`` file is built using the ``bdist_egg`` command, or when -an ``.egg-info`` directory is being installed by the -``install_egg_info`` command. 
When generating metadata for use with the -original source tree, it only includes the project name, so that the -directory will not have to be renamed each time the project's version -changes. - -This is especially important when version numbers change frequently, and -the source metadata directory is kept under version control with the -rest of the project. (As would be the case when the project's source -includes project-defined metadata that is not generated from by -setuptools from data in the setup script.) - - -Egg Links -========= - -In addition to the ``.egg`` and ``.egg-info`` formats, there is a third -egg-related extension that you may encounter on occasion: ``.egg-link`` -files. - -These files are not eggs, strictly speaking. They simply provide a way -to reference an egg that is not physically installed in the desired -location. They exist primarily as a cross-platform alternative to -symbolic links, to support "installing" code that is being developed in -a different location than the desired installation location. For -example, if a user is developing an application plugin in their home -directory, but the plugin needs to be "installed" in an application -plugin directory, running "setup.py develop -md /path/to/app/plugins" -will install an ``.egg-link`` file in ``/path/to/app/plugins``, that -tells the egg runtime system where to find the actual egg (the user's -project source directory and its ``.egg-info`` subdirectory). - -``.egg-link`` files are named following the format for ``.egg`` and -``.egg-info`` names, but only the project name is included; no version, -Python version, or platform information is included. When the runtime -searches for available eggs, ``.egg-link`` files are opened and the -actual egg file/directory name is read from them. - -Each ``.egg-link`` file should contain a single file or directory name, -with no newlines. This filename should be the base location of one or -more eggs. 
That is, the name must either end in ``.egg``, or else it -should be the parent directory of one or more ``.egg-info`` format eggs. - -As of setuptools 0.6c6, the path may be specified as a platform-independent -(i.e. ``/``-separated) relative path from the directory containing the -``.egg-link`` file, and a second line may appear in the file, specifying a -platform-independent relative path from the egg's base directory to its -setup script directory. This allows installation tools such as EasyInstall -to find the project's setup directory and build eggs or perform other setup -commands on it. - - ------------------ -Standard Metadata ------------------ - -In addition to the minimum required ``PKG-INFO`` metadata, projects can -include a variety of standard metadata files or directories, as -described below. Except as otherwise noted, these files and directories -are automatically generated by setuptools, based on information supplied -in the setup script or through analysis of the project's code and -resources. - -Most of these files and directories are generated via "egg-info -writers" during execution of the setuptools ``egg_info`` command, and -are listed in the ``egg_info.writers`` entry point group defined by -setuptools' own ``setup.py`` file. - -Project authors can register their own metadata writers as entry points -in this group (as described in the setuptools manual under "Adding new -EGG-INFO Files") to cause setuptools to generate project-specific -metadata files or directories during execution of the ``egg_info`` -command. It is up to project authors to document these new metadata -formats, if they create any. - - -``.txt`` File Formats -===================== - -Files described in this section that have ``.txt`` extensions have a -simple lexical format consisting of a sequence of text lines, each line -terminated by a linefeed character (regardless of platform). 
Leading -and trailing whitespace on each line is ignored, as are blank lines and -lines whose first nonblank character is a ``#`` (comment symbol). (This -is the parsing format defined by the ``yield_lines()`` function of -the ``pkg_resources`` module.) - -All ``.txt`` files defined by this section follow this format, but some -are also "sectioned" files, meaning that their contents are divided into -sections, using square-bracketed section headers akin to Windows -``.ini`` format. Note that this does *not* imply that the lines within -the sections follow an ``.ini`` format, however. Please see an -individual metadata file's documentation for a description of what the -lines and section names mean in that particular file. - -Sectioned files can be parsed using the ``split_sections()`` function; -see the "Parsing Utilities" section of the ``pkg_resources`` manual for -for details. - - -Dependency Metadata -=================== - - -``requires.txt`` ----------------- - -This is a "sectioned" text file. Each section is a sequence of -"requirements", as parsed by the ``parse_requirements()`` function; -please see the ``pkg_resources`` manual for the complete requirement -parsing syntax. - -The first, unnamed section (i.e., before the first section header) in -this file is the project's core requirements, which must be installed -for the project to function. (Specified using the ``install_requires`` -keyword to ``setup()``). - -The remaining (named) sections describe the project's "extra" -requirements, as specified using the ``extras_require`` keyword to -``setup()``. The section name is the name of the optional feature, and -the section body lists that feature's dependencies. - -Note that it is not normally necessary to inspect this file directly; -``pkg_resources.Distribution`` objects have a ``requires()`` method -that can be used to obtain ``Requirement`` objects describing the -project's core and optional dependencies. 
- - - -``dependency_links.txt`` ------------------------- - -A list of dependency URLs, one per line, as specified using the -``dependency_links`` keyword to ``setup()``. These may be direct -download URLs, or the URLs of web pages containing direct download -links, and will be used by EasyInstall to find dependencies, as though -the user had manually provided them via the ``--find-links`` command -line option. Please see the setuptools manual and EasyInstall manual -for more information on specifying this option, and for information on -how EasyInstall processes ``--find-links`` URLs. - - -``depends.txt`` -- Obsolete, do not create! -------------------------------------------- - -This file follows an identical format to ``requires.txt``, but is -obsolete and should not be used. The earliest versions of setuptools -required users to manually create and maintain this file, so the runtime -still supports reading it, if it exists. The new filename was created -so that it could be automatically generated from ``setup()`` information -without overwriting an existing hand-created ``depends.txt``, if one -was already present in the project's source ``.egg-info`` directory. - - -``namespace_packages.txt`` -- Namespace Package Metadata -======================================================== - -A list of namespace package names, one per line, as supplied to the -``namespace_packages`` keyword to ``setup()``. Please see the manuals -for setuptools and ``pkg_resources`` for more information about -namespace packages. - - -``entry_points.txt`` -- "Entry Point"/Plugin Metadata -===================================================== - -This is a "sectioned" text file, whose contents encode the -``entry_points`` keyword supplied to ``setup()``. All sections are -named, as the section names specify the entry point groups in which the -corresponding section's entry points are registered. 
- -Each section is a sequence of "entry point" lines, each parseable using -the ``EntryPoint.parse`` classmethod; please see the ``pkg_resources`` -manual for the complete entry point parsing syntax. - -Note that it is not necessary to parse this file directly; the -``pkg_resources`` module provides a variety of APIs to locate and load -entry points automatically. Please see the setuptools and -``pkg_resources`` manuals for details on the nature and uses of entry -points. - - -The ``scripts`` Subdirectory -============================ - -This directory is currently only created for ``.egg`` files built by -the setuptools ``bdist_egg`` command. It will contain copies of all -of the project's "traditional" scripts (i.e., those specified using the -``scripts`` keyword to ``setup()``). This is so that they can be -reconstituted when an ``.egg`` file is installed. - -The scripts are placed here using the disutils' standard -``install_scripts`` command, so any ``#!`` lines reflect the Python -installation where the egg was built. But instead of copying the -scripts to the local script installation directory, EasyInstall writes -short wrapper scripts that invoke the original scripts from inside the -egg, after ensuring that sys.path includes the egg and any eggs it -depends on. For more about `script wrappers`_, see the section below on -`Installation and Path Management Issues`_. - - -Zip Support Metadata -==================== - - -``native_libs.txt`` -------------------- - -A list of C extensions and other dynamic link libraries contained in -the egg, one per line. Paths are ``/``-separated and relative to the -egg's base location. - -This file is generated as part of ``bdist_egg`` processing, and as such -only appears in ``.egg`` files (and ``.egg`` directories created by -unpacking them). It is used to ensure that all libraries are extracted -from a zipped egg at the same time, in case there is any direct linkage -between them. 
Please see the `Zip File Issues`_ section below for more -information on library and resource extraction from ``.egg`` files. - - -``eager_resources.txt`` ------------------------ - -A list of resource files and/or directories, one per line, as specified -via the ``eager_resources`` keyword to ``setup()``. Paths are -``/``-separated and relative to the egg's base location. - -Resource files or directories listed here will be extracted -simultaneously, if any of the named resources are extracted, or if any -native libraries listed in ``native_libs.txt`` are extracted. Please -see the setuptools manual for details on what this feature is used for -and how it works, as well as the `Zip File Issues`_ section below. - - -``zip-safe`` and ``not-zip-safe`` ---------------------------------- - -These are zero-length files, and either one or the other should exist. -If ``zip-safe`` exists, it means that the project will work properly -when installedas an ``.egg`` zipfile, and conversely the existence of -``not-zip-safe`` means the project should not be installed as an -``.egg`` file. The ``zip_safe`` option to setuptools' ``setup()`` -determines which file will be written. If the option isn't provided, -setuptools attempts to make its own assessment of whether the package -can work, based on code and content analysis. - -If neither file is present at installation time, EasyInstall defaults -to assuming that the project should be unzipped. (Command-line options -to EasyInstall, however, take precedence even over an existing -``zip-safe`` or ``not-zip-safe`` file.) - -Note that these flag files appear only in ``.egg`` files generated by -``bdist_egg``, and in ``.egg`` directories created by unpacking such an -``.egg`` file. - - - -``top_level.txt`` -- Conflict Management Metadata -================================================= - -This file is a list of the top-level module or package names provided -by the project, one Python identifier per line. 
- -Subpackages are not included; a project containing both a ``foo.bar`` -and a ``foo.baz`` would include only one line, ``foo``, in its -``top_level.txt``. - -This data is used by ``pkg_resources`` at runtime to issue a warning if -an egg is added to ``sys.path`` when its contained packages may have -already been imported. - -(It was also once used to detect conflicts with non-egg packages at -installation time, but in more recent versions, setuptools installs eggs -in such a way that they always override non-egg packages, thus -preventing a problem from arising.) - - -``SOURCES.txt`` -- Source Files Manifest -======================================== - -This file is roughly equivalent to the distutils' ``MANIFEST`` file. -The differences are as follows: - -* The filenames always use ``/`` as a path separator, which must be - converted back to a platform-specific path whenever they are read. - -* The file is automatically generated by setuptools whenever the - ``egg_info`` or ``sdist`` commands are run, and it is *not* - user-editable. - -Although this metadata is included with distributed eggs, it is not -actually used at runtime for any purpose. Its function is to ensure -that setuptools-built *source* distributions can correctly discover -what files are part of the project's source, even if the list had been -generated using revision control metadata on the original author's -system. - -In other words, ``SOURCES.txt`` has little or no runtime value for being -included in distributed eggs, and it is possible that future versions of -the ``bdist_egg`` and ``install_egg_info`` commands will strip it before -installation or distribution. Therefore, do not rely on its being -available outside of an original source directory or source -distribution. 
- - ------------------------------- -Other Technical Considerations ------------------------------- - - -Zip File Issues -=============== - -Although zip files resemble directories, they are not fully -substitutable for them. Most platforms do not support loading dynamic -link libraries contained in zipfiles, so it is not possible to directly -import C extensions from ``.egg`` zipfiles. Similarly, there are many -existing libraries -- whether in Python or C -- that require actual -operating system filenames, and do not work with arbitrary "file-like" -objects or in-memory strings, and thus cannot operate directly on the -contents of zip files. - -To address these issues, the ``pkg_resources`` module provides a -"resource API" to support obtaining either the contents of a resource, -or a true operating system filename for the resource. If the egg -containing the resource is a directory, the resource's real filename -is simply returned. However, if the egg is a zipfile, then the -resource is first extracted to a cache directory, and the filename -within the cache is returned. - -The cache directory is determined by the ``pkg_resources`` API; please -see the ``set_cache_path()`` and ``get_default_cache()`` documentation -for details. - - -The Extraction Process ----------------------- - -Resources are extracted to a cache subdirectory whose name is based -on the enclosing ``.egg`` filename and the path to the resource. If -there is already a file of the correct name, size, and timestamp, its -filename is returned to the requester. Otherwise, the desired file is -extracted first to a temporary name generated using -``mkstemp(".$extract",target_dir)``, and then its timestamp is set to -match the one in the zip file, before renaming it to its final name. -(Some collision detection and resolution code is used to handle the -fact that Windows doesn't overwrite files when renaming.) 
- -If a resource directory is requested, all of its contents are -recursively extracted in this fashion, to ensure that the directory -name can be used as if it were valid all along. - -If the resource requested for extraction is listed in the -``native_libs.txt`` or ``eager_resources.txt`` metadata files, then -*all* resources listed in *either* file will be extracted before the -requested resource's filename is returned, thus ensuring that all -C extensions and data used by them will be simultaneously available. - - -Extension Import Wrappers -------------------------- - -Since Python's built-in zip import feature does not support loading -C extension modules from zipfiles, the setuptools ``bdist_egg`` command -generates special import wrappers to make it work. - -The wrappers are ``.py`` files (along with corresponding ``.pyc`` -and/or ``.pyo`` files) that have the same module name as the -corresponding C extension. These wrappers are located in the same -package directory (or top-level directory) within the zipfile, so that -say, ``foomodule.so`` will get a corresponding ``foo.py``, while -``bar/baz.pyd`` will get a corresponding ``bar/baz.py``. - -These wrapper files contain a short stanza of Python code that asks -``pkg_resources`` for the filename of the corresponding C extension, -then reloads the module using the obtained filename. This will cause -``pkg_resources`` to first ensure that all of the egg's C extensions -(and any accompanying "eager resources") are extracted to the cache -before attempting to link to the C library. - -Note, by the way, that ``.egg`` directories will also contain these -wrapper files. However, Python's default import priority is such that -C extensions take precedence over same-named Python modules, so the -import wrappers are ignored unless the egg is a zipfile. 
- - -Installation and Path Management Issues -======================================= - -Python's initial setup of ``sys.path`` is very dependent on the Python -version and installation platform, as well as how Python was started -(i.e., script vs. ``-c`` vs. ``-m`` vs. interactive interpreter). -In fact, Python also provides only two relatively robust ways to affect -``sys.path`` outside of direct manipulation in code: the ``PYTHONPATH`` -environment variable, and ``.pth`` files. - -However, with no cross-platform way to safely and persistently change -environment variables, this leaves ``.pth`` files as EasyInstall's only -real option for persistent configuration of ``sys.path``. - -But ``.pth`` files are rather strictly limited in what they are allowed -to do normally. They add directories only to the *end* of ``sys.path``, -after any locally-installed ``site-packages`` directory, and they are -only processed *in* the ``site-packages`` directory to start with. - -This is a double whammy for users who lack write access to that -directory, because they can't create a ``.pth`` file that Python will -read, and even if a sympathetic system administrator adds one for them -that calls ``site.addsitedir()`` to allow some other directory to -contain ``.pth`` files, they won't be able to install newer versions of -anything that's installed in the systemwide ``site-packages``, because -their paths will still be added *after* ``site-packages``. - -So EasyInstall applies two workarounds to solve these problems. - -The first is that EasyInstall leverages ``.pth`` files' "import" feature -to manipulate ``sys.path`` and ensure that anything EasyInstall adds -to a ``.pth`` file will always appear before both the standard library -and the local ``site-packages`` directories. Thus, it is always -possible for a user who can write a Python-read ``.pth`` file to ensure -that their packages come first in their own environment. 
- -Second, when installing to a ``PYTHONPATH`` directory (as opposed to -a "site" directory like ``site-packages``) EasyInstall will also install -a special version of the ``site`` module. Because it's in a -``PYTHONPATH`` directory, this module will get control before the -standard library version of ``site`` does. It will record the state of -``sys.path`` before invoking the "real" ``site`` module, and then -afterwards it processes any ``.pth`` files found in ``PYTHONPATH`` -directories, including all the fixups needed to ensure that eggs always -appear before the standard library in sys.path, but are in a relative -order to one another that is defined by their ``PYTHONPATH`` and -``.pth``-prescribed sequence. - -The net result of these changes is that ``sys.path`` order will be -as follows at runtime: - -1. The ``sys.argv[0]`` directory, or an emtpy string if no script - is being executed. - -2. All eggs installed by EasyInstall in any ``.pth`` file in each - ``PYTHONPATH`` directory, in order first by ``PYTHONPATH`` order, - then normal ``.pth`` processing order (which is to say alphabetical - by ``.pth`` filename, then by the order of listing within each - ``.pth`` file). - -3. All eggs installed by EasyInstall in any ``.pth`` file in each "site" - directory (such as ``site-packages``), following the same ordering - rules as for the ones on ``PYTHONPATH``. - -4. The ``PYTHONPATH`` directories themselves, in their original order - -5. Any paths from ``.pth`` files found on ``PYTHONPATH`` that were *not* - eggs installed by EasyInstall, again following the same relative - ordering rules. - -6. The standard library and "site" directories, along with the contents - of any ``.pth`` files found in the "site" directories. - -Notice that sections 1, 4, and 6 comprise the "normal" Python setup for -``sys.path``. 
Sections 2 and 3 are inserted to support eggs, and -section 5 emulates what the "normal" semantics of ``.pth`` files on -``PYTHONPATH`` would be if Python natively supported them. - -For further discussion of the tradeoffs that went into this design, as -well as notes on the actual magic inserted into ``.pth`` files to make -them do these things, please see also the following messages to the -distutils-SIG mailing list: - -* http://mail.python.org/pipermail/distutils-sig/2006-February/006026.html -* http://mail.python.org/pipermail/distutils-sig/2006-March/006123.html - - -Script Wrappers ---------------- - -EasyInstall never directly installs a project's original scripts to -a script installation directory. Instead, it writes short wrapper -scripts that first ensure that the project's dependencies are active -on sys.path, before invoking the original script. These wrappers -have a #! line that points to the version of Python that was used to -install them, and their second line is always a comment that indicates -the type of script wrapper, the project version required for the script -to run, and information identifying the script to be invoked. - -The format of this marker line is:: - - "# EASY-INSTALL-" script_type ": " tuple_of_strings "\n" - -The ``script_type`` is one of ``SCRIPT``, ``DEV-SCRIPT``, or -``ENTRY-SCRIPT``. The ``tuple_of_strings`` is a comma-separated -sequence of Python string constants. For ``SCRIPT`` and ``DEV-SCRIPT`` -wrappers, there are two strings: the project version requirement, and -the script name (as a filename within the ``scripts`` metadata -directory). For ``ENTRY-SCRIPT`` wrappers, there are three: -the project version requirement, the entry point group name, and the -entry point name. (See the "Automatic Script Creation" section in the -setuptools manual for more information about entry point scripts.) 
- -In each case, the project version requirement string will be a string -parseable with the ``pkg_resources`` modules' ``Requirement.parse()`` -classmethod. The only difference between a ``SCRIPT`` wrapper and a -``DEV-SCRIPT`` is that a ``DEV-SCRIPT`` actually executes the original -source script in the project's source tree, and is created when the -"setup.py develop" command is run. A ``SCRIPT`` wrapper, on the other -hand, uses the "installed" script written to the ``EGG-INFO/scripts`` -subdirectory of the corresponding ``.egg`` zipfile or directory. -(``.egg-info`` eggs do not have script wrappers associated with them, -except in the "setup.py develop" case.) - -The purpose of including the marker line in generated script wrappers is -to facilitate introspection of installed scripts, and their relationship -to installed eggs. For example, an uninstallation tool could use this -data to identify what scripts can safely be removed, and/or identify -what scripts would stop working if a particular egg is uninstalled. - diff --git a/libs/setuptools-2.2/docs/index.txt b/libs/setuptools-2.2/docs/index.txt deleted file mode 100644 index 53839be..0000000 --- a/libs/setuptools-2.2/docs/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -Welcome to Setuptools' documentation! -===================================== - -Setuptools is a fully-featured, actively-maintained, and stable library -designed to facilitate packaging Python projects, where packaging includes: - - - Python package and module definitions - - Distribution package metadata - - Test hooks - - Project installation - - Platform-specific details - - Python 3 support - -Documentation content: - -.. 
toctree:: - :maxdepth: 2 - - roadmap - python3 - using - setuptools - easy_install - pkg_resources - development - merge diff --git a/libs/setuptools-2.2/docs/merge-faq.txt b/libs/setuptools-2.2/docs/merge-faq.txt deleted file mode 100644 index 5201309..0000000 --- a/libs/setuptools-2.2/docs/merge-faq.txt +++ /dev/null @@ -1,80 +0,0 @@ -Setuptools/Distribute Merge FAQ -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -How do I upgrade from Distribute? -================================= - -Distribute specifically prohibits installation of Setuptools 0.7 from Distribute 0.6. There are then two options for upgrading. - -Note that after upgrading using either technique, the only option to downgrade to either version is to completely uninstall Distribute and Setuptools 0.7 versions before reinstalling an 0.6 release. - -Use Distribute 0.7 ------------------- - -The PYPA has put together a compatibility wrapper, a new release of Distribute version 0.7. This package will install over Distribute 0.6.x installations and will replace Distribute with a simple wrapper that requires Setuptools 0.7 or later. This technique is experimental, but initial results indicate this technique is the easiest upgrade path. - - -Uninstall ---------- - -First, completely uninstall Distribute. Since Distribute does not have an automated installation routine, this process is manual. Follow the instructions in the README for uninstalling. - - -How do I upgrade from Setuptools 0.6? -===================================== - -There are no special instructions for upgrading over older versions of Setuptools. Simply use `easy_install -U` or run the latest `ez_setup.py`. - -Where does the merge occur? -======================================================== - -The merge is occurring between the heads of the default branch of Distribute and the setuptools-0.6 branch of Setuptools. The Setuptools SVN repo has been converted to a Mercurial repo hosted on Bitbucket. 
The work is still underway, so the exact changesets included may change, although the anticipated merge targets are Setuptools at 0.6c12 and Distribute at 0.6.35. - -What happens to other branches? -======================================================== - -Distribute 0.7 was abandoned long ago and won't be included in the resulting code tree, but may be retained for posterity in the original repo. - -Setuptools default branch (also 0.7 development) may also be abandoned or may be incorporated into the new merged line if desirable (and as resources allow). - -What history is lost/changed? -======================================================== - -As setuptools was not on Mercurial when the fork occurred and as Distribute did not include the full setuptools history (prior to the creation of the setuptools-0.6 branch), the two source trees were not compatible. In order to most effectively communicate the code history, the Distribute code was grafted onto the (originally private) setuptools Mercurial repo. Although this grafting maintained the full code history with names, dates, and changes, it did lose the original hashes of those changes. Therefore, references to changes by hash (including tags) are lost. - -Additionally, any heads that were not actively merged into the Distribute 0.6.35 release were also omitted. As a result, the changesets included in the merge repo are those from the original setuptools repo and all changesets ancestral to the Distribute 0.6.35 release. - -What features will be in the merged code base? -======================================================== - -In general, all "features" added in distribute will be included in setuptools. Where there exist conflicts or undesirable features, we will be explicit about what these limitations are. Changes that are backward-incompatible from setuptools 0.6 to distribute will likely be removed, and these also will be well documented. 
- -Bootstrapping scripts (ez_setup/distribute_setup) and docs, as with distribute, will be maintained in the repository and built as part of the release process. Documentation and bootstrapping scripts will be hosted at python.org, as they are with distribute now. Documentation at telecommunity will be updated to refer or redirect to the new, merged docs. - -On the whole, the merged setuptools should be largely compatible with the latest releases of both setuptools and distribute and will be an easy transition for users of either library. - -Who is invited to contribute? Who is excluded? -======================================================== - -While we've worked privately to initiate this merge due to the potential sensitivity of the topic, no one is excluded from this effort. We invite all members of the community, especially those most familiar with Python packaging and its challenges to join us in the effort. - -We have lots of ideas for how we'd like to improve the codebase, release process, everything. Like distribute, the post-merge setuptools will have its source hosted on bitbucket. (So if you're currently a distribute contributor, about the only thing that's going to change is the URL of the repository you follow.) Also like distribute, it'll support Python 3, and hopefully we'll soon merge Vinay Sajip's patches to make it run on Python 3 without needing 2to3 to be run on the code first. - -While we've worked privately to initiate this merge due to the potential sensitivity of the topic, no one is excluded from this effort. We invite all members of the community, especially those most familiar with Python packaging and its challenges to join us in the effort. - -Why Setuptools and not Distribute or another name? -======================================================== - -We do, however, understand that this announcement might be unsettling for some. 
The setuptools name has been subjected to a lot of deprecation in recent years, so the idea that it will now be the preferred name instead of distribute might be somewhat difficult or disorienting for some. We considered use of another name (Distribute or an entirely new name), but that would serve to only complicate matters further. Instead, our goal is to simplify the packaging landscape but without losing any hard-won advancements. We hope that the people who worked to spread the first message will be equally enthusiastic about spreading the new one, and we especially look forward to seeing the new posters and slogans celebrating setuptools. - -What is the timeframe of release? -======================================================== - -There are no hard timeframes for any of this effort, although progress is underway and a draft merge is underway and being tested privately. As an unfunded volunteer effort, our time to put in on it is limited, and we've both had some recent health and other challenges that have made working on this difficult, which in part explains why we haven't met our original deadline of a completed merge before PyCon. - -The final Setuptools 0.7 was cut on June 1, 2013 and will be released to PyPI shortly thereafter. - -What version number can I expect for the new release? -======================================================== - -The new release will roughly follow the previous trend for setuptools and release the new release as 0.7. This number is somewhat arbitrary, but we wanted something other than 0.6 to distinguish it from its ancestor forks but not 1.0 to avoid putting too much emphasis on the release itself and to focus on merging the functionality. In the future, the project will likely adopt a versioning scheme similar to semver to convey semantic meaning about the release in the version number. 
diff --git a/libs/setuptools-2.2/docs/merge.txt b/libs/setuptools-2.2/docs/merge.txt deleted file mode 100644 index ba37d6e..0000000 --- a/libs/setuptools-2.2/docs/merge.txt +++ /dev/null @@ -1,122 +0,0 @@ -Merge with Distribute -~~~~~~~~~~~~~~~~~~~~~ - -In 2013, the fork of Distribute was merged back into Setuptools. This -document describes some of the details of the merge. - -.. toctree:: - :maxdepth: 2 - - merge-faq - -Process -======= - -In order to try to accurately reflect the fork and then re-merge of the -projects, the merge process brought both code trees together into one -repository and grafted the Distribute fork onto the Setuptools development -line (as if it had been created as a branch in the first place). - -The rebase to get distribute onto setuptools went something like this:: - - hg phase -d -f -r 26b4c29b62db - hg rebase -s 26b4c29b62db -d 7a5cf59c78d7 - -The technique required a late version of mercurial (2.5) to work correctly. - -The only code that was included was the code that was ancestral to the public -releases of Distribute 0.6. Additionally, because Setuptools was not hosted -on Mercurial at the time of the fork and because the Distribute fork did not -include a complete conversion of the Setuptools history, the Distribute -changesets had to be re-applied to a new, different conversion of the -Setuptools SVN repository. As a result, all of the hashes have changed. - -Distribute was grafted in a 'distribute' branch and the 'setuptools-0.6' -branch was targeted for the merge. The 'setuptools' branch remains with -unreleased code and may be incorporated in the future. - -Reconciling Differences -======================= - -There were both technical and philosophical differences between Setuptools -and Distribute. To reconcile these differences in a manageable way, the -following technique was undertaken: - -Create a 'Setuptools-Distribute merge' branch, based on a late release of -Distribute (0.6.35). This was done with a00b441856c4. 
- -In that branch, first remove code that is no longer relevant to -Setuptools (such as the setuptools patching code). - -Next, in the the merge branch, create another base from at the point where the -fork occurred (such that the code is still essentially an older but pristine -setuptools). This base can be found as 955792b069d0. This creates two heads -in the merge branch, each with a basis in the fork. - -Then, repeatedly copy changes for a -single file or small group of files from a late revision of that file in the -'setuptools-0.6' branch (1aae1efe5733 was used) and commit those changes on -the setuptools-only head. That head is then merged with the head with -Distribute changes. It is in this Mercurial -merge operation that the fundamental differences between Distribute and -Setuptools are reconciled, but since only a single file or small set of files -are used, the scope is limited. - -Finally, once all the challenging files have been reconciled and merged, the -remaining changes from the setuptools-0.6 branch are merged, deferring to the -reconciled changes (a1fa855a5a62 and 160ccaa46be0). - -Originally, jaraco attempted all of this using anonymous heads in the -Distribute branch, but later realized this technique made for a somewhat -unclear merge process, so the changes were re-committed as described above -for clarity. In this way, the "distribute" and "setuptools" branches can -continue to track the official Distribute changesets. - -Concessions -=========== - -With the merge of Setuptools and Distribute, the following concessions were -made: - -Differences from setuptools 0.6c12: - -Major Changes -------------- - -* Python 3 support. -* Improved support for GAE. -* Support `PEP-370 `_ per-user site - packages. -* Sort order of Distributions in pkg_resources now prefers PyPI to external - links (Distribute issue 163). -* Python 2.4 or greater is required (drop support for Python 2.3). 
- -Minor Changes -------------- - -* Wording of some output has changed to replace contractions with their - canonical form (i.e. prefer "could not" to "couldn't"). -* Manifest files are only written for 32-bit .exe launchers. - -Differences from Distribute 0.6.36: - -Major Changes -------------- - -* The _distribute property of the setuptools module has been removed. -* Distributions are once again installed as zipped eggs by default, per the - rationale given in `the seminal bug report - `_ indicates that the feature - should remain and no substantial justification was given in the `Distribute - report `_. - -Minor Changes -------------- - -* The patch for `#174 `_ - has been rolled-back, as the comment on the ticket indicates that the patch - addressed a symptom and not the fundamental issue. -* ``easy_install`` (the command) once again honors setup.cfg if found in the - current directory. The "mis-behavior" characterized in `#99 - `_ is actually intended - behavior, and no substantial rationale was given for the deviation. diff --git a/libs/setuptools-2.2/docs/pkg_resources.txt b/libs/setuptools-2.2/docs/pkg_resources.txt deleted file mode 100644 index 8dd3e9a..0000000 --- a/libs/setuptools-2.2/docs/pkg_resources.txt +++ /dev/null @@ -1,1982 +0,0 @@ -============================================================= -Package Discovery and Resource Access using ``pkg_resources`` -============================================================= - -The ``pkg_resources`` module distributed with ``setuptools`` provides an API -for Python libraries to access their resource files, and for extensible -applications and frameworks to automatically discover plugins. It also -provides runtime support for using C extensions that are inside zipfile-format -eggs, support for merging packages that have separately-distributed modules or -subpackages, and APIs for managing Python's current "working set" of active -packages. - - -.. 
contents:: **Table of Contents** - - --------- -Overview --------- - -The ``pkg_resources`` module provides runtime facilities for finding, -introspecting, activating and using installed Python distributions. Some -of the more advanced features (notably the support for parallel installation -of multiple versions) rely specifically on the "egg" format (either as a -zip archive or subdirectory), while others (such as plugin discovery) will -work correctly so long as "egg-info" metadata directories are available for -relevant distributions. - -Eggs are a distribution format for Python modules, similar in concept to -Java's "jars" or Ruby's "gems", or the "wheel" format defined in PEP 427. -However, unlike a pure distribution format, eggs can also be installed and -added directly to ``sys.path`` as an import location. When installed in -this way, eggs are *discoverable*, meaning that they carry metadata that -unambiguously identifies their contents and dependencies. This means that -an installed egg can be *automatically* found and added to ``sys.path`` in -response to simple requests of the form, "get me everything I need to use -docutils' PDF support". This feature allows mutually conflicting versions of -a distribution to co-exist in the same Python installation, with individual -applications activating the desired version at runtime by manipulating the -contents of ``sys.path`` (this differs from the virtual environment -approach, which involves creating isolated environments for each -application). - -The following terms are needed in order to explain the capabilities offered -by this module: - -project - A library, framework, script, plugin, application, or collection of data - or other resources, or some combination thereof. Projects are assumed to - have "relatively unique" names, e.g. names registered with PyPI. - -release - A snapshot of a project at a particular point in time, denoted by a version - identifier. 
- -distribution - A file or files that represent a particular release. - -importable distribution - A file or directory that, if placed on ``sys.path``, allows Python to - import any modules contained within it. - -pluggable distribution - An importable distribution whose filename unambiguously identifies its - release (i.e. project and version), and whose contents unamabiguously - specify what releases of other projects will satisfy its runtime - requirements. - -extra - An "extra" is an optional feature of a release, that may impose additional - runtime requirements. For example, if docutils PDF support required a - PDF support library to be present, docutils could define its PDF support as - an "extra", and list what other project releases need to be available in - order to provide it. - -environment - A collection of distributions potentially available for importing, but not - necessarily active. More than one distribution (i.e. release version) for - a given project may be present in an environment. - -working set - A collection of distributions actually available for importing, as on - ``sys.path``. At most one distribution (release version) of a given - project may be present in a working set, as otherwise there would be - ambiguity as to what to import. - -eggs - Eggs are pluggable distributions in one of the three formats currently - supported by ``pkg_resources``. There are built eggs, development eggs, - and egg links. Built eggs are directories or zipfiles whose name ends - with ``.egg`` and follows the egg naming conventions, and contain an - ``EGG-INFO`` subdirectory (zipped or otherwise). Development eggs are - normal directories of Python code with one or more ``ProjectName.egg-info`` - subdirectories. The development egg format is also used to provide a - default version of a distribution that is available to software that - doesn't use ``pkg_resources`` to request specific versions. 
Egg links - are ``*.egg-link`` files that contain the name of a built or - development egg, to support symbolic linking on platforms that do not - have native symbolic links (or where the symbolic link support is - limited). - -(For more information about these terms and concepts, see also this -`architectural overview`_ of ``pkg_resources`` and Python Eggs in general.) - -.. _architectural overview: http://mail.python.org/pipermail/distutils-sig/2005-June/004652.html - - -.. ----------------- -.. Developer's Guide -.. ----------------- - -.. This section isn't written yet. Currently planned topics include - Accessing Resources - Finding and Activating Package Distributions - get_provider() - require() - WorkingSet - iter_distributions - Running Scripts - Configuration - Namespace Packages - Extensible Applications and Frameworks - Locating entry points - Activation listeners - Metadata access - Extended Discovery and Installation - Supporting Custom PEP 302 Implementations -.. For now, please check out the extensive `API Reference`_ below. - - -------------- -API Reference -------------- - -Namespace Package Support -========================= - -A namespace package is a package that only contains other packages and modules, -with no direct contents of its own. Such packages can be split across -multiple, separately-packaged distributions. Normally, you do not need to use -the namespace package APIs directly; instead you should supply the -``namespace_packages`` argument to ``setup()`` in your project's ``setup.py``. -See the `setuptools documentation on namespace packages`_ for more information. - -However, if for some reason you need to manipulate namespace packages or -directly alter ``sys.path`` at runtime, you may find these APIs useful: - -``declare_namespace(name)`` - Declare that the dotted package name `name` is a "namespace package" whose - contained packages and modules may be spread across multiple distributions. 
- The named package's ``__path__`` will be extended to include the - corresponding package in all distributions on ``sys.path`` that contain a - package of that name. (More precisely, if an importer's - ``find_module(name)`` returns a loader, then it will also be searched for - the package's contents.) Whenever a Distribution's ``activate()`` method - is invoked, it checks for the presence of namespace packages and updates - their ``__path__`` contents accordingly. - -Applications that manipulate namespace packages or directly alter ``sys.path`` -at runtime may also need to use this API function: - -``fixup_namespace_packages(path_item)`` - Declare that `path_item` is a newly added item on ``sys.path`` that may - need to be used to update existing namespace packages. Ordinarily, this is - called for you when an egg is automatically added to ``sys.path``, but if - your application modifies ``sys.path`` to include locations that may - contain portions of a namespace package, you will need to call this - function to ensure they are added to the existing namespace packages. - -Although by default ``pkg_resources`` only supports namespace packages for -filesystem and zip importers, you can extend its support to other "importers" -compatible with PEP 302 using the ``register_namespace_handler()`` function. -See the section below on `Supporting Custom Importers`_ for details. - -.. _setuptools documentation on namespace packages: http://peak.telecommunity.com/DevCenter/setuptools#namespace-packages - - -``WorkingSet`` Objects -====================== - -The ``WorkingSet`` class provides access to a collection of "active" -distributions. In general, there is only one meaningful ``WorkingSet`` -instance: the one that represents the distributions that are currently active -on ``sys.path``. This global instance is available under the name -``working_set`` in the ``pkg_resources`` module. 
However, specialized -tools may wish to manipulate working sets that don't correspond to -``sys.path``, and therefore may wish to create other ``WorkingSet`` instances. - -It's important to note that the global ``working_set`` object is initialized -from ``sys.path`` when ``pkg_resources`` is first imported, but is only updated -if you do all future ``sys.path`` manipulation via ``pkg_resources`` APIs. If -you manually modify ``sys.path``, you must invoke the appropriate methods on -the ``working_set`` instance to keep it in sync. Unfortunately, Python does -not provide any way to detect arbitrary changes to a list object like -``sys.path``, so ``pkg_resources`` cannot automatically update the -``working_set`` based on changes to ``sys.path``. - -``WorkingSet(entries=None)`` - Create a ``WorkingSet`` from an iterable of path entries. If `entries` - is not supplied, it defaults to the value of ``sys.path`` at the time - the constructor is called. - - Note that you will not normally construct ``WorkingSet`` instances - yourself, but instead you will implicitly or explicitly use the global - ``working_set`` instance. For the most part, the ``pkg_resources`` API - is designed so that the ``working_set`` is used by default, such that you - don't have to explicitly refer to it most of the time. - -All distributions available directly on ``sys.path`` will be activated -automatically when ``pkg_resources`` is imported. This behaviour can cause -version conflicts for applications which require non-default versions of -those distributions. To handle this situation, ``pkg_resources`` checks for a -``__requires__`` attribute in the ``__main__`` module when initializing the -default working set, and uses this to ensure a suitable version of each -affected distribution is activated. 
For example:: - - __requires__ = ["CherryPy < 3"] # Must be set before pkg_resources import - import pkg_resources - - -Basic ``WorkingSet`` Methods ----------------------------- - -The following methods of ``WorkingSet`` objects are also available as module- -level functions in ``pkg_resources`` that apply to the default ``working_set`` -instance. Thus, you can use e.g. ``pkg_resources.require()`` as an -abbreviation for ``pkg_resources.working_set.require()``: - - -``require(*requirements)`` - Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. - - For the syntax of requirement specifiers, see the section below on - `Requirements Parsing`_. - - In general, it should not be necessary for you to call this method - directly. It's intended more for use in quick-and-dirty scripting and - interactive interpreter hacking than for production use. If you're creating - an actual library or application, it's strongly recommended that you create - a "setup.py" script using ``setuptools``, and declare all your requirements - there. That way, tools like EasyInstall can automatically detect what - requirements your package has, and deal with them accordingly. - - Note that calling ``require('SomePackage')`` will not install - ``SomePackage`` if it isn't already present. If you need to do this, you - should use the ``resolve()`` method instead, which allows you to pass an - ``installer`` callback that will be invoked when a needed distribution - can't be found on the local machine. 
You can then have this callback - display a dialog, automatically download the needed distribution, or - whatever else is appropriate for your application. See the documentation - below on the ``resolve()`` method for more information, and also on the - ``obtain()`` method of ``Environment`` objects. - -``run_script(requires, script_name)`` - Locate distribution specified by `requires` and run its `script_name` - script. `requires` must be a string containing a requirement specifier. - (See `Requirements Parsing`_ below for the syntax.) - - The script, if found, will be executed in *the caller's globals*. That's - because this method is intended to be called from wrapper scripts that - act as a proxy for the "real" scripts in a distribution. A wrapper script - usually doesn't need to do anything but invoke this function with the - correct arguments. - - If you need more control over the script execution environment, you - probably want to use the ``run_script()`` method of a ``Distribution`` - object's `Metadata API`_ instead. - -``iter_entry_points(group, name=None)`` - Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching both - `group` and `name` are yielded. Entry points are yielded from the active - distributions in the order that the distributions appear in the working - set. (For the global ``working_set``, this should be the same as the order - that they are listed in ``sys.path``.) Note that within the entry points - advertised by an individual distribution, there is no particular ordering. - - Please see the section below on `Entry Points`_ for more information. 
- - -``WorkingSet`` Methods and Attributes -------------------------------------- - -These methods are used to query or manipulate the contents of a specific -working set, so they must be explicitly invoked on a particular ``WorkingSet`` -instance: - -``add_entry(entry)`` - Add a path item to the ``entries``, finding any distributions on it. You - should use this when you add additional items to ``sys.path`` and you want - the global ``working_set`` to reflect the change. This method is also - called by the ``WorkingSet()`` constructor during initialization. - - This method uses ``find_distributions(entry,True)`` to find distributions - corresponding to the path entry, and then ``add()`` them. `entry` is - always appended to the ``entries`` attribute, even if it is already - present, however. (This is because ``sys.path`` can contain the same value - more than once, and the ``entries`` attribute should be able to reflect - this.) - -``__contains__(dist)`` - True if `dist` is active in this ``WorkingSet``. Note that only one - distribution for a given project can be active in a given ``WorkingSet``. - -``__iter__()`` - Yield distributions for non-duplicate projects in the working set. - The yield order is the order in which the items' path entries were - added to the working set. - -``find(req)`` - Find a distribution matching `req` (a ``Requirement`` instance). - If there is an active distribution for the requested project, this - returns it, as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - -``resolve(requirements, env=None, installer=None)`` - List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. 
`env`, - if supplied, should be an ``Environment`` instance. If - not supplied, an ``Environment`` is created from the working set's - ``entries``. `installer`, if supplied, will be invoked with each - requirement that cannot be met by an already-installed distribution; it - should return a ``Distribution`` or ``None``. (See the ``obtain()`` method - of `Environment Objects`_, below, for more information on the `installer` - argument.) - -``add(dist, entry=None)`` - Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to ``dist.location``. On exit from - this routine, `entry` is added to the end of the working set's ``.entries`` - (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution active in the set. If it's - successfully added, any callbacks registered with the ``subscribe()`` - method will be called. (See `Receiving Change Notifications`_, below.) - - Note: ``add()`` is automatically called for you by the ``require()`` - method, so you don't normally need to use this method directly. - -``entries`` - This attribute represents a "shadow" ``sys.path``, primarily useful for - debugging. If you are experiencing import problems, you should check - the global ``working_set`` object's ``entries`` against ``sys.path``, to - ensure that they match. If they do not, then some part of your program - is manipulating ``sys.path`` without updating the ``working_set`` - accordingly. IMPORTANT NOTE: do not directly manipulate this attribute! - Setting it equal to ``sys.path`` will not fix your problem, any more than - putting black tape over an "engine warning" light will fix your car! If - this attribute is out of sync with ``sys.path``, it's merely an *indicator* - of the problem, not the cause of it. 
- - -Receiving Change Notifications ------------------------------- - -Extensible applications and frameworks may need to receive notification when -a new distribution (such as a plug-in component) has been added to a working -set. This is what the ``subscribe()`` method and ``add_activation_listener()`` -function are for. - -``subscribe(callback)`` - Invoke ``callback(distribution)`` once for each active distribution that is - in the set now, or gets added later. Because the callback is invoked for - already-active distributions, you do not need to loop over the working set - yourself to deal with the existing items; just register the callback and - be prepared for the fact that it will be called immediately by this method. - - Note that callbacks *must not* allow exceptions to propagate, or they will - interfere with the operation of other callbacks and possibly result in an - inconsistent working set state. Callbacks should use a try/except block - to ignore, log, or otherwise process any errors, especially since the code - that caused the callback to be invoked is unlikely to be able to handle - the errors any better than the callback itself. - -``pkg_resources.add_activation_listener()`` is an alternate spelling of -``pkg_resources.working_set.subscribe()``. - - -Locating Plugins ----------------- - -Extensible applications will sometimes have a "plugin directory" or a set of -plugin directories, from which they want to load entry points or other -metadata. The ``find_plugins()`` method allows you to do this, by scanning an -environment for the newest version of each project that can be safely loaded -without conflicts or missing requirements. - -``find_plugins(plugin_env, full_env=None, fallback=True)`` - Scan `plugin_env` and identify which distributions could be added to this - working set without version conflicts or missing requirements. 
- - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print "Couldn't load", errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains only - distributions that are in the project's "plugin directory" or directories. - The `full_env`, if supplied, should be an ``Environment`` instance that - contains all currently-available distributions. - - If `full_env` is not supplied, one is created automatically from the - ``WorkingSet`` this method is called on, which will typically mean that - every directory on ``sys.path`` will be scanned for distributions. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` that - were loadable, along with any other distributions that are needed to resolve - their dependencies. `error_info` is a dictionary mapping unloadable plugin - distributions to an exception instance describing the error that occurred. - Usually this will be a ``DistributionNotFound`` or ``VersionConflict`` - instance. - - Most applications will use this method mainly on the master ``working_set`` - instance in ``pkg_resources``, and then immediately add the returned - distributions to the working set so that they are available on sys.path. - This will make it possible to find any entry points, and allow any other - metadata tracking and hooks to be activated. - - The resolution algorithm used by ``find_plugins()`` is as follows. First, - the project names of the distributions present in `plugin_env` are sorted. - Then, each project's eggs are tried in descending version order (i.e., - newest version first). - - An attempt is made to resolve each egg's dependencies. If the attempt is - successful, the egg and its dependencies are added to the output list and to - a temporary copy of the working set. 
The resolution process continues with - the next project name, and no older eggs for that project are tried. - - If the resolution attempt fails, however, the error is added to the error - dictionary. If the `fallback` flag is true, the next older version of the - plugin is tried, until a working version is found. If false, the resolution - process continues with the next plugin project name. - - Some applications may have stricter fallback requirements than others. For - example, an application that has a database schema or persistent objects - may not be able to safely downgrade a version of a package. Others may want - to ensure that a new plugin configuration is either 100% good or else - revert to a known-good configuration. (That is, they may wish to revert to - a known configuration if the `error_info` return value is non-empty.) - - Note that this algorithm gives precedence to satisfying the dependencies of - alphabetically prior project names in case of version conflicts. If two - projects named "AaronsPlugin" and "ZekesPlugin" both need different versions - of "TomsLibrary", then "AaronsPlugin" will win and "ZekesPlugin" will be - disabled due to version conflict. - - -``Environment`` Objects -======================= - -An "environment" is a collection of ``Distribution`` objects, usually ones -that are present and potentially importable on the current platform. -``Environment`` objects are used by ``pkg_resources`` to index available -distributions during dependency resolution. - -``Environment(search_path=None, platform=get_supported_platform(), python=PY_MAJOR)`` - Create an environment snapshot by scanning `search_path` for distributions - compatible with `platform` and `python`. `search_path` should be a - sequence of strings such as might be used on ``sys.path``. If a - `search_path` isn't supplied, ``sys.path`` is used. 
- - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'2.4'``); - it defaults to the currently-running version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to include *all* distributions, not just those compatible with the - running platform or Python version. - - Note that `search_path` is scanned immediately for distributions, and the - resulting ``Environment`` is a snapshot of the found distributions. It - is not automatically updated if the system's state changes due to e.g. - installation or removal of distributions. - -``__getitem__(project_name)`` - Returns a list of distributions for the given project name, ordered - from newest to oldest version. (And highest to lowest format precedence - for distributions that contain the same version of the project.) If there - are no distributions for the project, returns an empty list. - -``__iter__()`` - Yield the unique project names of the distributions in this environment. - The yielded names are always in lower case. - -``add(dist)`` - Add `dist` to the environment if it matches the platform and python version - specified at creation time, and only if the distribution hasn't already - been added. (i.e., adding the same distribution more than once is a no-op.) - -``remove(dist)`` - Remove `dist` from the environment. - -``can_add(dist)`` - Is distribution `dist` acceptable for this environment? If it's not - compatible with the ``platform`` and ``python`` version values specified - when the environment was created, a false value is returned. - -``__add__(dist_or_env)`` (``+`` operator) - Add a distribution or environment to an ``Environment`` instance, returning - a *new* environment object that contains all the distributions previously - contained by both. 
The new environment will have a ``platform`` and - ``python`` of ``None``, meaning that it will not reject any distributions - from being added to it; it will simply accept whatever is added. If you - want the added items to be filtered for platform and Python version, or - you want to add them to the *same* environment instance, you should use - in-place addition (``+=``) instead. - -``__iadd__(dist_or_env)`` (``+=`` operator) - Add a distribution or environment to an ``Environment`` instance - *in-place*, updating the existing instance and returning it. The - ``platform`` and ``python`` filter attributes take effect, so distributions - in the source that do not have a suitable platform string or Python version - are silently ignored. - -``best_match(req, working_set, installer=None)`` - Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. (This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution isn't - active, this method returns the newest distribution in the environment - that meets the ``Requirement`` in `req`. If no suitable distribution is - found, and `installer` is supplied, then the result of calling - the environment's ``obtain(req, installer)`` method will be returned. - -``obtain(requirement, installer=None)`` - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument. - -``scan(search_path=None)`` - Scan `search_path` for distributions usable on `platform` - - Any distributions found are added to the environment. 
`search_path` should - be a sequence of strings such as might be used on ``sys.path``. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. This - method is a shortcut for using the ``find_distributions()`` function to - find the distributions from each item in `search_path`, and then calling - ``add()`` to add each one to the environment. - - -``Requirement`` Objects -======================= - -``Requirement`` objects express what versions of a project are suitable for -some purpose. These objects (or their string form) are used by various -``pkg_resources`` APIs in order to find distributions that a script or -distribution needs. - - -Requirements Parsing --------------------- - -``parse_requirements(s)`` - Yield ``Requirement`` objects for a string or iterable of lines. Each - requirement must start on a new line. See below for syntax. - -``Requirement.parse(s)`` - Create a ``Requirement`` object from a string or iterable of lines. A - ``ValueError`` is raised if the string or lines do not contain a valid - requirement specifier, or if they contain more than one specifier. (To - parse multiple specifiers from a string or iterable of strings, use - ``parse_requirements()`` instead.) - - The syntax of a requirement specifier can be defined in EBNF as follows:: - - requirement ::= project_name versionspec? extras? - versionspec ::= comparison version (',' comparison version)* - comparison ::= '<' | '<=' | '!=' | '==' | '>=' | '>' - extras ::= '[' extralist? ']' - extralist ::= identifier (',' identifier)* - project_name ::= identifier - identifier ::= [-A-Za-z0-9_]+ - version ::= [-A-Za-z0-9_.]+ - - Tokens can be separated by whitespace, and a requirement can be continued - over multiple lines using a backslash (``\\``). Line-end comments (using - ``#``) are also allowed. 
- - Some examples of valid requirement specifiers:: - - FooProject >= 1.2 - Fizzy [foo, bar] - PickyThing<1.6,>1.9,!=1.9.6,<2.0a0,==2.4c1 - SomethingWhoseVersionIDontCareAbout - - The project name is the only required portion of a requirement string, and - if it's the only thing supplied, the requirement will accept any version - of that project. - - The "extras" in a requirement are used to request optional features of a - project, that may require additional project distributions in order to - function. For example, if the hypothetical "Report-O-Rama" project offered - optional PDF support, it might require an additional library in order to - provide that support. Thus, a project needing Report-O-Rama's PDF features - could use a requirement of ``Report-O-Rama[PDF]`` to request installation - or activation of both Report-O-Rama and any libraries it needs in order to - provide PDF support. For example, you could use:: - - easy_install.py Report-O-Rama[PDF] - - To install the necessary packages using the EasyInstall program, or call - ``pkg_resources.require('Report-O-Rama[PDF]')`` to add the necessary - distributions to sys.path at runtime. - - -``Requirement`` Methods and Attributes --------------------------------------- - -``__contains__(dist_or_version)`` - Return true if `dist_or_version` fits the criteria for this requirement. - If `dist_or_version` is a ``Distribution`` object, its project name must - match the requirement's project name, and its version must meet the - requirement's version criteria. If `dist_or_version` is a string, it is - parsed using the ``parse_version()`` utility function. Otherwise, it is - assumed to be an already-parsed version. - - The ``Requirement`` object's version specifiers (``.specs``) are internally - sorted into ascending version order, and used to establish what ranges of - versions are acceptable. Adjacent redundant conditions are effectively - consolidated (e.g. 
``">1, >2"`` produces the same results as ``">1"``, and - ``"<2,<3"`` produces the same results as``"<3"``). ``"!="`` versions are - excised from the ranges they fall within. The version being tested for - acceptability is then checked for membership in the resulting ranges. - (Note that providing conflicting conditions for the same version (e.g. - ``"<2,>=2"`` or ``"==2,!=2"``) is meaningless and may therefore produce - bizarre results when compared with actual version number(s).) - -``__eq__(other_requirement)`` - A requirement compares equal to another requirement if they have - case-insensitively equal project names, version specifiers, and "extras". - (The order that extras and version specifiers are in is also ignored.) - Equal requirements also have equal hashes, so that requirements can be - used in sets or as dictionary keys. - -``__str__()`` - The string form of a ``Requirement`` is a string that, if passed to - ``Requirement.parse()``, would return an equal ``Requirement`` object. - -``project_name`` - The name of the required project - -``key`` - An all-lowercase version of the ``project_name``, useful for comparison - or indexing. - -``extras`` - A tuple of names of "extras" that this requirement calls for. (These will - be all-lowercase and normalized using the ``safe_extra()`` parsing utility - function, so they may not exactly equal the extras the requirement was - created with.) - -``specs`` - A list of ``(op,version)`` tuples, sorted in ascending parsed-version - order. The `op` in each tuple is a comparison operator, represented as - a string. The `version` is the (unparsed) version number. The relative - order of tuples containing the same version numbers is undefined, since - having more than one operator for a given version is either redundant or - self-contradictory. - - -Entry Points -============ - -Entry points are a simple way for distributions to "advertise" Python objects -(such as functions or classes) for use by other distributions. 
Extensible -applications and frameworks can search for entry points with a particular name -or group, either from a specific distribution or from all active distributions -on sys.path, and then inspect or load the advertised objects at will. - -Entry points belong to "groups" which are named with a dotted name similar to -a Python package or module name. For example, the ``setuptools`` package uses -an entry point named ``distutils.commands`` in order to find commands defined -by distutils extensions. ``setuptools`` treats the names of entry points -defined in that group as the acceptable commands for a setup script. - -In a similar way, other packages can define their own entry point groups, -either using dynamic names within the group (like ``distutils.commands``), or -possibly using predefined names within the group. For example, a blogging -framework that offers various pre- or post-publishing hooks might define an -entry point group and look for entry points named "pre_process" and -"post_process" within that group. - -To advertise an entry point, a project needs to use ``setuptools`` and provide -an ``entry_points`` argument to ``setup()`` in its setup script, so that the -entry points will be included in the distribution's metadata. For more -details, see the ``setuptools`` documentation. (XXX link here to setuptools) - -Each project distribution can advertise at most one entry point of a given -name within the same entry point group. For example, a distutils extension -could advertise two different ``distutils.commands`` entry points, as long as -they had different names. However, there is nothing that prevents *different* -projects from advertising entry points of the same name in the same group. In -some cases, this is a desirable thing, since the application or framework that -uses the entry points may be calling them as hooks, or in some other way -combining them. 
It is up to the application or framework to decide what to do -if multiple distributions advertise an entry point; some possibilities include -using both entry points, displaying an error message, using the first one found -in sys.path order, etc. - - -Convenience API ---------------- - -In the following functions, the `dist` argument can be a ``Distribution`` -instance, a ``Requirement`` instance, or a string specifying a requirement -(i.e. project name, version, etc.). If the argument is a string or -``Requirement``, the specified distribution is located (and added to sys.path -if not already present). An error will be raised if a matching distribution is -not available. - -The `group` argument should be a string containing a dotted identifier, -identifying an entry point group. If you are defining an entry point group, -you should include some portion of your package's name in the group name so as -to avoid collision with other packages' entry point groups. - -``load_entry_point(dist, group, name)`` - Load the named entry point from the specified distribution, or raise - ``ImportError``. - -``get_entry_info(dist, group, name)`` - Return an ``EntryPoint`` object for the given `group` and `name` from - the specified distribution. Returns ``None`` if the distribution has not - advertised a matching entry point. - -``get_entry_map(dist, group=None)`` - Return the distribution's entry point map for `group`, or the full entry - map for the distribution. This function always returns a dictionary, - even if the distribution advertises no entry points. If `group` is given, - the dictionary maps entry point names to the corresponding ``EntryPoint`` - object. If `group` is None, the dictionary maps group names to - dictionaries that then map entry point names to the corresponding - ``EntryPoint`` instance in that group. - -``iter_entry_points(group, name=None)`` - Yield entry point objects from `group` matching `name`. 
- - If `name` is None, yields all entry points in `group` from all - distributions in the working set on sys.path, otherwise only ones matching - both `group` and `name` are yielded. Entry points are yielded from - the active distributions in the order that the distributions appear on - sys.path. (Within entry points for a particular distribution, however, - there is no particular ordering.) - - (This API is actually a method of the global ``working_set`` object; see - the section above on `Basic WorkingSet Methods`_ for more information.) - - -Creating and Parsing --------------------- - -``EntryPoint(name, module_name, attrs=(), extras=(), dist=None)`` - Create an ``EntryPoint`` instance. `name` is the entry point name. The - `module_name` is the (dotted) name of the module containing the advertised - object. `attrs` is an optional tuple of names to look up from the - module to obtain the advertised object. For example, an `attrs` of - ``("foo","bar")`` and a `module_name` of ``"baz"`` would mean that the - advertised object could be obtained by the following code:: - - import baz - advertised_object = baz.foo.bar - - The `extras` are an optional tuple of "extra feature" names that the - distribution needs in order to provide this entry point. When the - entry point is loaded, these extra features are looked up in the `dist` - argument to find out what other distributions may need to be activated - on sys.path; see the ``load()`` method for more details. The `extras` - argument is only meaningful if `dist` is specified. `dist` must be - a ``Distribution`` instance. - -``EntryPoint.parse(src, dist=None)`` (classmethod) - Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional, as is the whitespace shown between - some of the items. 
The `dist` argument is passed through to the - ``EntryPoint()`` constructor, along with the other values parsed from - `src`. - -``EntryPoint.parse_group(group, lines, dist=None)`` (classmethod) - Parse `lines` (a string or sequence of lines) to create a dictionary - mapping entry point names to ``EntryPoint`` objects. ``ValueError`` is - raised if entry point names are duplicated, if `group` is not a valid - entry point group name, or if there are any syntax errors. (Note: the - `group` parameter is used only for validation and to create more - informative error messages.) If `dist` is provided, it will be used to - set the ``dist`` attribute of the created ``EntryPoint`` objects. - -``EntryPoint.parse_map(data, dist=None)`` (classmethod) - Parse `data` into a dictionary mapping group names to dictionaries mapping - entry point names to ``EntryPoint`` objects. If `data` is a dictionary, - then the keys are used as group names and the values are passed to - ``parse_group()`` as the `lines` argument. If `data` is a string or - sequence of lines, it is first split into .ini-style sections (using - the ``split_sections()`` utility function) and the section names are used - as group names. In either case, the `dist` argument is passed through to - ``parse_group()`` so that the entry points will be linked to the specified - distribution. - - -``EntryPoint`` Objects ----------------------- - -For simple introspection, ``EntryPoint`` objects have attributes that -correspond exactly to the constructor argument names: ``name``, -``module_name``, ``attrs``, ``extras``, and ``dist`` are all available. In -addition, the following methods are provided: - -``load(require=True, env=None, installer=None)`` - Load the entry point, returning the advertised Python object, or raise - ``ImportError`` if it cannot be obtained. If `require` is a true value, - then ``require(env, installer)`` is called before attempting the import. 
- -``require(env=None, installer=None)`` - Ensure that any "extras" needed by the entry point are available on - sys.path. ``UnknownExtra`` is raised if the ``EntryPoint`` has ``extras``, - but no ``dist``, or if the named extras are not defined by the - distribution. If `env` is supplied, it must be an ``Environment``, and it - will be used to search for needed distributions if they are not already - present on sys.path. If `installer` is supplied, it must be a callable - taking a ``Requirement`` instance and returning a matching importable - ``Distribution`` instance or None. - -``__str__()`` - The string form of an ``EntryPoint`` is a string that could be passed to - ``EntryPoint.parse()`` to produce an equivalent ``EntryPoint``. - - -``Distribution`` Objects -======================== - -``Distribution`` objects represent collections of Python code that may or may -not be importable, and may or may not have metadata and resources associated -with them. Their metadata may include information such as what other projects -the distribution depends on, what entry points the distribution advertises, and -so on. - - -Getting or Creating Distributions ---------------------------------- - -Most commonly, you'll obtain ``Distribution`` objects from a ``WorkingSet`` or -an ``Environment``. (See the sections above on `WorkingSet Objects`_ and -`Environment Objects`_, which are containers for active distributions and -available distributions, respectively.) You can also obtain ``Distribution`` -objects from one of these high-level APIs: - -``find_distributions(path_item, only=False)`` - Yield distributions accessible via `path_item`. If `only` is true, yield - only distributions whose ``location`` is equal to `path_item`. In other - words, if `only` is true, this yields any distributions that would be - importable if `path_item` were on ``sys.path``. 
If `only` is false, this - also yields distributions that are "in" or "under" `path_item`, but would - not be importable unless their locations were also added to ``sys.path``. - -``get_distribution(dist_spec)`` - Return a ``Distribution`` object for a given ``Requirement`` or string. - If `dist_spec` is already a ``Distribution`` instance, it is returned. - If it is a ``Requirement`` object or a string that can be parsed into one, - it is used to locate and activate a matching distribution, which is then - returned. - -However, if you're creating specialized tools for working with distributions, -or creating a new distribution format, you may also need to create -``Distribution`` objects directly, using one of the three constructors below. - -These constructors all take an optional `metadata` argument, which is used to -access any resources or metadata associated with the distribution. `metadata` -must be an object that implements the ``IResourceProvider`` interface, or None. -If it is None, an ``EmptyProvider`` is used instead. ``Distribution`` objects -implement both the `IResourceProvider`_ and `IMetadataProvider Methods`_ by -delegating them to the `metadata` object. - -``Distribution.from_location(location, basename, metadata=None, **kw)`` (classmethod) - Create a distribution for `location`, which must be a string such as a - URL, filename, or other string that might be used on ``sys.path``. - `basename` is a string naming the distribution, like ``Foo-1.2-py2.4.egg``. - If `basename` ends with ``.egg``, then the project's name, version, python - version and platform are extracted from the filename and used to set those - properties of the created distribution. Any additional keyword arguments - are forwarded to the ``Distribution()`` constructor. - -``Distribution.from_filename(filename, metadata=None**kw)`` (classmethod) - Create a distribution by parsing a local filename. 
This is a shorter way - of saying ``Distribution.from_location(normalize_path(filename), - os.path.basename(filename), metadata)``. In other words, it creates a - distribution whose location is the normalize form of the filename, parsing - name and version information from the base portion of the filename. Any - additional keyword arguments are forwarded to the ``Distribution()`` - constructor. - -``Distribution(location,metadata,project_name,version,py_version,platform,precedence)`` - Create a distribution by setting its properties. All arguments are - optional and default to None, except for `py_version` (which defaults to - the current Python version) and `precedence` (which defaults to - ``EGG_DIST``; for more details see ``precedence`` under `Distribution - Attributes`_ below). Note that it's usually easier to use the - ``from_filename()`` or ``from_location()`` constructors than to specify - all these arguments individually. - - -``Distribution`` Attributes ---------------------------- - -location - A string indicating the distribution's location. For an importable - distribution, this is the string that would be added to ``sys.path`` to - make it actively importable. For non-importable distributions, this is - simply a filename, URL, or other way of locating the distribution. - -project_name - A string, naming the project that this distribution is for. Project names - are defined by a project's setup script, and they are used to identify - projects on PyPI. When a ``Distribution`` is constructed, the - `project_name` argument is passed through the ``safe_name()`` utility - function to filter out any unacceptable characters. - -key - ``dist.key`` is short for ``dist.project_name.lower()``. It's used for - case-insensitive comparison and indexing of distributions by project name. 
- -extras - A list of strings, giving the names of extra features defined by the - project's dependency list (the ``extras_require`` argument specified in - the project's setup script). - -version - A string denoting what release of the project this distribution contains. - When a ``Distribution`` is constructed, the `version` argument is passed - through the ``safe_version()`` utility function to filter out any - unacceptable characters. If no `version` is specified at construction - time, then attempting to access this attribute later will cause the - ``Distribution`` to try to discover its version by reading its ``PKG-INFO`` - metadata file. If ``PKG-INFO`` is unavailable or can't be parsed, - ``ValueError`` is raised. - -parsed_version - The ``parsed_version`` is a tuple representing a "parsed" form of the - distribution's ``version``. ``dist.parsed_version`` is a shortcut for - calling ``parse_version(dist.version)``. It is used to compare or sort - distributions by version. (See the `Parsing Utilities`_ section below for - more information on the ``parse_version()`` function.) Note that accessing - ``parsed_version`` may result in a ``ValueError`` if the ``Distribution`` - was constructed without a `version` and without `metadata` capable of - supplying the missing version info. - -py_version - The major/minor Python version the distribution supports, as a string. - For example, "2.7" or "3.4". The default is the current version of Python. - -platform - A string representing the platform the distribution is intended for, or - ``None`` if the distribution is "pure Python" and therefore cross-platform. - See `Platform Utilities`_ below for more information on platform strings. - -precedence - A distribution's ``precedence`` is used to determine the relative order of - two distributions that have the same ``project_name`` and - ``parsed_version``. The default precedence is ``pkg_resources.EGG_DIST``, - which is the highest (i.e. most preferred) precedence. 
The full list - of predefined precedences, from most preferred to least preferred, is: - ``EGG_DIST``, ``BINARY_DIST``, ``SOURCE_DIST``, ``CHECKOUT_DIST``, and - ``DEVELOP_DIST``. Normally, precedences other than ``EGG_DIST`` are used - only by the ``setuptools.package_index`` module, when sorting distributions - found in a package index to determine their suitability for installation. - "System" and "Development" eggs (i.e., ones that use the ``.egg-info`` - format), however, are automatically given a precedence of ``DEVELOP_DIST``. - - - -``Distribution`` Methods ------------------------- - -``activate(path=None)`` - Ensure distribution is importable on `path`. If `path` is None, - ``sys.path`` is used instead. This ensures that the distribution's - ``location`` is in the `path` list, and it also performs any necessary - namespace package fixups or declarations. (That is, if the distribution - contains namespace packages, this method ensures that they are declared, - and that the distribution's contents for those namespace packages are - merged with the contents provided by any other active distributions. See - the section above on `Namespace Package Support`_ for more information.) - - ``pkg_resources`` adds a notification callback to the global ``working_set`` - that ensures this method is called whenever a distribution is added to it. - Therefore, you should not normally need to explicitly call this method. - (Note that this means that namespace packages on ``sys.path`` are always - imported as soon as ``pkg_resources`` is, which is another reason why - namespace packages should not contain any code or import statements.) - -``as_requirement()`` - Return a ``Requirement`` instance that matches this distribution's project - name and version. - -``requires(extras=())`` - List the ``Requirement`` objects that specify this distribution's - dependencies. 
If `extras` is specified, it should be a sequence of names - of "extras" defined by the distribution, and the list returned will then - include any dependencies needed to support the named "extras". - -``clone(**kw)`` - Create a copy of the distribution. Any supplied keyword arguments override - the corresponding argument to the ``Distribution()`` constructor, allowing - you to change some of the copied distribution's attributes. - -``egg_name()`` - Return what this distribution's standard filename should be, not including - the ".egg" extension. For example, a distribution for project "Foo" - version 1.2 that runs on Python 2.3 for Windows would have an ``egg_name()`` - of ``Foo-1.2-py2.3-win32``. Any dashes in the name or version are - converted to underscores. (``Distribution.from_location()`` will convert - them back when parsing a ".egg" file name.) - -``__cmp__(other)``, ``__hash__()`` - Distribution objects are hashed and compared on the basis of their parsed - version and precedence, followed by their key (lowercase project name), - location, Python version, and platform. - -The following methods are used to access ``EntryPoint`` objects advertised -by the distribution. See the section above on `Entry Points`_ for more -detailed information about these operations: - -``get_entry_info(group, name)`` - Return the ``EntryPoint`` object for `group` and `name`, or None if no - such point is advertised by this distribution. - -``get_entry_map(group=None)`` - Return the entry point map for `group`. If `group` is None, return - a dictionary mapping group names to entry point maps for all groups. - (An entry point map is a dictionary of entry point names to ``EntryPoint`` - objects.) - -``load_entry_point(group, name)`` - Short for ``get_entry_info(group, name).load()``. Returns the object - advertised by the named entry point, or raises ``ImportError`` if - the entry point isn't advertised by this distribution, or there is some - other import problem. 
- -In addition to the above methods, ``Distribution`` objects also implement all -of the `IResourceProvider`_ and `IMetadataProvider Methods`_ (which are -documented in later sections): - -* ``has_metadata(name)`` -* ``metadata_isdir(name)`` -* ``metadata_listdir(name)`` -* ``get_metadata(name)`` -* ``get_metadata_lines(name)`` -* ``run_script(script_name, namespace)`` -* ``get_resource_filename(manager, resource_name)`` -* ``get_resource_stream(manager, resource_name)`` -* ``get_resource_string(manager, resource_name)`` -* ``has_resource(resource_name)`` -* ``resource_isdir(resource_name)`` -* ``resource_listdir(resource_name)`` - -If the distribution was created with a `metadata` argument, these resource and -metadata access methods are all delegated to that `metadata` provider. -Otherwise, they are delegated to an ``EmptyProvider``, so that the distribution -will appear to have no resources or metadata. This delegation approach is used -so that supporting custom importers or new distribution formats can be done -simply by creating an appropriate `IResourceProvider`_ implementation; see the -section below on `Supporting Custom Importers`_ for more details. - - -``ResourceManager`` API -======================= - -The ``ResourceManager`` class provides uniform access to package resources, -whether those resources exist as files and directories or are compressed in -an archive of some kind. - -Normally, you do not need to create or explicitly manage ``ResourceManager`` -instances, as the ``pkg_resources`` module creates a global instance for you, -and makes most of its methods available as top-level names in the -``pkg_resources`` module namespace. 
So, for example, this code actually
-calls the ``resource_string()`` method of the global ``ResourceManager``::
-
-    import pkg_resources
-    my_data = pkg_resources.resource_string(__name__, "foo.dat")
-
-Thus, you can use the APIs below without needing an explicit
-``ResourceManager`` instance; just import and use them as needed.
-
-
-Basic Resource Access
----------------------
-
-In the following methods, the `package_or_requirement` argument may be either
-a Python package/module name (e.g. ``foo.bar``) or a ``Requirement`` instance.
-If it is a package or module name, the named module or package must be
-importable (i.e., be in a distribution or directory on ``sys.path``), and the
-`resource_name` argument is interpreted relative to the named package.  (Note
-that if a module name is used, then the resource name is relative to the
-package immediately containing the named module.  Also, you should not use
-a namespace package name, because a namespace package can be spread across
-multiple distributions, and is therefore ambiguous as to which distribution
-should be searched for the resource.)
-
-If it is a ``Requirement``, then the requirement is automatically resolved
-(searching the current ``Environment`` if necessary) and a matching
-distribution is added to the ``WorkingSet`` and ``sys.path`` if one was not
-already present.  (Unless the ``Requirement`` can't be satisfied, in which
-case an exception is raised.)  The `resource_name` argument is then interpreted
-relative to the root of the identified distribution; i.e. its first path
-segment will be treated as a peer of the top-level modules or packages in the
-distribution.
-
-Note that resource names must be ``/``-separated paths and cannot be absolute
-(i.e. no leading ``/``) or contain relative names like ``".."``.  Do *not* use
-``os.path`` routines to manipulate resource paths, as they are *not* filesystem
-paths. 
- -``resource_exists(package_or_requirement, resource_name)`` - Does the named resource exist? Return ``True`` or ``False`` accordingly. - -``resource_stream(package_or_requirement, resource_name)`` - Return a readable file-like object for the specified resource; it may be - an actual file, a ``StringIO``, or some similar object. The stream is - in "binary mode", in the sense that whatever bytes are in the resource - will be read as-is. - -``resource_string(package_or_requirement, resource_name)`` - Return the specified resource as a string. The resource is read in - binary fashion, such that the returned string contains exactly the bytes - that are stored in the resource. - -``resource_isdir(package_or_requirement, resource_name)`` - Is the named resource a directory? Return ``True`` or ``False`` - accordingly. - -``resource_listdir(package_or_requirement, resource_name)`` - List the contents of the named resource directory, just like ``os.listdir`` - except that it works even if the resource is in a zipfile. - -Note that only ``resource_exists()`` and ``resource_isdir()`` are insensitive -as to the resource type. You cannot use ``resource_listdir()`` on a file -resource, and you can't use ``resource_string()`` or ``resource_stream()`` on -directory resources. Using an inappropriate method for the resource type may -result in an exception or undefined behavior, depending on the platform and -distribution format involved. - - -Resource Extraction -------------------- - -``resource_filename(package_or_requirement, resource_name)`` - Sometimes, it is not sufficient to access a resource in string or stream - form, and a true filesystem filename is needed. In such cases, you can - use this method (or module-level function) to obtain a filename for a - resource. If the resource is in an archive distribution (such as a zipped - egg), it will be extracted to a cache directory, and the filename within - the cache will be returned. 
If the named resource is a directory, then - all resources within that directory (including subdirectories) are also - extracted. If the named resource is a C extension or "eager resource" - (see the ``setuptools`` documentation for details), then all C extensions - and eager resources are extracted at the same time. - - Archived resources are extracted to a cache location that can be managed by - the following two methods: - -``set_extraction_path(path)`` - Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which is - based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the resource provider. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. (On - Windows, for example, you can't unlink .pyd or .dll files that are still - in use.) - - Note that you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``. - -``cleanup_resources(force=False)`` - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. 
- - -"Provider" Interface --------------------- - -If you are implementing an ``IResourceProvider`` and/or ``IMetadataProvider`` -for a new distribution archive format, you may need to use the following -``IResourceManager`` methods to co-ordinate extraction of resources to the -filesystem. If you're not implementing an archive format, however, you have -no need to use these methods. Unlike the other methods listed above, they are -*not* available as top-level functions tied to the global ``ResourceManager``; -you must therefore have an explicit ``ResourceManager`` instance to use them. - -``get_cache_path(archive_name, names=())`` - Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - -``extraction_error()`` - Raise an ``ExtractionError`` describing the active exception as interfering - with the extraction process. You should call this if you encounter any - OS errors extracting the file to the cache path; it will format the - operating system exception for you, and add other information to the - ``ExtractionError`` instance that may be needed by programs that want to - wrap or handle extraction errors themselves. - -``postprocess(tempname, filename)`` - Perform any platform-specific postprocessing of `tempname`. - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. 
- - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - - -Metadata API -============ - -The metadata API is used to access metadata resources bundled in a pluggable -distribution. Metadata resources are virtual files or directories containing -information about the distribution, such as might be used by an extensible -application or framework to connect "plugins". Like other kinds of resources, -metadata resource names are ``/``-separated and should not contain ``..`` or -begin with a ``/``. You should not use ``os.path`` routines to manipulate -resource paths. - -The metadata API is provided by objects implementing the ``IMetadataProvider`` -or ``IResourceProvider`` interfaces. ``Distribution`` objects implement this -interface, as do objects returned by the ``get_provider()`` function: - -``get_provider(package_or_requirement)`` - If a package name is supplied, return an ``IResourceProvider`` for the - package. If a ``Requirement`` is supplied, resolve it by returning a - ``Distribution`` from the current working set (searching the current - ``Environment`` if necessary and adding the newly found ``Distribution`` - to the working set). If the named package can't be imported, or the - ``Requirement`` can't be satisfied, an exception is raised. - - NOTE: if you use a package name rather than a ``Requirement``, the object - you get back may not be a pluggable distribution, depending on the method - by which the package was installed. In particular, "development" packages - and "single-version externally-managed" packages do not have any way to - map from a package name to the corresponding project's metadata. Do not - write code that passes a package name to ``get_provider()`` and then tries - to retrieve project metadata from the returned object. 
It may appear to - work when the named package is in an ``.egg`` file or directory, but - it will fail in other installation scenarios. If you want project - metadata, you need to ask for a *project*, not a package. - - -``IMetadataProvider`` Methods ------------------------------ - -The methods provided by objects (such as ``Distribution`` instances) that -implement the ``IMetadataProvider`` or ``IResourceProvider`` interfaces are: - -``has_metadata(name)`` - Does the named metadata resource exist? - -``metadata_isdir(name)`` - Is the named metadata resource a directory? - -``metadata_listdir(name)`` - List of metadata names in the directory (like ``os.listdir()``) - -``get_metadata(name)`` - Return the named metadata resource as a string. The data is read in binary - mode; i.e., the exact bytes of the resource file are returned. - -``get_metadata_lines(name)`` - Yield named metadata resource as list of non-blank non-comment lines. This - is short for calling ``yield_lines(provider.get_metadata(name))``. See the - section on `yield_lines()`_ below for more information on the syntax it - recognizes. - -``run_script(script_name, namespace)`` - Execute the named script in the supplied namespace dictionary. Raises - ``ResolutionError`` if there is no script by that name in the ``scripts`` - metadata directory. `namespace` should be a Python dictionary, usually - a module dictionary if the script is being run as a module. - - -Exceptions -========== - -``pkg_resources`` provides a simple exception hierarchy for problems that may -occur when processing requests to locate and activate packages:: - - ResolutionError - DistributionNotFound - VersionConflict - UnknownExtra - - ExtractionError - -``ResolutionError`` - This class is used as a base class for the other three exceptions, so that - you can catch all of them with a single "except" clause. 
It is also raised - directly for miscellaneous requirement-resolution problems like trying to - run a script that doesn't exist in the distribution it was requested from. - -``DistributionNotFound`` - A distribution needed to fulfill a requirement could not be found. - -``VersionConflict`` - The requested version of a project conflicts with an already-activated - version of the same project. - -``UnknownExtra`` - One of the "extras" requested was not recognized by the distribution it - was requested from. - -``ExtractionError`` - A problem occurred extracting a resource to the Python Egg cache. The - following attributes are available on instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - - -Supporting Custom Importers -=========================== - -By default, ``pkg_resources`` supports normal filesystem imports, and -``zipimport`` importers. If you wish to use the ``pkg_resources`` features -with other (PEP 302-compatible) importers or module loaders, you may need to -register various handlers and support functions using these APIs: - -``register_finder(importer_type, distribution_finder)`` - Register `distribution_finder` to find distributions in ``sys.path`` items. - `importer_type` is the type or class of a PEP 302 "Importer" (``sys.path`` - item handler), and `distribution_finder` is a callable that, when passed a - path item, the importer instance, and an `only` flag, yields - ``Distribution`` instances found under that path item. (The `only` flag, - if true, means the finder should yield only ``Distribution`` objects whose - ``location`` is equal to the path item provided.) - - See the source of the ``pkg_resources.find_on_path`` function for an - example finder function. 
- -``register_loader_type(loader_type, provider_factory)`` - Register `provider_factory` to make ``IResourceProvider`` objects for - `loader_type`. `loader_type` is the type or class of a PEP 302 - ``module.__loader__``, and `provider_factory` is a function that, when - passed a module object, returns an `IResourceProvider`_ for that module, - allowing it to be used with the `ResourceManager API`_. - -``register_namespace_handler(importer_type, namespace_handler)`` - Register `namespace_handler` to declare namespace packages for the given - `importer_type`. `importer_type` is the type or class of a PEP 302 - "importer" (sys.path item handler), and `namespace_handler` is a callable - with a signature like this:: - - def namespace_handler(importer, path_entry, moduleName, module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the relevant importer object has - already agreed that it can handle the relevant path item. The handler - should only return a subpath if the module ``__path__`` does not already - contain an equivalent subpath. Otherwise, it should return None. - - For an example namespace handler, see the source of the - ``pkg_resources.file_ns_handler`` function, which is used for both zipfile - importing and regular importing. - - -IResourceProvider ------------------ - -``IResourceProvider`` is an abstract class that documents what methods are -required of objects returned by a `provider_factory` registered with -``register_loader_type()``. ``IResourceProvider`` is a subclass of -``IMetadataProvider``, so objects that implement this interface must also -implement all of the `IMetadataProvider Methods`_ as well as the methods -shown here. The `manager` argument to the methods below must be an object -that supports the full `ResourceManager API`_ documented above. 
- -``get_resource_filename(manager, resource_name)`` - Return a true filesystem path for `resource_name`, co-ordinating the - extraction with `manager`, if the resource must be unpacked to the - filesystem. - -``get_resource_stream(manager, resource_name)`` - Return a readable file-like object for `resource_name`. - -``get_resource_string(manager, resource_name)`` - Return a string containing the contents of `resource_name`. - -``has_resource(resource_name)`` - Does the package contain the named resource? - -``resource_isdir(resource_name)`` - Is the named resource a directory? Return a false value if the resource - does not exist or is not a directory. - -``resource_listdir(resource_name)`` - Return a list of the contents of the resource directory, ala - ``os.listdir()``. Requesting the contents of a non-existent directory may - raise an exception. - -Note, by the way, that your provider classes need not (and should not) subclass -``IResourceProvider`` or ``IMetadataProvider``! These classes exist solely -for documentation purposes and do not provide any useful implementation code. -You may instead wish to subclass one of the `built-in resource providers`_. - - -Built-in Resource Providers ---------------------------- - -``pkg_resources`` includes several provider classes that are automatically used -where appropriate. Their inheritance tree looks like this:: - - NullProvider - EggProvider - DefaultProvider - PathMetadata - ZipProvider - EggMetadata - EmptyProvider - FileMetadata - - -``NullProvider`` - This provider class is just an abstract base that provides for common - provider behaviors (such as running scripts), given a definition for just - a few abstract methods. - -``EggProvider`` - This provider class adds in some egg-specific features that are common - to zipped and unzipped eggs. - -``DefaultProvider`` - This provider class is used for unpacked eggs and "plain old Python" - filesystem modules. 
- -``ZipProvider`` - This provider class is used for all zipped modules, whether they are eggs - or not. - -``EmptyProvider`` - This provider class always returns answers consistent with a provider that - has no metadata or resources. ``Distribution`` objects created without - a ``metadata`` argument use an instance of this provider class instead. - Since all ``EmptyProvider`` instances are equivalent, there is no need - to have more than one instance. ``pkg_resources`` therefore creates a - global instance of this class under the name ``empty_provider``, and you - may use it if you have need of an ``EmptyProvider`` instance. - -``PathMetadata(path, egg_info)`` - Create an ``IResourceProvider`` for a filesystem-based distribution, where - `path` is the filesystem location of the importable modules, and `egg_info` - is the filesystem location of the distribution's metadata directory. - `egg_info` should usually be the ``EGG-INFO`` subdirectory of `path` for an - "unpacked egg", and a ``ProjectName.egg-info`` subdirectory of `path` for - a "development egg". However, other uses are possible for custom purposes. - -``EggMetadata(zipimporter)`` - Create an ``IResourceProvider`` for a zipfile-based distribution. The - `zipimporter` should be a ``zipimport.zipimporter`` instance, and may - represent a "basket" (a zipfile containing multiple ".egg" subdirectories) - a specific egg *within* a basket, or a zipfile egg (where the zipfile - itself is a ".egg"). It can also be a combination, such as a zipfile egg - that also contains other eggs. - -``FileMetadata(path_to_pkg_info)`` - Create an ``IResourceProvider`` that provides exactly one metadata - resource: ``PKG-INFO``. The supplied path should be a distutils PKG-INFO - file. This is basically the same as an ``EmptyProvider``, except that - requests for ``PKG-INFO`` will be answered using the contents of the - designated file. 
(This provider is used to wrap ``.egg-info`` files - installed by vendor-supplied system packages.) - - -Utility Functions -================= - -In addition to its high-level APIs, ``pkg_resources`` also includes several -generally-useful utility routines. These routines are used to implement the -high-level APIs, but can also be quite useful by themselves. - - -Parsing Utilities ------------------ - -``parse_version(version)`` - Parse a project's version string, returning a value that can be used to - compare versions by chronological order. Semantically, the format is a - rough cross between distutils' ``StrictVersion`` and ``LooseVersion`` - classes; if you give it versions that would work with ``StrictVersion``, - then they will compare the same way. Otherwise, comparisons are more like - a "smarter" form of ``LooseVersion``. It is *possible* to create - pathological version coding schemes that will fool this parser, but they - should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". Any "-" - characters preceding a pre-release indicator are removed. 
(In versions of
-    setuptools prior to 0.6a9, "-" characters were not removed, leading to the
-    unintuitive result that "0.2-rc1" was considered a newer version than
-    "0.2".)
-
-    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
-    "rc" are treated as if they were "c", i.e. as though they were release
-    candidates, and therefore are not as new as a version string that does not
-    contain them.  And the string "dev" is treated as if it were an "@" sign;
-    that is, a version coming before even "a" or "alpha".
-
-.. _yield_lines():
-
-``yield_lines(strs)``
-    Yield non-empty/non-comment lines from a string/unicode or a possibly-
-    nested sequence thereof.  If `strs` is an instance of ``basestring``, it
-    is split into lines, and each non-blank, non-comment line is yielded after
-    stripping leading and trailing whitespace.  (Lines whose first non-blank
-    character is ``#`` are considered comment lines.)
-
-    If `strs` is not an instance of ``basestring``, it is iterated over, and
-    each item is passed recursively to ``yield_lines()``, so that an arbitrarily
-    nested sequence of strings, or sequences of sequences of strings can be
-    flattened out to the lines contained therein.  So for example, passing
-    a file object or a list of strings to ``yield_lines`` will both work.
-    (Note that between each string in a sequence of strings there is assumed to
-    be an implicit line break, so lines cannot bridge two strings in a
-    sequence.)
-
-    This routine is used extensively by ``pkg_resources`` to parse metadata
-    and file formats of various kinds, and most other ``pkg_resources``
-    parsing functions that yield multiple values will use it to break up their
-    input.  However, this routine is idempotent, so calling ``yield_lines()``
-    on the output of another call to ``yield_lines()`` is completely harmless.
-
-``split_sections(strs)``
-    Split a string (or possibly-nested iterable thereof), yielding ``(section,
-    content)`` pairs found using an ``.ini``-like syntax. 
Each ``section`` is - a whitespace-stripped version of the section name ("``[section]``") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any non-blank, non-comment lines before - the first section header, they're yielded in a first ``section`` of - ``None``. - - This routine uses ``yield_lines()`` as its front end, so you can pass in - anything that ``yield_lines()`` accepts, such as an open text file, string, - or sequence of strings. ``ValueError`` is raised if a malformed section - header is found (i.e. a line starting with ``[`` but not ending with - ``]``). - - Note that this simplistic parser assumes that any line whose first nonblank - character is ``[`` is a section heading, so it can't support .ini format - variations that allow ``[`` as the first nonblank character on other lines. - -``safe_name(name)`` - Return a "safe" form of a project's name, suitable for use in a - ``Requirement`` string, as a distribution name, or a PyPI project name. - All non-alphanumeric runs are condensed to single "-" characters, such that - a name like "The $$$ Tree" becomes "The-Tree". Note that if you are - generating a filename from this value you should combine it with a call to - ``to_filename()`` so all dashes ("-") are replaced by underscores ("_"). - See ``to_filename()``. - -``safe_version(version)`` - Similar to ``safe_name()`` except that spaces in the input become dots, and - dots are allowed to exist in the output. As with ``safe_name()``, if you - are generating a filename from this you should replace any "-" characters - in the output with underscores. - -``safe_extra(extra)`` - Return a "safe" form of an extra's name, suitable for use in a requirement - string or a setup script's ``extras_require`` keyword. This routine is - similar to ``safe_name()`` except that non-alphanumeric runs are replaced - by a single underbar (``_``), and the result is lowercased. 
-
-``to_filename(name_or_version)``
-    Escape a name or version string so it can be used in a dash-separated
-    filename (or ``#egg=name-version`` tag) without ambiguity.  You
-    should only pass in values that were returned by ``safe_name()`` or
-    ``safe_version()``.
-
-
-Platform Utilities
-------------------
-
-``get_build_platform()``
-    Return this platform's identifier string.  For Windows, the return value
-    is ``"win32"``, and for Mac OS X it is a string of the form
-    ``"macosx-10.4-ppc"``.  All other platforms return the same uname-based
-    string that the ``distutils.util.get_platform()`` function returns.
-    This string is the minimum platform version required by distributions built
-    on the local machine.  (Backward compatibility note: setuptools versions
-    prior to 0.6b1 called this function ``get_platform()``, and the function is
-    still available under that name for backward compatibility reasons.)
-
-``get_supported_platform()`` (New in 0.6b1)
-    This is similar to ``get_build_platform()``, but is the maximum
-    platform version that the local machine supports.  You will usually want
-    to use this value as the ``provided`` argument to the
-    ``compatible_platforms()`` function.
-
-``compatible_platforms(provided, required)``
-    Return true if a distribution built on the `provided` platform may be used
-    on the `required` platform.  If either platform value is ``None``, it is
-    considered a wildcard, and the platforms are therefore compatible.
-    Likewise, if the platform strings are equal, they're also considered
-    compatible, and ``True`` is returned.  Currently, the only non-equal
-    platform strings that are considered compatible are Mac OS X platform
-    strings with the same hardware type (e.g. ``ppc``) and major version
-    (e.g. ``10``) with the `provided` platform's minor version being less than
-    or equal to the `required` platform's minor version.
-
-``get_default_cache()``
-    Determine the default cache location for extracting resources from zipped
-    eggs. 
This routine returns the ``PYTHON_EGG_CACHE`` environment variable, - if set. Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of - the user's "Application Data" directory. On all other systems, it returns - ``os.path.expanduser("~/.python-eggs")`` if ``PYTHON_EGG_CACHE`` is not - set. - - -PEP 302 Utilities ------------------ - -``get_importer(path_item)`` - Retrieve a PEP 302 "importer" for the given path item (which need not - actually be on ``sys.path``). This routine simulates the PEP 302 protocol - for obtaining an "importer" object. It first checks for an importer for - the path item in ``sys.path_importer_cache``, and if not found it calls - each of the ``sys.path_hooks`` and caches the result if a good importer is - found. If no importer is found, this routine returns an ``ImpWrapper`` - instance that wraps the builtin import machinery as a PEP 302-compliant - "importer" object. This ``ImpWrapper`` is *not* cached; instead a new - instance is returned each time. - - (Note: When run under Python 2.5, this function is simply an alias for - ``pkgutil.get_importer()``, and instead of ``pkg_resources.ImpWrapper`` - instances, it may return ``pkgutil.ImpImporter`` instances.) - - -File/Path Utilities -------------------- - -``ensure_directory(path)`` - Ensure that the parent directory (``os.path.dirname``) of `path` actually - exists, using ``os.makedirs()`` if necessary. - -``normalize_path(path)`` - Return a "normalized" version of `path`, such that two paths represent - the same filesystem location if they have equal ``normalized_path()`` - values. Specifically, this is a shortcut for calling ``os.path.realpath`` - and ``os.path.normcase`` on `path`. Unfortunately, on certain platforms - (notably Cygwin and Mac OS X) the ``normcase`` function does not accurately - reflect the platform's case-sensitivity, so there is always the possibility - of two apparently-different paths being equal on such platforms. 
- -History -------- - -0.6c9 - * Fix ``resource_listdir('')`` always returning an empty list for zipped eggs. - -0.6c7 - * Fix package precedence problem where single-version eggs installed in - ``site-packages`` would take precedence over ``.egg`` files (or directories) - installed in ``site-packages``. - -0.6c6 - * Fix extracted C extensions not having executable permissions under Cygwin. - - * Allow ``.egg-link`` files to contain relative paths. - - * Fix cache dir defaults on Windows when multiple environment vars are needed - to construct a path. - -0.6c4 - * Fix "dev" versions being considered newer than release candidates. - -0.6c3 - * Python 2.5 compatibility fixes. - -0.6c2 - * Fix a problem with eggs specified directly on ``PYTHONPATH`` on - case-insensitive filesystems possibly not showing up in the default - working set, due to differing normalizations of ``sys.path`` entries. - -0.6b3 - * Fixed a duplicate path insertion problem on case-insensitive filesystems. - -0.6b1 - * Split ``get_platform()`` into ``get_supported_platform()`` and - ``get_build_platform()`` to work around a Mac versioning problem that caused - the behavior of ``compatible_platforms()`` to be platform specific. - - * Fix entry point parsing when a standalone module name has whitespace - between it and the extras. - -0.6a11 - * Added ``ExtractionError`` and ``ResourceManager.extraction_error()`` so that - cache permission problems get a more user-friendly explanation of the - problem, and so that programs can catch and handle extraction errors if they - need to. - -0.6a10 - * Added the ``extras`` attribute to ``Distribution``, the ``find_plugins()`` - method to ``WorkingSet``, and the ``__add__()`` and ``__iadd__()`` methods - to ``Environment``. - - * ``safe_name()`` now allows dots in project names. - - * There is a new ``to_filename()`` function that escapes project names and - versions for safe use in constructing egg filenames from a Distribution - object's metadata. 
- - * Added ``Distribution.clone()`` method, and keyword argument support to other - ``Distribution`` constructors. - - * Added the ``DEVELOP_DIST`` precedence, and automatically assign it to - eggs using ``.egg-info`` format. - -0.6a9 - * Don't raise an error when an invalid (unfinished) distribution is found - unless absolutely necessary. Warn about skipping invalid/unfinished eggs - when building an Environment. - - * Added support for ``.egg-info`` files or directories with version/platform - information embedded in the filename, so that system packagers have the - option of including ``PKG-INFO`` files to indicate the presence of a - system-installed egg, without needing to use ``.egg`` directories, zipfiles, - or ``.pth`` manipulation. - - * Changed ``parse_version()`` to remove dashes before pre-release tags, so - that ``0.2-rc1`` is considered an *older* version than ``0.2``, and is equal - to ``0.2rc1``. The idea that a dash *always* meant a post-release version - was highly non-intuitive to setuptools users and Python developers, who - seem to want to use ``-rc`` version numbers a lot. - -0.6a8 - * Fixed a problem with ``WorkingSet.resolve()`` that prevented version - conflicts from being detected at runtime. - - * Improved runtime conflict warning message to identify a line in the user's - program, rather than flagging the ``warn()`` call in ``pkg_resources``. - - * Avoid giving runtime conflict warnings for namespace packages, even if they - were declared by a different package than the one currently being activated. - - * Fix path insertion algorithm for case-insensitive filesystems. - - * Fixed a problem with nested namespace packages (e.g. ``peak.util``) not - being set as an attribute of their parent package. - -0.6a6 - * Activated distributions are now inserted in ``sys.path`` (and the working - set) just before the directory that contains them, instead of at the end. - This allows e.g. 
eggs in ``site-packages`` to override unmanaged modules in - the same location, and allows eggs found earlier on ``sys.path`` to override - ones found later. - - * When a distribution is activated, it now checks whether any contained - non-namespace modules have already been imported and issues a warning if - a conflicting module has already been imported. - - * Changed dependency processing so that it's breadth-first, allowing a - depender's preferences to override those of a dependee, to prevent conflicts - when a lower version is acceptable to the dependee, but not the depender. - - * Fixed a problem extracting zipped files on Windows, when the egg in question - has had changed contents but still has the same version number. - -0.6a4 - * Fix a bug in ``WorkingSet.resolve()`` that was introduced in 0.6a3. - -0.6a3 - * Added ``safe_extra()`` parsing utility routine, and use it for Requirement, - EntryPoint, and Distribution objects' extras handling. - -0.6a1 - * Enhanced performance of ``require()`` and related operations when all - requirements are already in the working set, and enhanced performance of - directory scanning for distributions. - - * Fixed some problems using ``pkg_resources`` w/PEP 302 loaders other than - ``zipimport``, and the previously-broken "eager resource" support. - - * Fixed ``pkg_resources.resource_exists()`` not working correctly, along with - some other resource API bugs. - - * Many API changes and enhancements: - - * Added ``EntryPoint``, ``get_entry_map``, ``load_entry_point``, and - ``get_entry_info`` APIs for dynamic plugin discovery. - - * ``list_resources`` is now ``resource_listdir`` (and it actually works) - - * Resource API functions like ``resource_string()`` that accepted a package - name and resource name, will now also accept a ``Requirement`` object in - place of the package name (to allow access to non-package data files in - an egg). - - * ``get_provider()`` will now accept a ``Requirement`` instance or a module - name. 
If it is given a ``Requirement``, it will return a corresponding - ``Distribution`` (by calling ``require()`` if a suitable distribution - isn't already in the working set), rather than returning a metadata and - resource provider for a specific module. (The difference is in how - resource paths are interpreted; supplying a module name means resources - path will be module-relative, rather than relative to the distribution's - root.) - - * ``Distribution`` objects now implement the ``IResourceProvider`` and - ``IMetadataProvider`` interfaces, so you don't need to reference the (no - longer available) ``metadata`` attribute to get at these interfaces. - - * ``Distribution`` and ``Requirement`` both have a ``project_name`` - attribute for the project name they refer to. (Previously these were - ``name`` and ``distname`` attributes.) - - * The ``path`` attribute of ``Distribution`` objects is now ``location``, - because it isn't necessarily a filesystem path (and hasn't been for some - time now). The ``location`` of ``Distribution`` objects in the filesystem - should always be normalized using ``pkg_resources.normalize_path()``; all - of the setuptools and EasyInstall code that generates distributions from - the filesystem (including ``Distribution.from_filename()``) ensure this - invariant, but if you use a more generic API like ``Distribution()`` or - ``Distribution.from_location()`` you should take care that you don't - create a distribution with an un-normalized filesystem path. - - * ``Distribution`` objects now have an ``as_requirement()`` method that - returns a ``Requirement`` for the distribution's project name and version. - - * Distribution objects no longer have an ``installed_on()`` method, and the - ``install_on()`` method is now ``activate()`` (but may go away altogether - soon). The ``depends()`` method has also been renamed to ``requires()``, - and ``InvalidOption`` is now ``UnknownExtra``. 
- - * ``find_distributions()`` now takes an additional argument called ``only``, - that tells it to only yield distributions whose location is the passed-in - path. (It defaults to False, so that the default behavior is unchanged.) - - * ``AvailableDistributions`` is now called ``Environment``, and the - ``get()``, ``__len__()``, and ``__contains__()`` methods were removed, - because they weren't particularly useful. ``__getitem__()`` no longer - raises ``KeyError``; it just returns an empty list if there are no - distributions for the named project. - - * The ``resolve()`` method of ``Environment`` is now a method of - ``WorkingSet`` instead, and the ``best_match()`` method now uses a working - set instead of a path list as its second argument. - - * There is a new ``pkg_resources.add_activation_listener()`` API that lets - you register a callback for notifications about distributions added to - ``sys.path`` (including the distributions already on it). This is - basically a hook for extensible applications and frameworks to be able to - search for plugin metadata in distributions added at runtime. - -0.5a13 - * Fixed a bug in resource extraction from nested packages in a zipped egg. - -0.5a12 - * Updated extraction/cache mechanism for zipped resources to avoid inter- - process and inter-thread races during extraction. The default cache - location can now be set via the ``PYTHON_EGG_CACHE`` environment variable, - and the default Windows cache is now a ``Python-Eggs`` subdirectory of the - current user's "Application Data" directory, if the ``PYTHON_EGG_CACHE`` - variable isn't set. - -0.5a10 - * Fix a problem with ``pkg_resources`` being confused by non-existent eggs on - ``sys.path`` (e.g. if a user deletes an egg without removing it from the - ``easy-install.pth`` file). - - * Fix a problem with "basket" support in ``pkg_resources``, where egg-finding - never actually went inside ``.egg`` files. 
- - * Made ``pkg_resources`` import the module you request resources from, if it's - not already imported. - -0.5a4 - * ``pkg_resources.AvailableDistributions.resolve()`` and related methods now - accept an ``installer`` argument: a callable taking one argument, a - ``Requirement`` instance. The callable must return a ``Distribution`` - object, or ``None`` if no distribution is found. This feature is used by - EasyInstall to resolve dependencies by recursively invoking itself. - -0.4a4 - * Fix problems with ``resource_listdir()``, ``resource_isdir()`` and resource - directory extraction for zipped eggs. - -0.4a3 - * Fixed scripts not being able to see a ``__file__`` variable in ``__main__`` - - * Fixed a problem with ``resource_isdir()`` implementation that was introduced - in 0.4a2. - -0.4a1 - * Fixed a bug in requirements processing for exact versions (i.e. ``==`` and - ``!=``) when only one condition was included. - - * Added ``safe_name()`` and ``safe_version()`` APIs to clean up handling of - arbitrary distribution names and versions found on PyPI. - -0.3a4 - * ``pkg_resources`` now supports resource directories, not just the resources - in them. In particular, there are ``resource_listdir()`` and - ``resource_isdir()`` APIs. - - * ``pkg_resources`` now supports "egg baskets" -- .egg zipfiles which contain - multiple distributions in subdirectories whose names end with ``.egg``. - Having such a "basket" in a directory on ``sys.path`` is equivalent to - having the individual eggs in that directory, but the contained eggs can - be individually added (or not) to ``sys.path``. Currently, however, there - is no automated way to create baskets. - - * Namespace package manipulation is now protected by the Python import lock. - -0.3a1 - * Initial release. 
- diff --git a/libs/setuptools-2.2/docs/python3.txt b/libs/setuptools-2.2/docs/python3.txt deleted file mode 100644 index 1e01995..0000000 --- a/libs/setuptools-2.2/docs/python3.txt +++ /dev/null @@ -1,124 +0,0 @@ -===================================================== -Supporting both Python 2 and Python 3 with Setuptools -===================================================== - -Starting with Distribute version 0.6.2 and Setuptools 0.7, the Setuptools -project supported Python 3. Installing and -using setuptools for Python 3 code works exactly the same as for Python 2 -code, but Setuptools also helps you to support Python 2 and Python 3 from -the same source code by letting you run 2to3 on the code as a part of the -build process, by setting the keyword parameter ``use_2to3`` to True. - - -Setuptools as help during porting -================================= - -Setuptools can make the porting process much easier by automatically running -2to3 as a part of the test running. To do this you need to configure the -setup.py so that you can run the unit tests with ``python setup.py test``. - -See :ref:`test` for more information on this. - -Once you have the tests running under Python 2, you can add the use_2to3 -keyword parameters to setup(), and start running the tests under Python 3. -The test command will now first run the build command during which the code -will be converted with 2to3, and the tests will then be run from the build -directory, as opposed from the source directory as is normally done. - -Setuptools will convert all Python files, and also all doctests in Python -files. However, if you have doctests located in separate text files, these -will not automatically be converted. By adding them to the -``convert_2to3_doctests`` keyword parameter Setuptools will convert them as -well. - -By default, the conversion uses all fixers in the ``lib2to3.fixers`` package. 
-To use additional fixers, the parameter ``use_2to3_fixers`` can be set -to a list of names of packages containing fixers. To exclude fixers, the -parameter ``use_2to3_exclude_fixers`` can be set to fixer names to be -skipped. - -A typical setup.py can look something like this:: - - from setuptools import setup - - setup( - name='your.module', - version = '1.0', - description='This is your awesome module', - author='You', - author_email='your@email', - package_dir = {'': 'src'}, - packages = ['your', 'your.module'], - test_suite = 'your.module.tests', - use_2to3 = True, - convert_2to3_doctests = ['src/your/module/README.txt'], - use_2to3_fixers = ['your.fixers'], - use_2to3_exclude_fixers = ['lib2to3.fixes.fix_import'], - ) - -Differential conversion ----------------------- - -Note that a file will only be copied and converted during the build process -if the source file has been changed. If you add a file to the doctests -that should be converted, it will not be converted the next time you run -the tests, since it hasn't been modified. You need to remove it from the -build directory. Also if you run the build, install or test commands before -adding the use_2to3 parameter, you will have to remove the build directory -before you run the test command, as the files otherwise will seem updated, -and no conversion will happen. - -In general, if code doesn't seem to be converted, deleting the build directory -and trying again is a good safeguard against the build directory getting -"out of sync" with the source directory. - -Distributing Python 3 modules -============================= - -You can distribute your modules with Python 3 support in different ways. A -normal source distribution will work, but can be slow in installing, as the -2to3 process will be run during the install. But you can also distribute -the module in binary format, such as a binary egg. That egg will contain the -already converted code, and hence no 2to3 conversion is needed during install. 
- -Advanced features -================= - -If you don't want to run the 2to3 conversion on the doctests in Python files, -you can turn that off by setting ``setuptools.use_2to3_on_doctests = False``. - -Note on compatibility with older versions of setuptools -======================================================= - -Setuptools earlier than 0.7 does not know about the new keyword parameters to -support Python 3. -As a result it will warn about the unknown keyword parameters if you use -those versions of setuptools instead of Distribute under Python 2. This output -is not an error, and the -install process will continue as normal, but if you want to get rid of that -warning this is easy. Simply conditionally add the new parameters into an extra -dict and pass that dict into setup():: - - from setuptools import setup - import sys - - extra = {} - if sys.version_info >= (3,): - extra['use_2to3'] = True - extra['convert_2to3_doctests'] = ['src/your/module/README.txt'] - extra['use_2to3_fixers'] = ['your.fixers'] - - setup( - name='your.module', - version = '1.0', - description='This is your awesome module', - author='You', - author_email='your@email', - package_dir = {'': 'src'}, - packages = ['your', 'your.module'], - test_suite = 'your.module.tests', - **extra - ) - -This way the parameters will only be used under Python 3, where Distribute or -Setuptools 0.7 or later is required. diff --git a/libs/setuptools-2.2/docs/releases.txt b/libs/setuptools-2.2/docs/releases.txt deleted file mode 100644 index 5d1419b..0000000 --- a/libs/setuptools-2.2/docs/releases.txt +++ /dev/null @@ -1,18 +0,0 @@ -=============== -Release Process -=============== - -In order to allow for rapid, predictable releases, Setuptools uses a -mechanical technique for releases. The release script, ``release.py`` in the -repository, defines the details of the releases, and is executed by the -`jaraco.packaging `_ release -module. 
The script does some checks (some interactive) and fully automates -the release process. - -A Setuptools release manager must have maintainer access on PyPI to the -project and administrative access to the BitBucket project. - -Release Managers ----------------- - -Currently, the project has one release manager, Jason R. Coombs. diff --git a/libs/setuptools-2.2/docs/roadmap.txt b/libs/setuptools-2.2/docs/roadmap.txt deleted file mode 100644 index 8f175b9..0000000 --- a/libs/setuptools-2.2/docs/roadmap.txt +++ /dev/null @@ -1,6 +0,0 @@ -======= -Roadmap -======= - -Setuptools is primarily in maintenance mode. The project attempts to address -user issues, concerns, and feature requests in a timely fashion. diff --git a/libs/setuptools-2.2/docs/setuptools.txt b/libs/setuptools-2.2/docs/setuptools.txt deleted file mode 100644 index d48ad34..0000000 --- a/libs/setuptools-2.2/docs/setuptools.txt +++ /dev/null @@ -1,2696 +0,0 @@ -================================================== -Building and Distributing Packages with Setuptools -================================================== - -``Setuptools`` is a collection of enhancements to the Python ``distutils`` -(for Python 2.6 and up) that allow developers to more easily build and -distribute Python packages, especially ones that have dependencies on other -packages. - -Packages built and distributed using ``setuptools`` look to the user like -ordinary Python packages based on the ``distutils``. Your users don't need to -install or even know about setuptools in order to use them, and you don't -have to include the entire setuptools package in your distributions. By -including just a single `bootstrap module`_ (a 12K .py file), your package will -automatically download and install ``setuptools`` if the user is building your -package from source and doesn't have a suitable version already installed. - -.. 
_bootstrap module: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - -Feature Highlights: - -* Automatically find/download/install/upgrade dependencies at build time using - the `EasyInstall tool `_, - which supports downloading via HTTP, FTP, Subversion, and SourceForge, and - automatically scans web pages linked from PyPI to find download links. (It's - the closest thing to CPAN currently available for Python.) - -* Create `Python Eggs `_ - - a single-file importable distribution format - -* Enhanced support for accessing data files hosted in zipped packages. - -* Automatically include all packages in your source tree, without listing them - individually in setup.py - -* Automatically include all relevant files in your source distributions, - without needing to create a ``MANIFEST.in`` file, and without having to force - regeneration of the ``MANIFEST`` file when your source tree changes. - -* Automatically generate wrapper scripts or Windows (console and GUI) .exe - files for any number of "main" functions in your project. (Note: this is not - a py2exe replacement; the .exe files rely on the local Python installation.) - -* Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and - still work even when the end-user doesn't have Pyrex installed (as long as - you include the Pyrex-generated C in your source distribution) - -* Command aliases - create project-specific, per-user, or site-wide shortcut - names for commonly used commands and options - -* PyPI upload support - upload your source distributions and eggs to PyPI - -* Deploy your project in "development mode", such that it's available on - ``sys.path``, yet can still be edited directly from its source checkout. - -* Easily extend the distutils with new commands or ``setup()`` arguments, and - distribute/reuse your extensions for multiple projects, without copying code. 
- -* Create extensible applications and frameworks that automatically discover - extensions, using simple "entry points" declared in a project's setup script. - -In addition to the PyPI downloads, the development version of ``setuptools`` -is available from the `Python SVN sandbox`_, and in-development versions of the -`0.6 branch`_ are available as well. - -.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - -.. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev - -.. contents:: **Table of Contents** - -.. _ez_setup.py: `bootstrap module`_ - - ------------------ -Developer's Guide ------------------ - - -Installing ``setuptools`` -========================= - -Please follow the `EasyInstall Installation Instructions`_ to install the -current stable version of setuptools. In particular, be sure to read the -section on `Custom Installation Locations`_ if you are installing anywhere -other than Python's ``site-packages`` directory. - -.. _EasyInstall Installation Instructions: easy_install.html#installation-instructions - -.. _Custom Installation Locations: easy_install.html#custom-installation-locations - -If you want the current in-development version of setuptools, you should first -install a stable version, and then run:: - - ez_setup.py setuptools==dev - -This will download and install the latest development (i.e. unstable) version -of setuptools from the Python Subversion sandbox. - - -Basic Use -========= - -For basic use of setuptools, just import things from setuptools instead of -the distutils. Here's a minimal setup script using setuptools:: - - from setuptools import setup, find_packages - setup( - name = "HelloWorld", - version = "0.1", - packages = find_packages(), - ) - -As you can see, it doesn't take much to use setuptools in a project. 
-Just by doing the above, this project will be able to produce eggs, upload to -PyPI, and automatically include all packages in the directory where the -setup.py lives. See the `Command Reference`_ section below to see what -commands you can give to this setup script. - -Of course, before you release your project to PyPI, you'll want to add a bit -more information to your setup script to help people find or learn about your -project. And maybe your project will have grown by then to include a few -dependencies, and perhaps some data files and scripts:: - - from setuptools import setup, find_packages - setup( - name = "HelloWorld", - version = "0.1", - packages = find_packages(), - scripts = ['say_hello.py'], - - # Project uses reStructuredText, so ensure that the docutils get - # installed or upgraded on the target machine - install_requires = ['docutils>=0.3'], - - package_data = { - # If any package contains *.txt or *.rst files, include them: - '': ['*.txt', '*.rst'], - # And include any *.msg files found in the 'hello' package, too: - 'hello': ['*.msg'], - }, - - # metadata for upload to PyPI - author = "Me", - author_email = "me@example.com", - description = "This is an Example Package", - license = "PSF", - keywords = "hello world example examples", - url = "http://example.com/HelloWorld/", # project home page, if any - - # could also include long_description, download_url, classifiers, etc. - ) - -In the sections that follow, we'll explain what most of these ``setup()`` -arguments do (except for the metadata ones), and the various ways you might use -them in your own project(s). - - -Specifying Your Project's Version ---------------------------------- - -Setuptools can work well with most versioning schemes; there are, however, a -few special things to watch out for, in order to ensure that setuptools and -EasyInstall can always tell what version of your package is newer than another -version. 
Knowing these things will also help you correctly specify what -versions of other projects your project depends on. - -A version consists of an alternating series of release numbers and pre-release -or post-release tags. A release number is a series of digits punctuated by -dots, such as ``2.4`` or ``0.5``. Each series of digits is treated -numerically, so releases ``2.1`` and ``2.1.0`` are different ways to spell the -same release number, denoting the first subrelease of release 2. But ``2.10`` -is the *tenth* subrelease of release 2, and so is a different and newer release -from ``2.1`` or ``2.1.0``. Leading zeros within a series of digits are also -ignored, so ``2.01`` is the same as ``2.1``, and different from ``2.0.1``. - -Following a release number, you can have either a pre-release or post-release -tag. Pre-release tags make a version be considered *older* than the version -they are appended to. So, revision ``2.4`` is *newer* than revision ``2.4c1``, -which in turn is newer than ``2.4b1`` or ``2.4a1``. Postrelease tags make -a version be considered *newer* than the version they are appended to. So, -revisions like ``2.4-1`` and ``2.4pl3`` are newer than ``2.4``, but are *older* -than ``2.4.1`` (which has a higher release number). - -A pre-release tag is a series of letters that are alphabetically before -"final". Some examples of prerelease tags would include ``alpha``, ``beta``, -``a``, ``c``, ``dev``, and so on. You do not have to place a dot or dash -before the prerelease tag if it's immediately after a number, but it's okay to -do so if you prefer. Thus, ``2.4c1`` and ``2.4.c1`` and ``2.4-c1`` all -represent release candidate 1 of version ``2.4``, and are treated as identical -by setuptools. - -In addition, there are three special prerelease tags that are treated as if -they were the letter ``c``: ``pre``, ``preview``, and ``rc``. 
So, version -``2.4rc1``, ``2.4pre1`` and ``2.4preview1`` are all the exact same version as -``2.4c1``, and are treated as identical by setuptools. - -A post-release tag is either a series of letters that are alphabetically -greater than or equal to "final", or a dash (``-``). Post-release tags are -generally used to separate patch numbers, port numbers, build numbers, revision -numbers, or date stamps from the release number. For example, the version -``2.4-r1263`` might denote Subversion revision 1263 of a post-release patch of -version ``2.4``. Or you might use ``2.4-20051127`` to denote a date-stamped -post-release. - -Notice that after each pre or post-release tag, you are free to place another -release number, followed again by more pre- or post-release tags. For example, -``0.6a9.dev-r41475`` could denote Subversion revision 41475 of the in- -development version of the ninth alpha of release 0.6. Notice that ``dev`` is -a pre-release tag, so this version is a *lower* version number than ``0.6a9``, -which would be the actual ninth alpha of release 0.6. But the ``-r41475`` is -a post-release tag, so this version is *newer* than ``0.6a9.dev``. - -For the most part, setuptools' interpretation of version numbers is intuitive, -but here are a few tips that will keep you out of trouble in the corner cases: - -* Don't stick adjoining pre-release tags together without a dot or number - between them. Version ``1.9adev`` is the ``adev`` prerelease of ``1.9``, - *not* a development pre-release of ``1.9a``. Use ``.dev`` instead, as in - ``1.9a.dev``, or separate the prerelease tags with a number, as in - ``1.9a0dev``. ``1.9a.dev``, ``1.9a0dev``, and even ``1.9.a.dev`` are - identical versions from setuptools' point of view, so you can use whatever - scheme you prefer. 
- -* If you want to be certain that your chosen numbering scheme works the way - you think it will, you can use the ``pkg_resources.parse_version()`` function - to compare different version numbers:: - - >>> from pkg_resources import parse_version - >>> parse_version('1.9.a.dev') == parse_version('1.9a0dev') - True - >>> parse_version('2.1-rc2') < parse_version('2.1') - True - >>> parse_version('0.6a9dev-r41475') < parse_version('0.6a9') - True - -Once you've decided on a version numbering scheme for your project, you can -have setuptools automatically tag your in-development releases with various -pre- or post-release tags. See the following sections for more details: - -* `Tagging and "Daily Build" or "Snapshot" Releases`_ -* `Managing "Continuous Releases" Using Subversion`_ -* The `egg_info`_ command - - -New and Changed ``setup()`` Keywords -==================================== - -The following keyword arguments to ``setup()`` are added or changed by -``setuptools``. All of them are optional; you do not have to supply them -unless you need the associated ``setuptools`` feature. - -``include_package_data`` - If set to ``True``, this tells ``setuptools`` to automatically include any - data files it finds inside your package directories, that are either under - CVS or Subversion control, or which are specified by your ``MANIFEST.in`` - file. For more information, see the section below on `Including Data - Files`_. - -``exclude_package_data`` - A dictionary mapping package names to lists of glob patterns that should - be *excluded* from your package directories. You can use this to trim back - any excess files included by ``include_package_data``. For a complete - description and examples, see the section below on `Including Data Files`_. - -``package_data`` - A dictionary mapping package names to lists of glob patterns. For a - complete description and examples, see the section below on `Including - Data Files`_. 
You do not need to use this option if you are using - ``include_package_data``, unless you need to add e.g. files that are - generated by your setup script and build process. (And are therefore not - in source control or are files that you don't want to include in your - source distribution.) - -``zip_safe`` - A boolean (True or False) flag specifying whether the project can be - safely installed and run from a zip file. If this argument is not - supplied, the ``bdist_egg`` command will have to analyze all of your - project's contents for possible problems each time it builds an egg. - -``install_requires`` - A string or list of strings specifying what other distributions need to - be installed when this one is. See the section below on `Declaring - Dependencies`_ for details and examples of the format of this argument. - -``entry_points`` - A dictionary mapping entry point group names to strings or lists of strings - defining the entry points. Entry points are used to support dynamic - discovery of services or plugins provided by a project. See `Dynamic - Discovery of Services and Plugins`_ for details and examples of the format - of this argument. In addition, this keyword is used to support `Automatic - Script Creation`_. - -``extras_require`` - A dictionary mapping names of "extras" (optional features of your project) - to strings or lists of strings specifying what other distributions must be - installed to support those features. See the section below on `Declaring - Dependencies`_ for details and examples of the format of this argument. - -``setup_requires`` - A string or list of strings specifying what other distributions need to - be present in order for the *setup script* to run. ``setuptools`` will - attempt to obtain these (even going so far as to download them using - ``EasyInstall``) before processing the rest of the setup script or commands. 
- This argument is needed if you are using distutils extensions as part of - your build process; for example, extensions that process setup() arguments - and turn them into EGG-INFO metadata files. - - (Note: projects listed in ``setup_requires`` will NOT be automatically - installed on the system where the setup script is being run. They are - simply downloaded to the setup directory if they're not locally available - already. If you want them to be installed, as well as being available - when the setup script is run, you should add them to ``install_requires`` - **and** ``setup_requires``.) - -``dependency_links`` - A list of strings naming URLs to be searched when satisfying dependencies. - These links will be used if needed to install packages specified by - ``setup_requires`` or ``tests_require``. They will also be written into - the egg's metadata for use by tools like EasyInstall to use when installing - an ``.egg`` file. - -``namespace_packages`` - A list of strings naming the project's "namespace packages". A namespace - package is a package that may be split across multiple project - distributions. For example, Zope 3's ``zope`` package is a namespace - package, because subpackages like ``zope.interface`` and ``zope.publisher`` - may be distributed separately. The egg runtime system can automatically - merge such subpackages into a single parent package at runtime, as long - as you declare them in each project that contains any subpackages of the - namespace package, and as long as the namespace package's ``__init__.py`` - does not contain any code other than a namespace declaration. See the - section below on `Namespace Packages`_ for more information. - -``test_suite`` - A string naming a ``unittest.TestCase`` subclass (or a package or module - containing one or more of them, or a method of such a subclass), or naming - a function that can be called with no arguments and returns a - ``unittest.TestSuite``. 
If the named suite is a module, and the module - has an ``additional_tests()`` function, it is called and the results are - added to the tests to be run. If the named suite is a package, any - submodules and subpackages are recursively added to the overall test suite. - - Specifying this argument enables use of the `test`_ command to run the - specified test suite, e.g. via ``setup.py test``. See the section on the - `test`_ command below for more details. - -``tests_require`` - If your project's tests need one or more additional packages besides those - needed to install it, you can use this option to specify them. It should - be a string or list of strings specifying what other distributions need to - be present for the package's tests to run. When you run the ``test`` - command, ``setuptools`` will attempt to obtain these (even going - so far as to download them using ``EasyInstall``). Note that these - required projects will *not* be installed on the system where the tests - are run, but only downloaded to the project's setup directory if they're - not already installed locally. - -.. _test_loader: - -``test_loader`` - If you would like to use a different way of finding tests to run than what - setuptools normally uses, you can specify a module name and class name in - this argument. The named class must be instantiable with no arguments, and - its instances must support the ``loadTestsFromNames()`` method as defined - in the Python ``unittest`` module's ``TestLoader`` class. Setuptools will - pass only one test "name" in the `names` argument: the value supplied for - the ``test_suite`` argument. The loader you specify may interpret this - string in any way it likes, as there are no restrictions on what may be - contained in a ``test_suite`` string. - - The module name and class name must be separated by a ``:``. The default - value of this argument is ``"setuptools.command.test:ScanningLoader"``. 
If - you want to use the default ``unittest`` behavior, you can specify - ``"unittest:TestLoader"`` as your ``test_loader`` argument instead. This - will prevent automatic scanning of submodules and subpackages. - - The module and class you specify here may be contained in another package, - as long as you use the ``tests_require`` option to ensure that the package - containing the loader class is available when the ``test`` command is run. - -``eager_resources`` - A list of strings naming resources that should be extracted together, if - any of them is needed, or if any C extensions included in the project are - imported. This argument is only useful if the project will be installed as - a zipfile, and there is a need to have all of the listed resources be - extracted to the filesystem *as a unit*. Resources listed here - should be '/'-separated paths, relative to the source root, so to list a - resource ``foo.png`` in package ``bar.baz``, you would include the string - ``bar/baz/foo.png`` in this argument. - - If you only need to obtain resources one at a time, or you don't have any C - extensions that access other files in the project (such as data files or - shared libraries), you probably do NOT need this argument and shouldn't - mess with it. For more details on how this argument works, see the section - below on `Automatic Resource Extraction`_. - -``use_2to3`` - Convert the source code from Python 2 to Python 3 with 2to3 during the - build process. See :doc:`python3` for more details. - -``convert_2to3_doctests`` - List of doctest source files that need to be converted with 2to3. - See :doc:`python3` for more details. - -``use_2to3_fixers`` - A list of modules to search for additional fixers to be used during - the 2to3 conversion. See :doc:`python3` for more details. - - -Using ``find_packages()`` -------------------------- - -For simple projects, it's usually easy enough to manually add packages to -the ``packages`` argument of ``setup()``. 
However, for very large projects -(Twisted, PEAK, Zope, Chandler, etc.), it can be a big burden to keep the -package list updated. That's what ``setuptools.find_packages()`` is for. - -``find_packages()`` takes a source directory, and a list of package names or -patterns to exclude. If omitted, the source directory defaults to the same -directory as the setup script. Some projects use a ``src`` or ``lib`` -directory as the root of their source tree, and those projects would of course -use ``"src"`` or ``"lib"`` as the first argument to ``find_packages()``. (And -such projects also need something like ``package_dir = {'':'src'}`` in their -``setup()`` arguments, but that's just a normal distutils thing.) - -Anyway, ``find_packages()`` walks the target directory, and finds Python -packages by looking for ``__init__.py`` files. It then filters the list of -packages using the exclusion patterns. - -Exclusion patterns are package names, optionally including wildcards. For -example, ``find_packages(exclude=["*.tests"])`` will exclude all packages whose -last name part is ``tests``. Or, ``find_packages(exclude=["*.tests", -"*.tests.*"])`` will also exclude any subpackages of packages named ``tests``, -but it still won't exclude a top-level ``tests`` package or the children -thereof. In fact, if you really want no ``tests`` packages at all, you'll need -something like this:: - - find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]) - -in order to cover all the bases. Really, the exclusion patterns are intended -to cover simpler use cases than this, like excluding a single, specified -package and its subpackages. - -Regardless of the target directory or exclusions, the ``find_packages()`` -function returns a list of package names suitable for use as the ``packages`` -argument to ``setup()``, and so is usually the easiest way to set that -argument in your setup script. 
Especially since it frees you from having to -remember to modify your setup script whenever your project grows additional -top-level packages or subpackages. - - -Automatic Script Creation -========================= - -Packaging and installing scripts can be a bit awkward with the distutils. For -one thing, there's no easy way to have a script's filename match local -conventions on both Windows and POSIX platforms. For another, you often have -to create a separate file just for the "main" script, when your actual "main" -is a function in a module somewhere. And even in Python 2.4, using the ``-m`` -option only works for actual ``.py`` files that aren't installed in a package. - -``setuptools`` fixes all of these problems by automatically generating scripts -for you with the correct extension, and on Windows it will even create an -``.exe`` file so that users don't have to change their ``PATHEXT`` settings. -The way to use this feature is to define "entry points" in your setup script -that indicate what function the generated script should import and run. For -example, to create two console scripts called ``foo`` and ``bar``, and a GUI -script called ``baz``, you might do something like this:: - - setup( - # other arguments here... - entry_points = { - 'console_scripts': [ - 'foo = my_package.some_module:main_func', - 'bar = other_module:some_func', - ], - 'gui_scripts': [ - 'baz = my_package_gui.start_func', - ] - } - ) - -When this project is installed on non-Windows platforms (using "setup.py -install", "setup.py develop", or by using EasyInstall), a set of ``foo``, -``bar``, and ``baz`` scripts will be installed that import ``main_func`` and -``some_func`` from the specified modules. The functions you specify are called -with no arguments, and their return value is passed to ``sys.exit()``, so you -can return an errorlevel or message to print to stderr. 
- -On Windows, a set of ``foo.exe``, ``bar.exe``, and ``baz.exe`` launchers are -created, alongside a set of ``foo.py``, ``bar.py``, and ``baz.pyw`` files. The -``.exe`` wrappers find and execute the right version of Python to run the -``.py`` or ``.pyw`` file. - -You may define as many "console script" and "gui script" entry points as you -like, and each one can optionally specify "extras" that it depends on, that -will be added to ``sys.path`` when the script is run. For more information on -"extras", see the section below on `Declaring Extras`_. For more information -on "entry points" in general, see the section below on `Dynamic Discovery of -Services and Plugins`_. - - -"Eggsecutable" Scripts ----------------------- - -Occasionally, there are situations where it's desirable to make an ``.egg`` -file directly executable. You can do this by including an entry point such -as the following:: - - setup( - # other arguments here... - entry_points = { - 'setuptools.installation': [ - 'eggsecutable = my_package.some_module:main_func', - ] - } - ) - -Any eggs built from the above setup script will include a short executable -prelude that imports and calls ``main_func()`` from ``my_package.some_module``. -The prelude can be run on Unix-like platforms (including Mac and Linux) by -invoking the egg with ``/bin/sh``, or by enabling execute permissions on the -``.egg`` file. For the executable prelude to run, the appropriate version of -Python must be available via the ``PATH`` environment variable, under its -"long" name. That is, if the egg is built for Python 2.3, there must be a -``python2.3`` executable present in a directory on ``PATH``. - -This feature is primarily intended to support ez_setup, the installation of -setuptools itself on non-Windows platforms, but may also be useful for other -projects. - -IMPORTANT NOTE: Eggs with an "eggsecutable" header cannot be renamed, or -invoked via symlinks. 
They *must* be invoked using their original filename, in -order to ensure that, once running, ``pkg_resources`` will know what project -and version is in use. The header script will check this and exit with an -error if the ``.egg`` file has been renamed or is invoked via a symlink that -changes its base name. - - -Declaring Dependencies -====================== - -``setuptools`` supports automatically installing dependencies when a package is -installed, and including information about dependencies in Python Eggs (so that -package management tools like EasyInstall can use the information). - -``setuptools`` and ``pkg_resources`` use a common syntax for specifying a -project's required dependencies. This syntax consists of a project's PyPI -name, optionally followed by a comma-separated list of "extras" in square -brackets, optionally followed by a comma-separated list of version -specifiers. A version specifier is one of the operators ``<``, ``>``, ``<=``, -``>=``, ``==`` or ``!=``, followed by a version identifier. Tokens may be -separated by whitespace, but any whitespace or nonstandard characters within a -project name or version identifier must be replaced with ``-``. - -Version specifiers for a given project are internally sorted into ascending -version order, and used to establish what ranges of versions are acceptable. -Adjacent redundant conditions are also consolidated (e.g. ``">1, >2"`` becomes -``">1"``, and ``"<2,<3"`` becomes ``"<3"``). ``"!="`` versions are excised from -the ranges they fall within. A project's version is then checked for -membership in the resulting ranges. (Note that providing conflicting conditions -for the same version (e.g. "<2,>=2" or "==2,!=2") is meaningless and may -therefore produce bizarre results.) 
- -Here are some example requirement specifiers:: - - docutils >= 0.3 - - # comment lines and \ continuations are allowed in requirement strings - BazSpam ==1.1, ==1.2, ==1.3, ==1.4, ==1.5, \ - ==1.6, ==1.7 # and so are line-end comments - - PEAK[FastCGI, reST]>=0.5a4 - - setuptools==0.5a7 - -The simplest way to include requirement specifiers is to use the -``install_requires`` argument to ``setup()``. It takes a string or list of -strings containing requirement specifiers. If you include more than one -requirement in a string, each requirement must begin on a new line. - -This has three effects: - -1. When your project is installed, either by using EasyInstall, ``setup.py - install``, or ``setup.py develop``, all of the dependencies not already - installed will be located (via PyPI), downloaded, built (if necessary), - and installed. - -2. Any scripts in your project will be installed with wrappers that verify - the availability of the specified dependencies at runtime, and ensure that - the correct versions are added to ``sys.path`` (e.g. if multiple versions - have been installed). - -3. Python Egg distributions will include a metadata file listing the - dependencies. - -Note, by the way, that if you declare your dependencies in ``setup.py``, you do -*not* need to use the ``require()`` function in your scripts or modules, as -long as you either install the project or use ``setup.py develop`` to do -development work on it. (See `"Development Mode"`_ below for more details on -using ``setup.py develop``.) - - -Dependencies that aren't in PyPI --------------------------------- - -If your project depends on packages that aren't registered in PyPI, you may -still be able to depend on them, as long as they are available for download -as: - -- an egg, in the standard distutils ``sdist`` format, -- a single ``.py`` file, or -- a VCS repository (Subversion, Mercurial, or Git). - -You just need to add some URLs to the ``dependency_links`` argument to -``setup()``. 
- -The URLs must be either: - -1. direct download URLs, -2. the URLs of web pages that contain direct download links, or -3. the repository's URL - -In general, it's better to link to web pages, because it is usually less -complex to update a web page than to release a new version of your project. -You can also use a SourceForge ``showfiles.php`` link in the case where a -package you depend on is distributed via SourceForge. - -If you depend on a package that's distributed as a single ``.py`` file, you -must include an ``"#egg=project-version"`` suffix to the URL, to give a project -name and version number. (Be sure to escape any dashes in the name or version -by replacing them with underscores.) EasyInstall will recognize this suffix -and automatically create a trivial ``setup.py`` to wrap the single ``.py`` file -as an egg. - -In the case of a VCS checkout, you should also append ``#egg=project-version`` -in order to identify for what package that checkout should be used. You can -append ``@REV`` to the URL's path (before the fragment) to specify a revision. -Additionally, you can also force the VCS being used by prepending the URL with -a certain prefix. Currently available are: - -- ``svn+URL`` for Subversion, -- ``git+URL`` for Git, and -- ``hg+URL`` for Mercurial - -A more complete example would be: - - ``vcs+proto://host/path@revision#egg=project-version`` - -Be careful with the version. It should match the one inside the project files. -If you want to disregard the version, you have to omit it both in the -``requires`` and in the URL's fragment. - -This will do a checkout (or a clone, in Git and Mercurial parlance) to a -temporary folder and run ``setup.py bdist_egg``. - -The ``dependency_links`` option takes the form of a list of URL strings. For -example, the below will cause EasyInstall to search the specified page for -eggs or source distributions, if the package's dependencies aren't already -installed:: - - setup( - ... 
- dependency_links = [ - "http://peak.telecommunity.com/snapshots/" - ], - ) - - -.. _Declaring Extras: - - -Declaring "Extras" (optional features with their own dependencies) ------------------------------------------------------------------- - -Sometimes a project has "recommended" dependencies, that are not required for -all uses of the project. For example, a project might offer optional PDF -output if ReportLab is installed, and reStructuredText support if docutils is -installed. These optional features are called "extras", and setuptools allows -you to define their requirements as well. In this way, other projects that -require these optional features can force the additional requirements to be -installed, by naming the desired extras in their ``install_requires``. - -For example, let's say that Project A offers optional PDF and reST support:: - - setup( - name="Project-A", - ... - extras_require = { - 'PDF': ["ReportLab>=1.2", "RXP"], - 'reST': ["docutils>=0.3"], - } - ) - -As you can see, the ``extras_require`` argument takes a dictionary mapping -names of "extra" features, to strings or lists of strings describing those -features' requirements. These requirements will *not* be automatically -installed unless another package depends on them (directly or indirectly) by -including the desired "extras" in square brackets after the associated project -name. (Or if the extras were listed in a requirement spec on the EasyInstall -command line.) - -Extras can be used by a project's `entry points`_ to specify dynamic -dependencies. For example, if Project A includes a "rst2pdf" script, it might -declare it like this, so that the "PDF" requirements are only resolved if the -"rst2pdf" script is run:: - - setup( - name="Project-A", - ... - entry_points = { - 'console_scripts': [ - 'rst2pdf = project_a.tools.pdfgen [PDF]', - 'rst2html = project_a.tools.htmlgen', - # more script entry points ... 
- ], - } - ) - -Projects can also use another project's extras when specifying dependencies. -For example, if project B needs "project A" with PDF support installed, it -might declare the dependency like this:: - - setup( - name="Project-B", - install_requires = ["Project-A[PDF]"], - ... - ) - -This will cause ReportLab to be installed along with project A, if project B is -installed -- even if project A was already installed. In this way, a project -can encapsulate groups of optional "downstream dependencies" under a feature -name, so that packages that depend on it don't have to know what the downstream -dependencies are. If a later version of Project A builds in PDF support and -no longer needs ReportLab, or if it ends up needing other dependencies besides -ReportLab in order to provide PDF support, Project B's setup information does -not need to change, but the right packages will still be installed if needed. - -Note, by the way, that if a project ends up not needing any other packages to -support a feature, it should keep an empty requirements list for that feature -in its ``extras_require`` argument, so that packages depending on that feature -don't break (due to an invalid feature name). For example, if Project A above -builds in PDF support and no longer needs ReportLab, it could change its -setup to this:: - - setup( - name="Project-A", - ... - extras_require = { - 'PDF': [], - 'reST': ["docutils>=0.3"], - } - ) - -so that Package B doesn't have to remove the ``[PDF]`` from its requirement -specifier. - - -Including Data Files -==================== - -The distutils have traditionally allowed installation of "data files", which -are placed in a platform-specific location. However, the most common use case -for data files distributed with a package is for use *by* the package, usually -by including the data files in the package directory. - -Setuptools offers three ways to specify data files to be included in your -packages. 
First, you can simply use the ``include_package_data`` keyword, -e.g.:: - - from setuptools import setup, find_packages - setup( - ... - include_package_data = True - ) - -This tells setuptools to install any data files it finds in your packages. -The data files must be under CVS or Subversion control, or else they must be -specified via the distutils' ``MANIFEST.in`` file. (They can also be tracked -by another revision control system, using an appropriate plugin. See the -section below on `Adding Support for Other Revision Control Systems`_ for -information on how to write such plugins.) - -If the data files are not under version control, or are not in a supported -version control system, or if you want finer-grained control over what files -are included (for example, if you have documentation files in your package -directories and want to exclude them from installation), then you can also use -the ``package_data`` keyword, e.g.:: - - from setuptools import setup, find_packages - setup( - ... - package_data = { - # If any package contains *.txt or *.rst files, include them: - '': ['*.txt', '*.rst'], - # And include any *.msg files found in the 'hello' package, too: - 'hello': ['*.msg'], - } - ) - -The ``package_data`` argument is a dictionary that maps from package names to -lists of glob patterns. The globs may include subdirectory names, if the data -files are contained in a subdirectory of the package. For example, if the -package tree looks like this:: - - setup.py - src/ - mypkg/ - __init__.py - mypkg.txt - data/ - somefile.dat - otherdata.dat - -The setuptools setup file might look like this:: - - from setuptools import setup, find_packages - setup( - ... 
- packages = find_packages('src'), # include all packages under src - package_dir = {'':'src'}, # tell distutils packages are under src - - package_data = { - # If any package contains *.txt files, include them: - '': ['*.txt'], - # And include any *.dat files found in the 'data' subdirectory - # of the 'mypkg' package, also: - 'mypkg': ['data/*.dat'], - } - ) - -Notice that if you list patterns in ``package_data`` under the empty string, -these patterns are used to find files in every package, even ones that also -have their own patterns listed. Thus, in the above example, the ``mypkg.txt`` -file gets included even though it's not listed in the patterns for ``mypkg``. - -Also notice that if you use paths, you *must* use a forward slash (``/``) as -the path separator, even if you are on Windows. Setuptools automatically -converts slashes to appropriate platform-specific separators at build time. - -(Note: although the ``package_data`` argument was previously only available in -``setuptools``, it was also added to the Python ``distutils`` package as of -Python 2.4; there is `some documentation for the feature`__ available on the -python.org website. If using the setuptools-specific ``include_package_data`` -argument, files specified by ``package_data`` will *not* be automatically -added to the manifest unless they are tracked by a supported version control -system, or are listed in the MANIFEST.in file.) - -__ http://docs.python.org/dist/node11.html - -Sometimes, the ``include_package_data`` or ``package_data`` options alone -aren't sufficient to precisely define what files you want included. For -example, you may want to include package README files in your revision control -system and source distributions, but exclude them from being installed. So, -setuptools offers an ``exclude_package_data`` option as well, that allows you -to do things like this:: - - from setuptools import setup, find_packages - setup( - ... 
- packages = find_packages('src'), # include all packages under src - package_dir = {'':'src'}, # tell distutils packages are under src - - include_package_data = True, # include everything in source control - - # ...but exclude README.txt from all packages - exclude_package_data = { '': ['README.txt'] }, - ) - -The ``exclude_package_data`` option is a dictionary mapping package names to -lists of wildcard patterns, just like the ``package_data`` option. And, just -as with that option, a key of ``''`` will apply the given pattern(s) to all -packages. However, any files that match these patterns will be *excluded* -from installation, even if they were listed in ``package_data`` or were -included as a result of using ``include_package_data``. - -In summary, the three options allow you to: - -``include_package_data`` - Accept all data files and directories matched by ``MANIFEST.in`` or found - in source control. - -``package_data`` - Specify additional patterns to match files and directories that may or may - not be matched by ``MANIFEST.in`` or found in source control. - -``exclude_package_data`` - Specify patterns for data files and directories that should *not* be - included when a package is installed, even if they would otherwise have - been included due to the use of the preceding options. - -NOTE: Due to the way the distutils build process works, a data file that you -include in your project and then stop including may be "orphaned" in your -project's build directories, requiring you to run ``setup.py clean --all`` to -fully remove them. This may also be important for your users and contributors -if they track intermediate revisions of your project using Subversion; be sure -to let them know when you make changes that remove files from inclusion so they -can run ``setup.py clean --all``. 
- - -Accessing Data Files at Runtime -------------------------------- - -Typically, existing programs manipulate a package's ``__file__`` attribute in -order to find the location of data files. However, this manipulation isn't -compatible with PEP 302-based import hooks, including importing from zip files -and Python Eggs. It is strongly recommended that, if you are using data files, -you should use the `Resource Management API`_ of ``pkg_resources`` to access -them. The ``pkg_resources`` module is distributed as part of setuptools, so if -you're using setuptools to distribute your package, there is no reason not to -use its resource management API. See also `Accessing Package Resources`_ for -a quick example of converting code that uses ``__file__`` to use -``pkg_resources`` instead. - -.. _Resource Management API: http://peak.telecommunity.com/DevCenter/PythonEggs#resource-management -.. _Accessing Package Resources: http://peak.telecommunity.com/DevCenter/PythonEggs#accessing-package-resources - - -Non-Package Data Files ----------------------- - -The ``distutils`` normally install general "data files" to a platform-specific -location (e.g. ``/usr/share``). This feature is intended to be used for things -like documentation, example configuration files, and the like. ``setuptools`` -does not install these data files in a separate location, however. They are -bundled inside the egg file or directory, alongside the Python modules and -packages. The data files can also be accessed using the `Resource Management -API`_, by specifying a ``Requirement`` instead of a package name:: - - from pkg_resources import Requirement, resource_filename - filename = resource_filename(Requirement.parse("MyProject"),"sample.conf") - -The above code will obtain the filename of the "sample.conf" file in the data -root of the "MyProject" distribution. 
- -Note, by the way, that this encapsulation of data files means that you can't -actually install data files to some arbitrary location on a user's machine; -this is a feature, not a bug. You can always include a script in your -distribution that extracts and copies the documentation or data files to -a user-specified location, at their discretion. If you put related data files -in a single directory, you can use ``resource_filename()`` with the directory -name to get a filesystem directory that then can be copied with the ``shutil`` -module. (Even if your package is installed as a zipfile, calling -``resource_filename()`` on a directory will return an actual filesystem -directory, whose contents will be that entire subtree of your distribution.) - -(Of course, if you're writing a new package, you can just as easily place your -data files or directories inside one of your packages, rather than using the -distutils' approach. However, if you're updating an existing application, it -may be simpler not to change the way it currently specifies these data files.) - - -Automatic Resource Extraction ------------------------------ - -If you are using tools that expect your resources to be "real" files, or your -project includes non-extension native libraries or other files that your C -extensions expect to be able to access, you may need to list those files in -the ``eager_resources`` argument to ``setup()``, so that the files will be -extracted together, whenever a C extension in the project is imported. - -This is especially important if your project includes shared libraries *other* -than distutils-built C extensions, and those shared libraries use file -extensions other than ``.dll``, ``.so``, or ``.dylib``, which are the -extensions that setuptools 0.6a8 and higher automatically detects as shared -libraries and adds to the ``native_libs.txt`` file for you. 
Any shared -libraries whose names do not end with one of those extensions should be listed -as ``eager_resources``, because they need to be present in the filesystem when -the C extensions that link to them are used. - -The ``pkg_resources`` runtime for compressed packages will automatically -extract *all* C extensions and ``eager_resources`` at the same time, whenever -*any* C extension or eager resource is requested via the ``resource_filename()`` -API. (C extensions are imported using ``resource_filename()`` internally.) -This ensures that C extensions will see all of the "real" files that they -expect to see. - -Note also that you can list directory resource names in ``eager_resources`` as -well, in which case the directory's contents (including subdirectories) will be -extracted whenever any C extension or eager resource is requested. - -Please note that if you're not sure whether you need to use this argument, you -don't! It's really intended to support projects with lots of non-Python -dependencies and as a last resort for crufty projects that can't otherwise -handle being compressed. If your package is pure Python, Python plus data -files, or Python plus C, you really don't need this. You've got to be using -either C or an external program that needs "real" files in your project before -there's any possibility of ``eager_resources`` being relevant to your project. - - -Extensible Applications and Frameworks -====================================== - - -.. _Entry Points: - -Dynamic Discovery of Services and Plugins ------------------------------------------ - -``setuptools`` supports creating libraries that "plug in" to extensible -applications and frameworks, by letting you register "entry points" in your -project that can be imported by the application or framework. - -For example, suppose that a blogging tool wants to support plugins -that provide translation for various file types to the blog's output format. 
-The framework might define an "entry point group" called ``blogtool.parsers``, -and then allow plugins to register entry points for the file extensions they -support. - -This would allow people to create distributions that contain one or more -parsers for different file types, and then the blogging tool would be able to -find the parsers at runtime by looking up an entry point for the file -extension (or mime type, or however it wants to). - -Note that if the blogging tool includes parsers for certain file formats, it -can register these as entry points in its own setup script, which means it -doesn't have to special-case its built-in formats. They can just be treated -the same as any other plugin's entry points would be. - -If you're creating a project that plugs in to an existing application or -framework, you'll need to know what entry points or entry point groups are -defined by that application or framework. Then, you can register entry points -in your setup script. Here are a few examples of ways you might register an -``.rst`` file parser entry point in the ``blogtool.parsers`` entry point group, -for our hypothetical blogging tool:: - - setup( - # ... - entry_points = {'blogtool.parsers': '.rst = some_module:SomeClass'} - ) - - setup( - # ... - entry_points = {'blogtool.parsers': ['.rst = some_module:a_func']} - ) - - setup( - # ... - entry_points = """ - [blogtool.parsers] - .rst = some.nested.module:SomeClass.some_classmethod [reST] - """, - extras_require = dict(reST = "Docutils>=0.3.5") - ) - -The ``entry_points`` argument to ``setup()`` accepts either a string with -``.ini``-style sections, or a dictionary mapping entry point group names to -either strings or lists of strings containing entry point specifiers. An -entry point specifier consists of a name and value, separated by an ``=`` -sign. The value consists of a dotted module name, optionally followed by a -``:`` and a dotted identifier naming an object within the module. 
It can -also include a bracketed list of "extras" that are required for the entry -point to be used. When the invoking application or framework requests loading -of an entry point, any requirements implied by the associated extras will be -passed to ``pkg_resources.require()``, so that an appropriate error message -can be displayed if the needed package(s) are missing. (Of course, the -invoking app or framework can ignore such errors if it wants to make an entry -point optional if a requirement isn't installed.) - - -Defining Additional Metadata ----------------------------- - -Some extensible applications and frameworks may need to define their own kinds -of metadata to include in eggs, which they can then access using the -``pkg_resources`` metadata APIs. Ordinarily, this is done by having plugin -developers include additional files in their ``ProjectName.egg-info`` -directory. However, since it can be tedious to create such files by hand, you -may want to create a distutils extension that will create the necessary files -from arguments to ``setup()``, in much the same way that ``setuptools`` does -for many of the ``setup()`` arguments it adds. See the section below on -`Creating distutils Extensions`_ for more details, especially the subsection on -`Adding new EGG-INFO Files`_. - - -"Development Mode" -================== - -Under normal circumstances, the ``distutils`` assume that you are going to -build a distribution of your project, not use it in its "raw" or "unbuilt" -form. If you were to use the ``distutils`` that way, you would have to rebuild -and reinstall your project every time you made a change to it during -development. - -Another problem that sometimes comes up with the ``distutils`` is that you may -need to do development on two related projects at the same time. You may need -to put both projects' packages in the same directory to run them, but need to -keep them separate for revision control purposes. How can you do this? 
- -Setuptools allows you to deploy your projects for use in a common directory or -staging area, but without copying any files. Thus, you can edit each project's -code in its checkout directory, and only need to run build commands when you -change a project's C extensions or similarly compiled files. You can even -deploy a project into another project's checkout directory, if that's your -preferred way of working (as opposed to using a common independent staging area -or the site-packages directory). - -To do this, use the ``setup.py develop`` command. It works very similarly to -``setup.py install`` or the EasyInstall tool, except that it doesn't actually -install anything. Instead, it creates a special ``.egg-link`` file in the -deployment directory, that links to your project's source code. And, if your -deployment directory is Python's ``site-packages`` directory, it will also -update the ``easy-install.pth`` file to include your project's source code, -thereby making it available on ``sys.path`` for all programs using that Python -installation. - -If you have enabled the ``use_2to3`` flag, then of course the ``.egg-link`` -will not link directly to your source code when run under Python 3, since -that source code would be made for Python 2 and not work under Python 3. -Instead the ``setup.py develop`` will build Python 3 code under the ``build`` -directory, and link there. This means that after doing code changes you will -have to run ``setup.py build`` before these changes are picked up by your -Python 3 installation. - -In addition, the ``develop`` command creates wrapper scripts in the target -script directory that will run your in-development scripts after ensuring that -all your ``install_requires`` packages are available on ``sys.path``. - -You can deploy the same project to multiple staging areas, e.g. if you have -multiple projects on the same machine that are sharing the same project you're -doing development work. 
- -When you're done with a given development task, you can remove the project -source from a staging area using ``setup.py develop --uninstall``, specifying -the desired staging area if it's not the default. - -There are several options to control the precise behavior of the ``develop`` -command; see the section on the `develop`_ command below for more details. - -Note that you can also apply setuptools commands to non-setuptools projects, -using commands like this:: - - python -c "import setuptools; execfile('setup.py')" develop - -That is, you can simply list the normal setup commands and options following -the quoted part. - - -Distributing a ``setuptools``-based project -=========================================== - -Using ``setuptools``... Without bundling it! ---------------------------------------------- - -Your users might not have ``setuptools`` installed on their machines, or even -if they do, it might not be the right version. Fixing this is easy; just -download `ez_setup.py`_, and put it in the same directory as your ``setup.py`` -script. (Be sure to add it to your revision control system, too.) Then add -these two lines to the very top of your setup script, before the script imports -anything from setuptools: - -.. code-block:: python - - import ez_setup - ez_setup.use_setuptools() - -That's it. The ``ez_setup`` module will automatically download a matching -version of ``setuptools`` from PyPI, if it isn't present on the target system. -Whenever you install an updated version of setuptools, you should also update -your projects' ``ez_setup.py`` files, so that a matching version gets installed -on the target machine(s). - -By the way, setuptools supports the new PyPI "upload" command, so you can use -``setup.py sdist upload`` or ``setup.py bdist_egg upload`` to upload your -source or egg distributions respectively. Your project's current version must -be registered with PyPI first, of course; you can use ``setup.py register`` to -do that. 
Or you can do it all in one step, e.g. ``setup.py register sdist -bdist_egg upload`` will register the package, build source and egg -distributions, and then upload them both to PyPI, where they'll be easily -found by other projects that depend on them. - -(By the way, if you need to distribute a specific version of ``setuptools``, -you can specify the exact version and base download URL as parameters to the -``use_setuptools()`` function. See the function's docstring for details.) - - -What Your Users Should Know ---------------------------- - -In general, a setuptools-based project looks just like any distutils-based -project -- as long as your users have an internet connection and are installing -to ``site-packages``, that is. But for some users, these conditions don't -apply, and they may become frustrated if this is their first encounter with -a setuptools-based project. To keep these users happy, you should review the -following topics in your project's installation instructions, if they are -relevant to your project and your target audience isn't already familiar with -setuptools and ``easy_install``. - -Network Access - If your project is using ``ez_setup``, you should inform users of the - need to either have network access, or to preinstall the correct version of - setuptools using the `EasyInstall installation instructions`_. Those - instructions also have tips for dealing with firewalls as well as how to - manually download and install setuptools. - -Custom Installation Locations - You should inform your users that if they are installing your project to - somewhere other than the main ``site-packages`` directory, they should - first install setuptools using the instructions for `Custom Installation - Locations`_, before installing your project. 
- -Your Project's Dependencies - If your project depends on other projects that may need to be downloaded - from PyPI or elsewhere, you should list them in your installation - instructions, or tell users how to find out what they are. While most - users will not need this information, any users who don't have unrestricted - internet access may have to find, download, and install the other projects - manually. (Note, however, that they must still install those projects - using ``easy_install``, or your project will not know they are installed, - and your setup script will try to download them again.) - - If you want to be especially friendly to users with limited network access, - you may wish to build eggs for your project and its dependencies, making - them all available for download from your site, or at least create a page - with links to all of the needed eggs. In this way, users with limited - network access can manually download all the eggs to a single directory, - then use the ``-f`` option of ``easy_install`` to specify the directory - to find eggs in. Users who have full network access can just use ``-f`` - with the URL of your download page, and ``easy_install`` will find all the - needed eggs using your links directly. This is also useful when your - target audience isn't able to compile packages (e.g. most Windows users) - and your package or some of its dependencies include C code. - -Subversion or CVS Users and Co-Developers - Users and co-developers who are tracking your in-development code using - CVS, Subversion, or some other revision control system should probably read - this manual's sections regarding such development. Alternately, you may - wish to create a quick-reference guide containing the tips from this manual - that apply to your particular situation. 
For example, if you recommend - that people use ``setup.py develop`` when tracking your in-development - code, you should let them know that this needs to be run after every update - or commit. - - Similarly, if you remove modules or data files from your project, you - should remind them to run ``setup.py clean --all`` and delete any obsolete - ``.pyc`` or ``.pyo``. (This tip applies to the distutils in general, not - just setuptools, but not everybody knows about them; be kind to your users - by spelling out your project's best practices rather than leaving them - guessing.) - -Creating System Packages - Some users want to manage all Python packages using a single package - manager, and sometimes that package manager isn't ``easy_install``! - Setuptools currently supports ``bdist_rpm``, ``bdist_wininst``, and - ``bdist_dumb`` formats for system packaging. If a user has a locally- - installed "bdist" packaging tool that internally uses the distutils - ``install`` command, it should be able to work with ``setuptools``. Some - examples of "bdist" formats that this should work with include the - ``bdist_nsi`` and ``bdist_msi`` formats for Windows. - - However, packaging tools that build binary distributions by running - ``setup.py install`` on the command line or as a subprocess will require - modification to work with setuptools. They should use the - ``--single-version-externally-managed`` option to the ``install`` command, - combined with the standard ``--root`` or ``--record`` options. - See the `install command`_ documentation below for more details. The - ``bdist_deb`` command is an example of a command that currently requires - this kind of patching to work with setuptools. - - If you or your users have a problem building a usable system package for - your project, please report the problem via the mailing list so that - either the "bdist" tool in question or setuptools can be modified to - resolve the issue. 
- - -Setting the ``zip_safe`` flag ------------------------------ - -For maximum performance, Python packages are best installed as zip files. -Not all packages, however, are capable of running in compressed form, because -they may expect to be able to access either source code or data files as -normal operating system files. So, ``setuptools`` can install your project -as a zipfile or a directory, and its default choice is determined by the -project's ``zip_safe`` flag. - -You can pass a True or False value for the ``zip_safe`` argument to the -``setup()`` function, or you can omit it. If you omit it, the ``bdist_egg`` -command will analyze your project's contents to see if it can detect any -conditions that would prevent it from working in a zipfile. It will output -notices to the console about any such conditions that it finds. - -Currently, this analysis is extremely conservative: it will consider the -project unsafe if it contains any C extensions or datafiles whatsoever. This -does *not* mean that the project can't or won't work as a zipfile! It just -means that the ``bdist_egg`` authors aren't yet comfortable asserting that -the project *will* work. If the project contains no C or data files, and does -no ``__file__`` or ``__path__`` introspection or source code manipulation, then -there is an extremely solid chance the project will work when installed as a -zipfile. (And if the project uses ``pkg_resources`` for all its data file -access, then C extensions and other data files shouldn't be a problem at all. -See the `Accessing Data Files at Runtime`_ section above for more information.) - -However, if ``bdist_egg`` can't be *sure* that your package will work, but -you've checked over all the warnings it issued, and you are either satisfied it -*will* work (or if you want to try it for yourself), then you should set -``zip_safe`` to ``True`` in your ``setup()`` call. 
If it turns out that it -doesn't work, you can always change it to ``False``, which will force -``setuptools`` to install your project as a directory rather than as a zipfile. - -Of course, the end-user can still override either decision, if they are using -EasyInstall to install your package. And, if you want to override for testing -purposes, you can just run ``setup.py easy_install --zip-ok .`` or ``setup.py -easy_install --always-unzip .`` in your project directory. to install the -package as a zipfile or directory, respectively. - -In the future, as we gain more experience with different packages and become -more satisfied with the robustness of the ``pkg_resources`` runtime, the -"zip safety" analysis may become less conservative. However, we strongly -recommend that you determine for yourself whether your project functions -correctly when installed as a zipfile, correct any problems if you can, and -then make an explicit declaration of ``True`` or ``False`` for the ``zip_safe`` -flag, so that it will not be necessary for ``bdist_egg`` or ``EasyInstall`` to -try to guess whether your project can work as a zipfile. - - -Namespace Packages ------------------- - -Sometimes, a large package is more useful if distributed as a collection of -smaller eggs. However, Python does not normally allow the contents of a -package to be retrieved from more than one location. "Namespace packages" -are a solution for this problem. When you declare a package to be a namespace -package, it means that the package has no meaningful contents in its -``__init__.py``, and that it is merely a container for modules and subpackages. - -The ``pkg_resources`` runtime will then automatically ensure that the contents -of namespace packages that are spread over multiple eggs or directories are -combined into a single "virtual" package. 
- -The ``namespace_packages`` argument to ``setup()`` lets you declare your -project's namespace packages, so that they will be included in your project's -metadata. The argument should list the namespace packages that the egg -participates in. For example, the ZopeInterface project might do this:: - - setup( - # ... - namespace_packages = ['zope'] - ) - -because it contains a ``zope.interface`` package that lives in the ``zope`` -namespace package. Similarly, a project for a standalone ``zope.publisher`` -would also declare the ``zope`` namespace package. When these projects are -installed and used, Python will see them both as part of a "virtual" ``zope`` -package, even though they will be installed in different locations. - -Namespace packages don't have to be top-level packages. For example, Zope 3's -``zope.app`` package is a namespace package, and in the future PEAK's -``peak.util`` package will be too. - -Note, by the way, that your project's source tree must include the namespace -packages' ``__init__.py`` files (and the ``__init__.py`` of any parent -packages), in a normal Python package layout. These ``__init__.py`` files -*must* contain the line:: - - __import__('pkg_resources').declare_namespace(__name__) - -This code ensures that the namespace package machinery is operating and that -the current package is registered as a namespace package. - -You must NOT include any other code and data in a namespace package's -``__init__.py``. Even though it may appear to work during development, or when -projects are installed as ``.egg`` files, it will not work when the projects -are installed using "system" packaging tools -- in such cases the -``__init__.py`` files will not be installed, let alone executed. 
- -You must include the ``declare_namespace()`` line in the ``__init__.py`` of -*every* project that has contents for the namespace package in question, in -order to ensure that the namespace will be declared regardless of which -project's copy of ``__init__.py`` is loaded first. If the first loaded -``__init__.py`` doesn't declare it, it will never *be* declared, because no -other copies will ever be loaded!) - - -TRANSITIONAL NOTE -~~~~~~~~~~~~~~~~~ - -Setuptools automatically calls ``declare_namespace()`` for you at runtime, -but future versions may *not*. This is because the automatic declaration -feature has some negative side effects, such as needing to import all namespace -packages during the initialization of the ``pkg_resources`` runtime, and also -the need for ``pkg_resources`` to be explicitly imported before any namespace -packages work at all. In some future releases, you'll be responsible -for including your own declaration lines, and the automatic declaration feature -will be dropped to get rid of the negative side effects. - -During the remainder of the current development cycle, therefore, setuptools -will warn you about missing ``declare_namespace()`` calls in your -``__init__.py`` files, and you should correct these as soon as possible -before the compatibility support is removed. -Namespace packages without declaration lines will not work -correctly once a user has upgraded to a later version, so it's important that -you make this change now in order to avoid having your code break in the field. -Our apologies for the inconvenience, and thank you for your patience. - - - -Tagging and "Daily Build" or "Snapshot" Releases ------------------------------------------------- - -When a set of related projects are under development, it may be important to -track finer-grained version increments than you would normally use for e.g. -"stable" releases. While stable releases might be measured in dotted numbers -with alpha/beta/etc. 
status codes, development versions of a project often -need to be tracked by revision or build number or even build date. This is -especially true when projects in development need to refer to one another, and -therefore may literally need an up-to-the-minute version of something! - -To support these scenarios, ``setuptools`` allows you to "tag" your source and -egg distributions by adding one or more of the following to the project's -"official" version identifier: - -* A manually-specified pre-release tag, such as "build" or "dev", or a - manually-specified post-release tag, such as a build or revision number - (``--tag-build=STRING, -bSTRING``) - -* A "last-modified revision number" string generated automatically from - Subversion's metadata (assuming your project is being built from a Subversion - "working copy") (``--tag-svn-revision, -r``) - -* An 8-character representation of the build date (``--tag-date, -d``), as - a postrelease tag - -You can add these tags by adding ``egg_info`` and the desired options to -the command line ahead of the ``sdist`` or ``bdist`` commands that you want -to generate a daily build or snapshot for. See the section below on the -`egg_info`_ command for more details. - -(Also, before you release your project, be sure to see the section above on -`Specifying Your Project's Version`_ for more information about how pre- and -post-release tags affect how setuptools and EasyInstall interpret version -numbers. This is important in order to make sure that dependency processing -tools will know which versions of your project are newer than others.) - -Finally, if you are creating builds frequently, and either building them in a -downloadable location or are copying them to a distribution server, you should -probably also check out the `rotate`_ command, which lets you automatically -delete all but the N most-recently-modified distributions matching a glob -pattern. 
So, you can use a command line like:: - - setup.py egg_info -rbDEV bdist_egg rotate -m.egg -k3 - -to build an egg whose version info includes 'DEV-rNNNN' (where NNNN is the -most recent Subversion revision that affected the source tree), and then -delete any egg files from the distribution directory except for the three -that were built most recently. - -If you have to manage automated builds for multiple packages, each with -different tagging and rotation policies, you may also want to check out the -`alias`_ command, which would let each package define an alias like ``daily`` -that would perform the necessary tag, build, and rotate commands. Then, a -simpler script or cron job could just run ``setup.py daily`` in each project -directory. (And, you could also define sitewide or per-user default versions -of the ``daily`` alias, so that projects that didn't define their own would -use the appropriate defaults.) - - -Generating Source Distributions -------------------------------- - -``setuptools`` enhances the distutils' default algorithm for source file -selection, so that all files managed by CVS or Subversion in your project tree -are included in any source distribution you build. This is a big improvement -over having to manually write a ``MANIFEST.in`` file and try to keep it in -sync with your project. So, if you are using CVS or Subversion, and your -source distributions only need to include files that you're tracking in -revision control, don't create a a ``MANIFEST.in`` file for your project. -(And, if you already have one, you might consider deleting it the next time -you would otherwise have to change it.) - -(NOTE: other revision control systems besides CVS and Subversion can be -supported using plugins; see the section below on `Adding Support for Other -Revision Control Systems`_ for information on how to write such plugins.) 
- -If you need to include automatically generated files, or files that are kept in -an unsupported revision control system, you'll need to create a ``MANIFEST.in`` -file to specify any files that the default file location algorithm doesn't -catch. See the distutils documentation for more information on the format of -the ``MANIFEST.in`` file. - -But, be sure to ignore any part of the distutils documentation that deals with -``MANIFEST`` or how it's generated from ``MANIFEST.in``; setuptools shields you -from these issues and doesn't work the same way in any case. Unlike the -distutils, setuptools regenerates the source distribution manifest file -every time you build a source distribution, and it builds it inside the -project's ``.egg-info`` directory, out of the way of your main project -directory. You therefore need not worry about whether it is up-to-date or not. - -Indeed, because setuptools' approach to determining the contents of a source -distribution is so much simpler, its ``sdist`` command omits nearly all of -the options that the distutils' more complex ``sdist`` process requires. For -all practical purposes, you'll probably use only the ``--formats`` option, if -you use any option at all. - -(By the way, if you're using some other revision control system, you might -consider creating and publishing a `revision control plugin for setuptools`_.) - - -.. _revision control plugin for setuptools: `Adding Support for Other Revision Control Systems`_ - - -Making your package available for EasyInstall ---------------------------------------------- - -If you use the ``register`` command (``setup.py register``) to register your -package with PyPI, that's most of the battle right there. (See the -`docs for the register command`_ for more details.) - -.. 
_docs for the register command: http://docs.python.org/dist/package-index.html - -If you also use the `upload`_ command to upload actual distributions of your -package, that's even better, because EasyInstall will be able to find and -download them directly from your project's PyPI page. - -However, there may be reasons why you don't want to upload distributions to -PyPI, and just want your existing distributions (or perhaps a Subversion -checkout) to be used instead. - -So here's what you need to do before running the ``register`` command. There -are three ``setup()`` arguments that affect EasyInstall: - -``url`` and ``download_url`` - These become links on your project's PyPI page. EasyInstall will examine - them to see if they link to a package ("primary links"), or whether they are - HTML pages. If they're HTML pages, EasyInstall scans all HREF's on the - page for primary links - -``long_description`` - EasyInstall will check any URLs contained in this argument to see if they - are primary links. - -A URL is considered a "primary link" if it is a link to a .tar.gz, .tgz, .zip, -.egg, .egg.zip, .tar.bz2, or .exe file, or if it has an ``#egg=project`` or -``#egg=project-version`` fragment identifier attached to it. EasyInstall -attempts to determine a project name and optional version number from the text -of a primary link *without* downloading it. When it has found all the primary -links, EasyInstall will select the best match based on requested version, -platform compatibility, and other criteria. - -So, if your ``url`` or ``download_url`` point either directly to a downloadable -source distribution, or to HTML page(s) that have direct links to such, then -EasyInstall will be able to locate downloads automatically. If you want to -make Subversion checkouts available, then you should create links with either -``#egg=project`` or ``#egg=project-version`` added to the URL. 
You should -replace ``project`` and ``version`` with the values they would have in an egg -filename. (Be sure to actually generate an egg and then use the initial part -of the filename, rather than trying to guess what the escaped form of the -project name and version number will be.) - -Note that Subversion checkout links are of lower precedence than other kinds -of distributions, so EasyInstall will not select a Subversion checkout for -downloading unless it has a version included in the ``#egg=`` suffix, and -it's a higher version than EasyInstall has seen in any other links for your -project. - -As a result, it's a common practice to use mark checkout URLs with a version of -"dev" (i.e., ``#egg=projectname-dev``), so that users can do something like -this:: - - easy_install --editable projectname==dev - -in order to check out the in-development version of ``projectname``. - - -Managing "Continuous Releases" Using Subversion ------------------------------------------------ - -If you expect your users to track in-development versions of your project via -Subversion, there are a few additional steps you should take to ensure that -things work smoothly with EasyInstall. First, you should add the following -to your project's ``setup.cfg`` file: - -.. code-block:: ini - - [egg_info] - tag_build = .dev - tag_svn_revision = 1 - -This will tell ``setuptools`` to generate package version numbers like -``1.0a1.dev-r1263``, which will be considered to be an *older* release than -``1.0a1``. Thus, when you actually release ``1.0a1``, the entire egg -infrastructure (including ``setuptools``, ``pkg_resources`` and EasyInstall) -will know that ``1.0a1`` supersedes any interim snapshots from Subversion, and -handle upgrades accordingly. - -(Note: the project version number you specify in ``setup.py`` should always be -the *next* version of your software, not the last released version. 
-Alternately, you can leave out the ``tag_build=.dev``, and always use the -*last* release as a version number, so that your post-1.0 builds are labelled -``1.0-r1263``, indicating a post-1.0 patchlevel. Most projects so far, -however, seem to prefer to think of their project as being a future version -still under development, rather than a past version being patched. It is of -course possible for a single project to have both situations, using -post-release numbering on release branches, and pre-release numbering on the -trunk. But you don't have to make things this complex if you don't want to.) - -Commonly, projects releasing code from Subversion will include a PyPI link to -their checkout URL (as described in the previous section) with an -``#egg=projectname-dev`` suffix. This allows users to request EasyInstall -to download ``projectname==dev`` in order to get the latest in-development -code. Note that if your project depends on such in-progress code, you may wish -to specify your ``install_requires`` (or other requirements) to include -``==dev``, e.g.: - -.. code-block:: python - - install_requires = ["OtherProject>=0.2a1.dev-r143,==dev"] - -The above example says, "I really want at least this particular development -revision number, but feel free to follow and use an ``#egg=OtherProject-dev`` -link if you find one". This avoids the need to have actual source or binary -distribution snapshots of in-development code available, just to be able to -depend on the latest and greatest a project has to offer. - -A final note for Subversion development: if you are using SVN revision tags -as described in this section, it's a good idea to run ``setup.py develop`` -after each Subversion checkin or update, because your project's version number -will be changing, and your script wrappers need to be updated accordingly. 
- -Also, if the project's requirements have changed, the ``develop`` command will -take care of fetching the updated dependencies, building changed extensions, -etc. Be sure to also remind any of your users who check out your project -from Subversion that they need to run ``setup.py develop`` after every update -in order to keep their checkout completely in sync. - - -Making "Official" (Non-Snapshot) Releases -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -When you make an official release, creating source or binary distributions, -you will need to override the tag settings from ``setup.cfg``, so that you -don't end up registering versions like ``foobar-0.7a1.dev-r34832``. This is -easy to do if you are developing on the trunk and using tags or branches for -your releases - just make the change to ``setup.cfg`` after branching or -tagging the release, so the trunk will still produce development snapshots. - -Alternately, if you are not branching for releases, you can override the -default version options on the command line, using something like:: - - python setup.py egg_info -RDb "" sdist bdist_egg register upload - -The first part of this command (``egg_info -RDb ""``) will override the -configured tag information, before creating source and binary eggs, registering -the project with PyPI, and uploading the files. Thus, these commands will use -the plain version from your ``setup.py``, without adding the Subversion -revision number or build designation string. - -Of course, if you will be doing this a lot, you may wish to create a personal -alias for this operation, e.g.:: - - python setup.py alias -u release egg_info -RDb "" - -You can then use it like this:: - - python setup.py release sdist bdist_egg register upload - -Or of course you can create more elaborate aliases that do all of the above. -See the sections below on the `egg_info`_ and `alias`_ commands for more ideas. 
- - - -Distributing Extensions compiled with Pyrex -------------------------------------------- - -``setuptools`` includes transparent support for building Pyrex extensions, as -long as you define your extensions using ``setuptools.Extension``, *not* -``distutils.Extension``. You must also not import anything from Pyrex in -your setup script. - -If you follow these rules, you can safely list ``.pyx`` files as the source -of your ``Extension`` objects in the setup script. ``setuptools`` will detect -at build time whether Pyrex is installed or not. If it is, then ``setuptools`` -will use it. If not, then ``setuptools`` will silently change the -``Extension`` objects to refer to the ``.c`` counterparts of the ``.pyx`` -files, so that the normal distutils C compilation process will occur. - -Of course, for this to work, your source distributions must include the C -code generated by Pyrex, as well as your original ``.pyx`` files. This means -that you will probably want to include current ``.c`` files in your revision -control system, rebuilding them whenever you check changes in for the ``.pyx`` -source files. This will ensure that people tracking your project in CVS or -Subversion will be able to build it even if they don't have Pyrex installed, -and that your source releases will be similarly usable with or without Pyrex. - - ------------------ -Command Reference ------------------ - -.. _alias: - -``alias`` - Define shortcuts for commonly used commands -======================================================= - -Sometimes, you need to use the same commands over and over, but you can't -necessarily set them as defaults. For example, if you produce both development -snapshot releases and "stable" releases of a project, you may want to put -the distributions in different places, or use different ``egg_info`` tagging -options, etc. 
In these cases, it doesn't make sense to set the options in -a distutils configuration file, because the values of the options changed based -on what you're trying to do. - -Setuptools therefore allows you to define "aliases" - shortcut names for -an arbitrary string of commands and options, using ``setup.py alias aliasname -expansion``, where aliasname is the name of the new alias, and the remainder of -the command line supplies its expansion. For example, this command defines -a sitewide alias called "daily", that sets various ``egg_info`` tagging -options:: - - setup.py alias --global-config daily egg_info --tag-svn-revision \ - --tag-build=development - -Once the alias is defined, it can then be used with other setup commands, -e.g.:: - - setup.py daily bdist_egg # generate a daily-build .egg file - setup.py daily sdist # generate a daily-build source distro - setup.py daily sdist bdist_egg # generate both - -The above commands are interpreted as if the word ``daily`` were replaced with -``egg_info --tag-svn-revision --tag-build=development``. - -Note that setuptools will expand each alias *at most once* in a given command -line. This serves two purposes. First, if you accidentally create an alias -loop, it will have no effect; you'll instead get an error message about an -unknown command. Second, it allows you to define an alias for a command, that -uses that command. For example, this (project-local) alias:: - - setup.py alias bdist_egg bdist_egg rotate -k1 -m.egg - -redefines the ``bdist_egg`` command so that it always runs the ``rotate`` -command afterwards to delete all but the newest egg file. It doesn't loop -indefinitely on ``bdist_egg`` because the alias is only expanded once when -used. - -You can remove a defined alias with the ``--remove`` (or ``-r``) option, e.g.:: - - setup.py alias --global-config --remove daily - -would delete the "daily" alias we defined above. - -Aliases can be defined on a project-specific, per-user, or sitewide basis. 
The -default is to define or remove a project-specific alias, but you can use any of -the `configuration file options`_ (listed under the `saveopts`_ command, below) -to determine which distutils configuration file an aliases will be added to -(or removed from). - -Note that if you omit the "expansion" argument to the ``alias`` command, -you'll get output showing that alias' current definition (and what -configuration file it's defined in). If you omit the alias name as well, -you'll get a listing of all current aliases along with their configuration -file locations. - - -``bdist_egg`` - Create a Python Egg for the project -=================================================== - -This command generates a Python Egg (``.egg`` file) for the project. Python -Eggs are the preferred binary distribution format for EasyInstall, because they -are cross-platform (for "pure" packages), directly importable, and contain -project metadata including scripts and information about the project's -dependencies. They can be simply downloaded and added to ``sys.path`` -directly, or they can be placed in a directory on ``sys.path`` and then -automatically discovered by the egg runtime system. - -This command runs the `egg_info`_ command (if it hasn't already run) to update -the project's metadata (``.egg-info``) directory. If you have added any extra -metadata files to the ``.egg-info`` directory, those files will be included in -the new egg file's metadata directory, for use by the egg runtime system or by -any applications or frameworks that use that metadata. - -You won't usually need to specify any special options for this command; just -use ``bdist_egg`` and you're done. But there are a few options that may -be occasionally useful: - -``--dist-dir=DIR, -d DIR`` - Set the directory where the ``.egg`` file will be placed. 
If you don't - supply this, then the ``--dist-dir`` setting of the ``bdist`` command - will be used, which is usually a directory named ``dist`` in the project - directory. - -``--plat-name=PLATFORM, -p PLATFORM`` - Set the platform name string that will be embedded in the egg's filename - (assuming the egg contains C extensions). This can be used to override - the distutils default platform name with something more meaningful. Keep - in mind, however, that the egg runtime system expects to see eggs with - distutils platform names, so it may ignore or reject eggs with non-standard - platform names. Similarly, the EasyInstall program may ignore them when - searching web pages for download links. However, if you are - cross-compiling or doing some other unusual things, you might find a use - for this option. - -``--exclude-source-files`` - Don't include any modules' ``.py`` files in the egg, just compiled Python, - C, and data files. (Note that this doesn't affect any ``.py`` files in the - EGG-INFO directory or its subdirectories, since for example there may be - scripts with a ``.py`` extension which must still be retained.) We don't - recommend that you use this option except for packages that are being - bundled for proprietary end-user applications, or for "embedded" scenarios - where space is at an absolute premium. On the other hand, if your package - is going to be installed and used in compressed form, you might as well - exclude the source because Python's ``traceback`` module doesn't currently - understand how to display zipped source code anyway, or how to deal with - files that are in a different place from where their code was compiled. - -There are also some options you will probably never need, but which are there -because they were copied from similar ``bdist`` commands used as an example for -creating this one. 
They may be useful for testing and debugging, however, -which is why we kept them: - -``--keep-temp, -k`` - Keep the contents of the ``--bdist-dir`` tree around after creating the - ``.egg`` file. - -``--bdist-dir=DIR, -b DIR`` - Set the temporary directory for creating the distribution. The entire - contents of this directory are zipped to create the ``.egg`` file, after - running various installation commands to copy the package's modules, data, - and extensions here. - -``--skip-build`` - Skip doing any "build" commands; just go straight to the - install-and-compress phases. - - -.. _develop: - -``develop`` - Deploy the project source in "Development Mode" -============================================================= - -This command allows you to deploy your project's source for use in one or more -"staging areas" where it will be available for importing. This deployment is -done in such a way that changes to the project source are immediately available -in the staging area(s), without needing to run a build or install step after -each change. - -The ``develop`` command works by creating an ``.egg-link`` file (named for the -project) in the given staging area. If the staging area is Python's -``site-packages`` directory, it also updates an ``easy-install.pth`` file so -that the project is on ``sys.path`` by default for all programs run using that -Python installation. - -The ``develop`` command also installs wrapper scripts in the staging area (or -a separate directory, as specified) that will ensure the project's dependencies -are available on ``sys.path`` before running the project's source scripts. -And, it ensures that any missing project dependencies are available in the -staging area, by downloading and installing them if necessary. 
- -Last, but not least, the ``develop`` command invokes the ``build_ext -i`` -command to ensure any C extensions in the project have been built and are -up-to-date, and the ``egg_info`` command to ensure the project's metadata is -updated (so that the runtime and wrappers know what the project's dependencies -are). If you make any changes to the project's setup script or C extensions, -you should rerun the ``develop`` command against all relevant staging areas to -keep the project's scripts, metadata and extensions up-to-date. Most other -kinds of changes to your project should not require any build operations or -rerunning ``develop``, but keep in mind that even minor changes to the setup -script (e.g. changing an entry point definition) require you to re-run the -``develop`` or ``test`` commands to keep the distribution updated. - -Here are some of the options that the ``develop`` command accepts. Note that -they affect the project's dependencies as well as the project itself, so if you -have dependencies that need to be installed and you use ``--exclude-scripts`` -(for example), the dependencies' scripts will not be installed either! For -this reason, you may want to use EasyInstall to install the project's -dependencies before using the ``develop`` command, if you need finer control -over the installation options for dependencies. - -``--uninstall, -u`` - Un-deploy the current project. You may use the ``--install-dir`` or ``-d`` - option to designate the staging area. The created ``.egg-link`` file will - be removed, if present and it is still pointing to the project directory. - The project directory will be removed from ``easy-install.pth`` if the - staging area is Python's ``site-packages`` directory. - - Note that this option currently does *not* uninstall script wrappers! You - must uninstall them yourself, or overwrite them by using EasyInstall to - activate a different version of the package. 
You can also avoid installing - script wrappers in the first place, if you use the ``--exclude-scripts`` - (aka ``-x``) option when you run ``develop`` to deploy the project. - -``--multi-version, -m`` - "Multi-version" mode. Specifying this option prevents ``develop`` from - adding an ``easy-install.pth`` entry for the project(s) being deployed, and - if an entry for any version of a project already exists, the entry will be - removed upon successful deployment. In multi-version mode, no specific - version of the package is available for importing, unless you use - ``pkg_resources.require()`` to put it on ``sys.path``, or you are running - a wrapper script generated by ``setuptools`` or EasyInstall. (In which - case the wrapper script calls ``require()`` for you.) - - Note that if you install to a directory other than ``site-packages``, - this option is automatically in effect, because ``.pth`` files can only be - used in ``site-packages`` (at least in Python 2.3 and 2.4). So, if you use - the ``--install-dir`` or ``-d`` option (or they are set via configuration - file(s)) your project and its dependencies will be deployed in multi- - version mode. - -``--install-dir=DIR, -d DIR`` - Set the installation directory (staging area). If this option is not - directly specified on the command line or in a distutils configuration - file, the distutils default installation location is used. Normally, this - will be the ``site-packages`` directory, but if you are using distutils - configuration files, setting things like ``prefix`` or ``install_lib``, - then those settings are taken into account when computing the default - staging area. - -``--script-dir=DIR, -s DIR`` - Set the script installation directory. 
If you don't supply this option - (via the command line or a configuration file), but you *have* supplied - an ``--install-dir`` (via command line or config file), then this option - defaults to the same directory, so that the scripts will be able to find - their associated package installation. Otherwise, this setting defaults - to the location where the distutils would normally install scripts, taking - any distutils configuration file settings into account. - -``--exclude-scripts, -x`` - Don't deploy script wrappers. This is useful if you don't want to disturb - existing versions of the scripts in the staging area. - -``--always-copy, -a`` - Copy all needed distributions to the staging area, even if they - are already present in another directory on ``sys.path``. By default, if - a requirement can be met using a distribution that is already available in - a directory on ``sys.path``, it will not be copied to the staging area. - -``--egg-path=DIR`` - Force the generated ``.egg-link`` file to use a specified relative path - to the source directory. This can be useful in circumstances where your - installation directory is being shared by code running under multiple - platforms (e.g. Mac and Windows) which have different absolute locations - for the code under development, but the same *relative* locations with - respect to the installation directory. If you use this option when - installing, you must supply the same relative path when uninstalling. - -In addition to the above options, the ``develop`` command also accepts all of -the same options accepted by ``easy_install``. If you've configured any -``easy_install`` settings in your ``setup.cfg`` (or other distutils config -files), the ``develop`` command will use them as defaults, unless you override -them in a ``[develop]`` section or on the command line. - - -``easy_install`` - Find and install packages -============================================ - -This command runs the `EasyInstall tool -`_ for you. 
It is exactly -equivalent to running the ``easy_install`` command. All command line arguments -following this command are consumed and not processed further by the distutils, -so this must be the last command listed on the command line. Please see -the EasyInstall documentation for the options reference and usage examples. -Normally, there is no reason to use this command via the command line, as you -can just use ``easy_install`` directly. It's only listed here so that you know -it's a distutils command, which means that you can: - -* create command aliases that use it, -* create distutils extensions that invoke it as a subcommand, and -* configure options for it in your ``setup.cfg`` or other distutils config - files. - - -.. _egg_info: - -``egg_info`` - Create egg metadata and set build tags -===================================================== - -This command performs two operations: it updates a project's ``.egg-info`` -metadata directory (used by the ``bdist_egg``, ``develop``, and ``test`` -commands), and it allows you to temporarily change a project's version string, -to support "daily builds" or "snapshot" releases. It is run automatically by -the ``sdist``, ``bdist_egg``, ``develop``, ``register``, and ``test`` commands -in order to update the project's metadata, but you can also specify it -explicitly in order to temporarily change the project's version string while -executing other commands. (It also generates the``.egg-info/SOURCES.txt`` -manifest file, which is used when you are building source distributions.) - -In addition to writing the core egg metadata defined by ``setuptools`` and -required by ``pkg_resources``, this command can be extended to write other -metadata files as well, by defining entry points in the ``egg_info.writers`` -group. See the section on `Adding new EGG-INFO Files`_ below for more details. 
-Note that using additional metadata writers may require you to include a -``setup_requires`` argument to ``setup()`` in order to ensure that the desired -writers are available on ``sys.path``. - - -Release Tagging Options ------------------------ - -The following options can be used to modify the project's version string for -all remaining commands on the setup command line. The options are processed -in the order shown, so if you use more than one, the requested tags will be -added in the following order: - -``--tag-build=NAME, -b NAME`` - Append NAME to the project's version string. Due to the way setuptools - processes "pre-release" version suffixes beginning with the letters "a" - through "e" (like "alpha", "beta", and "candidate"), you will usually want - to use a tag like ".build" or ".dev", as this will cause the version number - to be considered *lower* than the project's default version. (If you - want to make the version number *higher* than the default version, you can - always leave off --tag-build and then use one or both of the following - options.) - - If you have a default build tag set in your ``setup.cfg``, you can suppress - it on the command line using ``-b ""`` or ``--tag-build=""`` as an argument - to the ``egg_info`` command. - -``--tag-svn-revision, -r`` - If the current directory is a Subversion checkout (i.e. has a ``.svn`` - subdirectory, this appends a string of the form "-rNNNN" to the project's - version string, where NNNN is the revision number of the most recent - modification to the current directory, as obtained from the ``svn info`` - command. - - If the current directory is not a Subversion checkout, the command will - look for a ``PKG-INFO`` file instead, and try to find the revision number - from that, by looking for a "-rNNNN" string at the end of the version - number. (This is so that building a package from a source distribution of - a Subversion snapshot will produce a binary with the correct version - number.) 
- - If there is no ``PKG-INFO`` file, or the version number contained therein - does not end with ``-r`` and a number, then ``-r0`` is used. - -``--no-svn-revision, -R`` - Don't include the Subversion revision in the version number. This option - is included so you can override a default setting put in ``setup.cfg``. - -``--tag-date, -d`` - Add a date stamp of the form "-YYYYMMDD" (e.g. "-20050528") to the - project's version number. - -``--no-date, -D`` - Don't include a date stamp in the version number. This option is included - so you can override a default setting in ``setup.cfg``. - - -(Note: Because these options modify the version number used for source and -binary distributions of your project, you should first make sure that you know -how the resulting version numbers will be interpreted by automated tools -like EasyInstall. See the section above on `Specifying Your Project's -Version`_ for an explanation of pre- and post-release tags, as well as tips on -how to choose and verify a versioning scheme for your your project.) - -For advanced uses, there is one other option that can be set, to change the -location of the project's ``.egg-info`` directory. Commands that need to find -the project's source directory or metadata should get it from this setting: - - -Other ``egg_info`` Options --------------------------- - -``--egg-base=SOURCEDIR, -e SOURCEDIR`` - Specify the directory that should contain the .egg-info directory. This - should normally be the root of your project's source tree (which is not - necessarily the same as your project directory; some projects use a ``src`` - or ``lib`` subdirectory as the source root). You should not normally need - to specify this directory, as it is normally determined from the - ``package_dir`` argument to the ``setup()`` function, if any. If there is - no ``package_dir`` set, this option defaults to the current directory. 
- - -``egg_info`` Examples ---------------------- - -Creating a dated "nightly build" snapshot egg:: - - python setup.py egg_info --tag-date --tag-build=DEV bdist_egg - -Creating and uploading a release with no version tags, even if some default -tags are specified in ``setup.cfg``:: - - python setup.py egg_info -RDb "" sdist bdist_egg register upload - -(Notice that ``egg_info`` must always appear on the command line *before* any -commands that you want the version changes to apply to.) - - -.. _install command: - -``install`` - Run ``easy_install`` or old-style installation -============================================================ - -The setuptools ``install`` command is basically a shortcut to run the -``easy_install`` command on the current project. However, for convenience -in creating "system packages" of setuptools-based projects, you can also -use this option: - -``--single-version-externally-managed`` - This boolean option tells the ``install`` command to perform an "old style" - installation, with the addition of an ``.egg-info`` directory so that the - installed project will still have its metadata available and operate - normally. If you use this option, you *must* also specify the ``--root`` - or ``--record`` options (or both), because otherwise you will have no way - to identify and remove the installed files. - -This option is automatically in effect when ``install`` is invoked by another -distutils command, so that commands like ``bdist_wininst`` and ``bdist_rpm`` -will create system packages of eggs. It is also automatically in effect if -you specify the ``--root`` option. - - -``install_egg_info`` - Install an ``.egg-info`` directory in ``site-packages`` -============================================================================== - -Setuptools runs this command as part of ``install`` operations that use the -``--single-version-externally-managed`` options. 
You should not invoke it -directly; it is documented here for completeness and so that distutils -extensions such as system package builders can make use of it. This command -has only one option: - -``--install-dir=DIR, -d DIR`` - The parent directory where the ``.egg-info`` directory will be placed. - Defaults to the same as the ``--install-dir`` option specified for the - ``install_lib`` command, which is usually the system ``site-packages`` - directory. - -This command assumes that the ``egg_info`` command has been given valid options -via the command line or ``setup.cfg``, as it will invoke the ``egg_info`` -command and use its options to locate the project's source ``.egg-info`` -directory. - - -.. _rotate: - -``rotate`` - Delete outdated distribution files -=============================================== - -As you develop new versions of your project, your distribution (``dist``) -directory will gradually fill up with older source and/or binary distribution -files. The ``rotate`` command lets you automatically clean these up, keeping -only the N most-recently modified files matching a given pattern. - -``--match=PATTERNLIST, -m PATTERNLIST`` - Comma-separated list of glob patterns to match. This option is *required*. - The project name and ``-*`` is prepended to the supplied patterns, in order - to match only distributions belonging to the current project (in case you - have a shared distribution directory for multiple projects). Typically, - you will use a glob pattern like ``.zip`` or ``.egg`` to match files of - the specified type. Note that each supplied pattern is treated as a - distinct group of files for purposes of selecting files to delete. - -``--keep=COUNT, -k COUNT`` - Number of matching distributions to keep. For each group of files - identified by a pattern specified with the ``--match`` option, delete all - but the COUNT most-recently-modified files in that group. This option is - *required*. 
- -``--dist-dir=DIR, -d DIR`` - Directory where the distributions are. This defaults to the value of the - ``bdist`` command's ``--dist-dir`` option, which will usually be the - project's ``dist`` subdirectory. - -**Example 1**: Delete all .tar.gz files from the distribution directory, except -for the 3 most recently modified ones:: - - setup.py rotate --match=.tar.gz --keep=3 - -**Example 2**: Delete all Python 2.3 or Python 2.4 eggs from the distribution -directory, except the most recently modified one for each Python version:: - - setup.py rotate --match=-py2.3*.egg,-py2.4*.egg --keep=1 - - -.. _saveopts: - -``saveopts`` - Save used options to a configuration file -======================================================== - -Finding and editing ``distutils`` configuration files can be a pain, especially -since you also have to translate the configuration options from command-line -form to the proper configuration file format. You can avoid these hassles by -using the ``saveopts`` command. Just add it to the command line to save the -options you used. For example, this command builds the project using -the ``mingw32`` C compiler, then saves the --compiler setting as the default -for future builds (even those run implicitly by the ``install`` command):: - - setup.py build --compiler=mingw32 saveopts - -The ``saveopts`` command saves all options for every commmand specified on the -command line to the project's local ``setup.cfg`` file, unless you use one of -the `configuration file options`_ to change where the options are saved. For -example, this command does the same as above, but saves the compiler setting -to the site-wide (global) distutils configuration:: - - setup.py build --compiler=mingw32 saveopts -g - -Note that it doesn't matter where you place the ``saveopts`` command on the -command line; it will still save all the options specified for all commands. 
-For example, this is another valid way to spell the last example:: - - setup.py saveopts -g build --compiler=mingw32 - -Note, however, that all of the commands specified are always run, regardless of -where ``saveopts`` is placed on the command line. - - -Configuration File Options --------------------------- - -Normally, settings such as options and aliases are saved to the project's -local ``setup.cfg`` file. But you can override this and save them to the -global or per-user configuration files, or to a manually-specified filename. - -``--global-config, -g`` - Save settings to the global ``distutils.cfg`` file inside the ``distutils`` - package directory. You must have write access to that directory to use - this option. You also can't combine this option with ``-u`` or ``-f``. - -``--user-config, -u`` - Save settings to the current user's ``~/.pydistutils.cfg`` (POSIX) or - ``$HOME/pydistutils.cfg`` (Windows) file. You can't combine this option - with ``-g`` or ``-f``. - -``--filename=FILENAME, -f FILENAME`` - Save settings to the specified configuration file to use. You can't - combine this option with ``-g`` or ``-u``. Note that if you specify a - non-standard filename, the ``distutils`` and ``setuptools`` will not - use the file's contents. This option is mainly included for use in - testing. - -These options are used by other ``setuptools`` commands that modify -configuration files, such as the `alias`_ and `setopt`_ commands. - - -.. _setopt: - -``setopt`` - Set a distutils or setuptools option in a config file -================================================================== - -This command is mainly for use by scripts, but it can also be used as a quick -and dirty way to change a distutils configuration option without having to -remember what file the options are in and then open an editor. - -**Example 1**. 
Set the default C compiler to ``mingw32`` (using long option -names):: - - setup.py setopt --command=build --option=compiler --set-value=mingw32 - -**Example 2**. Remove any setting for the distutils default package -installation directory (short option names):: - - setup.py setopt -c install -o install_lib -r - - -Options for the ``setopt`` command: - -``--command=COMMAND, -c COMMAND`` - Command to set the option for. This option is required. - -``--option=OPTION, -o OPTION`` - The name of the option to set. This option is required. - -``--set-value=VALUE, -s VALUE`` - The value to set the option to. Not needed if ``-r`` or ``--remove`` is - set. - -``--remove, -r`` - Remove (unset) the option, instead of setting it. - -In addition to the above options, you may use any of the `configuration file -options`_ (listed under the `saveopts`_ command, above) to determine which -distutils configuration file the option will be added to (or removed from). - - -.. _test: - -``test`` - Build package and run a unittest suite -================================================= - -When doing test-driven development, or running automated builds that need -testing before they are deployed for downloading or use, it's often useful -to be able to run a project's unit tests without actually deploying the project -anywhere, even using the ``develop`` command. The ``test`` command runs a -project's unit tests without actually deploying it, by temporarily putting the -project's source on ``sys.path``, after first running ``build_ext -i`` and -``egg_info`` to ensure that any C extensions and project metadata are -up-to-date. - -To use this command, your project's tests must be wrapped in a ``unittest`` -test suite by either a function, a ``TestCase`` class or method, or a module -or package containing ``TestCase`` classes. 
If the named suite is a module, -and the module has an ``additional_tests()`` function, it is called and the -result (which must be a ``unittest.TestSuite``) is added to the tests to be -run. If the named suite is a package, any submodules and subpackages are -recursively added to the overall test suite. (Note: if your project specifies -a ``test_loader``, the rules for processing the chosen ``test_suite`` may -differ; see the `test_loader`_ documentation for more details.) - -Note that many test systems including ``doctest`` support wrapping their -non-``unittest`` tests in ``TestSuite`` objects. So, if you are using a test -package that does not support this, we suggest you encourage its developers to -implement test suite support, as this is a convenient and standard way to -aggregate a collection of tests to be run under a common test harness. - -By default, tests will be run in the "verbose" mode of the ``unittest`` -package's text test runner, but you can get the "quiet" mode (just dots) if -you supply the ``-q`` or ``--quiet`` option, either as a global option to -the setup script (e.g. ``setup.py -q test``) or as an option for the ``test`` -command itself (e.g. ``setup.py test -q``). There is one other option -available: - -``--test-suite=NAME, -s NAME`` - Specify the test suite (or module, class, or method) to be run - (e.g. ``some_module.test_suite``). The default for this option can be - set by giving a ``test_suite`` argument to the ``setup()`` function, e.g.:: - - setup( - # ... - test_suite = "my_package.tests.test_all" - ) - - If you did not set a ``test_suite`` in your ``setup()`` call, and do not - provide a ``--test-suite`` option, an error will occur. - - -.. 
_upload: - -``upload`` - Upload source and/or egg distributions to PyPI -=========================================================== - -PyPI now supports uploading project files for redistribution; uploaded files -are easily found by EasyInstall, even if you don't have download links on your -project's home page. - -Although Python 2.5 will support uploading all types of distributions to PyPI, -setuptools only supports source distributions and eggs. (This is partly -because PyPI's upload support is currently broken for various other file -types.) To upload files, you must include the ``upload`` command *after* the -``sdist`` or ``bdist_egg`` commands on the setup command line. For example:: - - setup.py bdist_egg upload # create an egg and upload it - setup.py sdist upload # create a source distro and upload it - setup.py sdist bdist_egg upload # create and upload both - -Note that to upload files for a project, the corresponding version must already -be registered with PyPI, using the distutils ``register`` command. It's -usually a good idea to include the ``register`` command at the start of the -command line, so that any registration problems can be found and fixed before -building and uploading the distributions, e.g.:: - - setup.py register sdist bdist_egg upload - -This will update PyPI's listing for your project's current version. - -Note, by the way, that the metadata in your ``setup()`` call determines what -will be listed in PyPI for your package. Try to fill out as much of it as -possible, as it will save you a lot of trouble manually adding and updating -your PyPI listings. Just put it in ``setup.py`` and use the ``register`` -comamnd to keep PyPI up to date. - -The ``upload`` command has a few options worth noting: - -``--sign, -s`` - Sign each uploaded file using GPG (GNU Privacy Guard). The ``gpg`` program - must be available for execution on the system ``PATH``. 
- -``--identity=NAME, -i NAME`` - Specify the identity or key name for GPG to use when signing. The value of - this option will be passed through the ``--local-user`` option of the - ``gpg`` program. - -``--show-response`` - Display the full response text from server; this is useful for debugging - PyPI problems. - -``--repository=URL, -r URL`` - The URL of the repository to upload to. Defaults to - https://pypi.python.org/pypi (i.e., the main PyPI installation). - -.. _upload_docs: - -``upload_docs`` - Upload package documentation to PyPI -====================================================== - -PyPI now supports uploading project documentation to the dedicated URL -https://pythonhosted.org//. - -The ``upload_docs`` command will create the necessary zip file out of a -documentation directory and will post to the repository. - -Note that to upload the documentation of a project, the corresponding version -must already be registered with PyPI, using the distutils ``register`` -command -- just like the ``upload`` command. - -Assuming there is an ``Example`` project with documentation in the -subdirectory ``docs``, e.g.:: - - Example/ - |-- example.py - |-- setup.cfg - |-- setup.py - |-- docs - | |-- build - | | `-- html - | | | |-- index.html - | | | `-- tips_tricks.html - | |-- conf.py - | |-- index.txt - | `-- tips_tricks.txt - -You can simply pass the documentation directory path to the ``upload_docs`` -command:: - - python setup.py upload_docs --upload-dir=docs/build/html - -If no ``--upload-dir`` is given, ``upload_docs`` will attempt to run the -``build_sphinx`` command to generate uploadable documentation. -For the command to become available, `Sphinx `_ -must be installed in the same environment as distribute. - -As with other ``setuptools``-based commands, you can define useful -defaults in the ``setup.cfg`` of your Python project, e.g.: - -.. 
code-block:: ini - - [upload_docs] - upload-dir = docs/build/html - -The ``upload_docs`` command has the following options: - -``--upload-dir`` - The directory to be uploaded to the repository. - -``--show-response`` - Display the full response text from server; this is useful for debugging - PyPI problems. - -``--repository=URL, -r URL`` - The URL of the repository to upload to. Defaults to - https://pypi.python.org/pypi (i.e., the main PyPI installation). - - --------------------------------- -Extending and Reusing Distribute --------------------------------- - -Creating ``distutils`` Extensions -================================= - -It can be hard to add new commands or setup arguments to the distutils. But -the ``setuptools`` package makes it a bit easier, by allowing you to distribute -a distutils extension as a separate project, and then have projects that need -the extension just refer to it in their ``setup_requires`` argument. - -With ``setuptools``, your distutils extension projects can hook in new -commands and ``setup()`` arguments just by defining "entry points". These -are mappings from command or argument names to a specification of where to -import a handler from. (See the section on `Dynamic Discovery of Services and -Plugins`_ above for some more background on entry points.) - - -Adding Commands ---------------- - -You can add new ``setup`` commands by defining entry points in the -``distutils.commands`` group. For example, if you wanted to add a ``foo`` -command, you might add something like this to your distutils extension -project's setup script:: - - setup( - # ... - entry_points = { - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - }, - ) - -(Assuming, of course, that the ``foo`` class in ``mypackage.some_module`` is -a ``setuptools.Command`` subclass.) - -Once a project containing such entry points has been activated on ``sys.path``, -(e.g. 
by running "install" or "develop" with a site-packages installation -directory) the command(s) will be available to any ``setuptools``-based setup -scripts. It is not necessary to use the ``--command-packages`` option or -to monkeypatch the ``distutils.command`` package to install your commands; -``setuptools`` automatically adds a wrapper to the distutils to search for -entry points in the active distributions on ``sys.path``. In fact, this is -how setuptools' own commands are installed: the setuptools project's setup -script defines entry points for them! - - -Adding ``setup()`` Arguments ----------------------------- - -Sometimes, your commands may need additional arguments to the ``setup()`` -call. You can enable this by defining entry points in the -``distutils.setup_keywords`` group. For example, if you wanted a ``setup()`` -argument called ``bar_baz``, you might add something like this to your -distutils extension project's setup script:: - - setup( - # ... - entry_points = { - "distutils.commands": [ - "foo = mypackage.some_module:foo", - ], - "distutils.setup_keywords": [ - "bar_baz = mypackage.some_module:validate_bar_baz", - ], - }, - ) - -The idea here is that the entry point defines a function that will be called -to validate the ``setup()`` argument, if it's supplied. The ``Distribution`` -object will have the initial value of the attribute set to ``None``, and the -validation function will only be called if the ``setup()`` call sets it to -a non-None value. Here's an example validation function:: - - def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) - -Your function should accept three arguments: the ``Distribution`` object, -the attribute name, and the attribute value. It should raise a -``DistutilsSetupError`` (from the ``distutils.errors`` module) if the argument -is invalid. 
Remember, your function will only be called with non-None values, -and the default value of arguments defined this way is always None. So, your -commands should always be prepared for the possibility that the attribute will -be ``None`` when they access it later. - -If more than one active distribution defines an entry point for the same -``setup()`` argument, *all* of them will be called. This allows multiple -distutils extensions to define a common argument, as long as they agree on -what values of that argument are valid. - -Also note that as with commands, it is not necessary to subclass or monkeypatch -the distutils ``Distribution`` class in order to add your arguments; it is -sufficient to define the entry points in your extension, as long as any setup -script using your extension lists your project in its ``setup_requires`` -argument. - - -Adding new EGG-INFO Files -------------------------- - -Some extensible applications or frameworks may want to allow third parties to -develop plugins with application or framework-specific metadata included in -the plugins' EGG-INFO directory, for easy access via the ``pkg_resources`` -metadata API. The easiest way to allow this is to create a distutils extension -to be used from the plugin projects' setup scripts (via ``setup_requires``) -that defines a new setup keyword, and then uses that data to write an EGG-INFO -file when the ``egg_info`` command is run. - -The ``egg_info`` command looks for extension points in an ``egg_info.writers`` -group, and calls them to write the files. Here's a simple example of a -distutils extension defining a setup argument ``foo_bar``, which is a list of -lines that will be written to ``foo_bar.txt`` in the EGG-INFO directory of any -project that uses the argument:: - - setup( - # ... 
- entry_points = { - "distutils.setup_keywords": [ - "foo_bar = setuptools.dist:assert_string_list", - ], - "egg_info.writers": [ - "foo_bar.txt = setuptools.command.egg_info:write_arg", - ], - }, - ) - -This simple example makes use of two utility functions defined by setuptools -for its own use: a routine to validate that a setup keyword is a sequence of -strings, and another one that looks up a setup argument and writes it to -a file. Here's what the writer utility looks like:: - - def write_arg(cmd, basename, filename): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value) - -As you can see, ``egg_info.writers`` entry points must be a function taking -three arguments: a ``egg_info`` command instance, the basename of the file to -write (e.g. ``foo_bar.txt``), and the actual full filename that should be -written to. - -In general, writer functions should honor the command object's ``dry_run`` -setting when writing files, and use the ``distutils.log`` object to do any -console output. The easiest way to conform to this requirement is to use -the ``cmd`` object's ``write_file()``, ``delete_file()``, and -``write_or_delete_file()`` methods exclusively for your file operations. See -those methods' docstrings for more details. - - -Adding Support for Other Revision Control Systems -------------------------------------------------- - -If you would like to create a plugin for ``setuptools`` to find files in other -source control systems besides CVS and Subversion, you can do so by adding an -entry point to the ``setuptools.file_finders`` group. The entry point should -be a function accepting a single directory name, and should yield -all the filenames within that directory (and any subdirectories thereof) that -are under revision control. 
- -For example, if you were going to create a plugin for a revision control system -called "foobar", you would write a function something like this: - -.. code-block:: python - - def find_files_for_foobar(dirname): - # loop to yield paths that start with `dirname` - -And you would register it in a setup script using something like this:: - - entry_points = { - "setuptools.file_finders": [ - "foobar = my_foobar_module:find_files_for_foobar" - ] - } - -Then, anyone who wants to use your plugin can simply install it, and their -local setuptools installation will be able to find the necessary files. - -It is not necessary to distribute source control plugins with projects that -simply use the other source control system, or to specify the plugins in -``setup_requires``. When you create a source distribution with the ``sdist`` -command, setuptools automatically records what files were found in the -``SOURCES.txt`` file. That way, recipients of source distributions don't need -to have revision control at all. However, if someone is working on a package -by checking out with that system, they will need the same plugin(s) that the -original author is using. - -A few important points for writing revision control file finders: - -* Your finder function MUST return relative paths, created by appending to the - passed-in directory name. Absolute paths are NOT allowed, nor are relative - paths that reference a parent directory of the passed-in directory. - -* Your finder function MUST accept an empty string as the directory name, - meaning the current directory. You MUST NOT convert this to a dot; just - yield relative paths. 
So, yielding a subdirectory named ``some/dir`` under - the current directory should NOT be rendered as ``./some/dir`` or - ``/somewhere/some/dir``, but *always* as simply ``some/dir`` - -* Your finder function SHOULD NOT raise any errors, and SHOULD deal gracefully - with the absence of needed programs (i.e., ones belonging to the revision - control system itself. It *may*, however, use ``distutils.log.warn()`` to - inform the user of the missing program(s). - - -Subclassing ``Command`` ------------------------ - -Sorry, this section isn't written yet, and neither is a lot of what's below -this point, except for the change log. You might want to `subscribe to changes -in this page `_ to see when new documentation is -added or updated. - -XXX - - -Reusing ``setuptools`` Code -=========================== - -``ez_setup`` ------------- - -XXX - - -``setuptools.archive_util`` ---------------------------- - -XXX - - -``setuptools.sandbox`` ----------------------- - -XXX - - -``setuptools.package_index`` ----------------------------- - -XXX - - -Mailing List and Bug Tracker -============================ - -Please use the `distutils-sig mailing list`_ for questions and discussion about -setuptools, and the `setuptools bug tracker`_ ONLY for issues you have -confirmed via the list are actual bugs, and which you have reduced to a minimal -set of steps to reproduce. - -.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ -.. 
_setuptools bug tracker: https://bitbucket.org/pypa/setuptools/ - diff --git a/libs/setuptools-2.2/docs/using.txt b/libs/setuptools-2.2/docs/using.txt deleted file mode 100644 index e44847d..0000000 --- a/libs/setuptools-2.2/docs/using.txt +++ /dev/null @@ -1,10 +0,0 @@ -================================ -Using Setuptools in your project -================================ - -To use Setuptools in your project, the recommended way is to ship -`ez_setup.py` alongside your `setup.py` script and call -it at the very beginning of `setup.py` like this:: - - from ez_setup import use_setuptools - use_setuptools() diff --git a/libs/setuptools-2.2/easy_install.py b/libs/setuptools-2.2/easy_install.py deleted file mode 100644 index d87e984..0000000 --- a/libs/setuptools-2.2/easy_install.py +++ /dev/null @@ -1,5 +0,0 @@ -"""Run the EasyInstall command""" - -if __name__ == '__main__': - from setuptools.command.easy_install import main - main() diff --git a/libs/setuptools-2.2/ez_setup.py b/libs/setuptools-2.2/ez_setup.py deleted file mode 100644 index 1420c11..0000000 --- a/libs/setuptools-2.2/ez_setup.py +++ /dev/null @@ -1,364 +0,0 @@ -#!/usr/bin/env python -"""Bootstrap setuptools installation - -To use setuptools in your package's setup.py, include this -file in the same directory and add this to the top of your setup.py:: - - from ez_setup import use_setuptools - use_setuptools() - -To require a specific version of setuptools, set a download -mirror, or use an alternate download directory, simply supply -the appropriate options to ``use_setuptools()``. - -This file can also be run as a script to install or upgrade setuptools. 
-""" -import os -import shutil -import sys -import tempfile -import tarfile -import optparse -import subprocess -import platform -import textwrap - -from distutils import log - -try: - from site import USER_SITE -except ImportError: - USER_SITE = None - -DEFAULT_VERSION = "2.2" -DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/" - -def _python_cmd(*args): - """ - Return True if the command succeeded. - """ - args = (sys.executable,) + args - return subprocess.call(args) == 0 - -def _install(tarball, install_args=()): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # installing - log.warn('Installing Setuptools') - if not _python_cmd('setup.py', 'install', *install_args): - log.warn('Something went wrong during the installation.') - log.warn('See the error message above.') - # exitcode will be 2 - return 2 - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - - -def _build_egg(egg, tarball, to_dir): - # extracting the tarball - tmpdir = tempfile.mkdtemp() - log.warn('Extracting in %s', tmpdir) - old_wd = os.getcwd() - try: - os.chdir(tmpdir) - tar = tarfile.open(tarball) - _extractall(tar) - tar.close() - - # going in the directory - subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0]) - os.chdir(subdir) - log.warn('Now working in %s', subdir) - - # building an egg - log.warn('Building a Setuptools egg in %s', to_dir) - _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir) - - finally: - os.chdir(old_wd) - shutil.rmtree(tmpdir) - # returning the result - log.warn(egg) - if not os.path.exists(egg): - raise IOError('Could not build the egg.') - - -def _do_download(version, download_base, to_dir, download_delay): - egg = 
os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg' - % (version, sys.version_info[0], sys.version_info[1])) - if not os.path.exists(egg): - tarball = download_setuptools(version, download_base, - to_dir, download_delay) - _build_egg(egg, tarball, to_dir) - sys.path.insert(0, egg) - - # Remove previously-imported pkg_resources if present (see - # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details). - if 'pkg_resources' in sys.modules: - del sys.modules['pkg_resources'] - - import setuptools - setuptools.bootstrap_install_from = egg - - -def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, download_delay=15): - to_dir = os.path.abspath(to_dir) - rep_modules = 'pkg_resources', 'setuptools' - imported = set(sys.modules).intersection(rep_modules) - try: - import pkg_resources - except ImportError: - return _do_download(version, download_base, to_dir, download_delay) - try: - pkg_resources.require("setuptools>=" + version) - return - except pkg_resources.DistributionNotFound: - return _do_download(version, download_base, to_dir, download_delay) - except pkg_resources.VersionConflict as VC_err: - if imported: - msg = textwrap.dedent(""" - The required version of setuptools (>={version}) is not available, - and can't be installed while this script is running. Please - install a more recent version first, using - 'easy_install -U setuptools'. - - (Currently using {VC_err.args[0]!r}) - """).format(VC_err=VC_err, version=version) - sys.stderr.write(msg) - sys.exit(2) - - # otherwise, reload ok - del pkg_resources, sys.modules['pkg_resources'] - return _do_download(version, download_base, to_dir, download_delay) - -def _clean_check(cmd, target): - """ - Run the command to download target. If the command fails, clean up before - re-raising the error. 
- """ - try: - subprocess.check_call(cmd) - except subprocess.CalledProcessError: - if os.access(target, os.F_OK): - os.unlink(target) - raise - -def download_file_powershell(url, target): - """ - Download the file at url to target using Powershell (which will validate - trust). Raise an exception if the command cannot complete. - """ - target = os.path.abspath(target) - cmd = [ - 'powershell', - '-Command', - "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(), - ] - _clean_check(cmd, target) - -def has_powershell(): - if platform.system() != 'Windows': - return False - cmd = ['powershell', '-Command', 'echo test'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_powershell.viable = has_powershell - -def download_file_curl(url, target): - cmd = ['curl', url, '--silent', '--output', target] - _clean_check(cmd, target) - -def has_curl(): - cmd = ['curl', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_curl.viable = has_curl - -def download_file_wget(url, target): - cmd = ['wget', url, '--quiet', '--output-document', target] - _clean_check(cmd, target) - -def has_wget(): - cmd = ['wget', '--version'] - devnull = open(os.path.devnull, 'wb') - try: - try: - subprocess.check_call(cmd, stdout=devnull, stderr=devnull) - except: - return False - finally: - devnull.close() - return True - -download_file_wget.viable = has_wget - -def download_file_insecure(url, target): - """ - Use Python to download the file, even though it cannot authenticate the - connection. 
- """ - try: - from urllib.request import urlopen - except ImportError: - from urllib2 import urlopen - src = dst = None - try: - src = urlopen(url) - # Read/write all in one block, so we don't create a corrupt file - # if the download is interrupted. - data = src.read() - dst = open(target, "wb") - dst.write(data) - finally: - if src: - src.close() - if dst: - dst.close() - -download_file_insecure.viable = lambda: True - -def get_best_downloader(): - downloaders = [ - download_file_powershell, - download_file_curl, - download_file_wget, - download_file_insecure, - ] - - for dl in downloaders: - if dl.viable(): - return dl - -def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL, - to_dir=os.curdir, delay=15, - downloader_factory=get_best_downloader): - """Download setuptools from a specified location and return its filename - - `version` should be a valid setuptools version number that is available - as an egg for download under the `download_base` URL (which should end - with a '/'). `to_dir` is the directory where the egg will be downloaded. - `delay` is the number of seconds to pause before an actual download - attempt. - - ``downloader_factory`` should be a function taking no arguments and - returning a function for downloading a URL to a target. - """ - # making sure we use the absolute path - to_dir = os.path.abspath(to_dir) - tgz_name = "setuptools-%s.tar.gz" % version - url = download_base + tgz_name - saveto = os.path.join(to_dir, tgz_name) - if not os.path.exists(saveto): # Avoid repeated downloads - log.warn("Downloading %s", url) - downloader = downloader_factory() - downloader(url, saveto) - return os.path.realpath(saveto) - - -def _extractall(self, path=".", members=None): - """Extract all members from the archive to the current working - directory and set owner, modification time and permissions on - directories afterwards. `path' specifies a different directory - to extract to. 
`members' is optional and must be a subset of the - list returned by getmembers(). - """ - import copy - import operator - from tarfile import ExtractError - directories = [] - - if members is None: - members = self - - for tarinfo in members: - if tarinfo.isdir(): - # Extract directories with a safe mode. - directories.append(tarinfo) - tarinfo = copy.copy(tarinfo) - tarinfo.mode = 448 # decimal for oct 0700 - self.extract(tarinfo, path) - - # Reverse sort directories. - directories.sort(key=operator.attrgetter('name'), reverse=True) - - # Set correct owner, mtime and filemode on directories. - for tarinfo in directories: - dirpath = os.path.join(path, tarinfo.name) - try: - self.chown(tarinfo, dirpath) - self.utime(tarinfo, dirpath) - self.chmod(tarinfo, dirpath) - except ExtractError as e: - if self.errorlevel > 1: - raise - else: - self._dbg(1, "tarfile: %s" % e) - - -def _build_install_args(options): - """ - Build the arguments to 'python setup.py install' on the setuptools package - """ - return ['--user'] if options.user_install else [] - -def _parse_args(): - """ - Parse the command line for options - """ - parser = optparse.OptionParser() - parser.add_option( - '--user', dest='user_install', action='store_true', default=False, - help='install in user site package (requires Python 2.6 or later)') - parser.add_option( - '--download-base', dest='download_base', metavar="URL", - default=DEFAULT_URL, - help='alternative URL from where to download the setuptools package') - parser.add_option( - '--insecure', dest='downloader_factory', action='store_const', - const=lambda: download_file_insecure, default=get_best_downloader, - help='Use internal, non-validating downloader' - ) - options, args = parser.parse_args() - # positional arguments are ignored - return options - -def main(version=DEFAULT_VERSION): - """Install or upgrade setuptools and EasyInstall""" - options = _parse_args() - tarball = download_setuptools(download_base=options.download_base, - 
downloader_factory=options.downloader_factory) - return _install(tarball, _build_install_args(options)) - -if __name__ == '__main__': - sys.exit(main()) diff --git a/libs/setuptools-2.2/launcher.c b/libs/setuptools-2.2/launcher.c deleted file mode 100644 index be69f0c..0000000 --- a/libs/setuptools-2.2/launcher.c +++ /dev/null @@ -1,335 +0,0 @@ -/* Setuptools Script Launcher for Windows - - This is a stub executable for Windows that functions somewhat like - Effbot's "exemaker", in that it runs a script with the same name but - a .py extension, using information from a #! line. It differs in that - it spawns the actual Python executable, rather than attempting to - hook into the Python DLL. This means that the script will run with - sys.executable set to the Python executable, where exemaker ends up with - sys.executable pointing to itself. (Which means it won't work if you try - to run another Python process using sys.executable.) - - To build/rebuild with mingw32, do this in the setuptools project directory: - - gcc -DGUI=0 -mno-cygwin -O -s -o setuptools/cli.exe launcher.c - gcc -DGUI=1 -mwindows -mno-cygwin -O -s -o setuptools/gui.exe launcher.c - - To build for Windows RT, install both Visual Studio Express for Windows 8 - and for Windows Desktop (both freeware), create "win32" application using - "Windows Desktop" version, create new "ARM" target via - "Configuration Manager" menu and modify ".vcxproj" file by adding - "true" tag - as child of "PropertyGroup" tags that has "Debug|ARM" and "Release|ARM" - properties. - - It links to msvcrt.dll, but this shouldn't be a problem since it doesn't - actually run Python in the same process. Note that using 'exec' instead - of 'spawn' doesn't work, because on Windows this leads to the Python - executable running in the *background*, attached to the same console - window, meaning you get a command prompt back *before* Python even finishes - starting. 
So, we have to use spawnv() and wait for Python to exit before - continuing. :( -*/ - -#include -#include -#include -#include -#include -#include - -int child_pid=0; - -int fail(char *format, char *data) { - /* Print error message to stderr and return 2 */ - fprintf(stderr, format, data); - return 2; -} - -char *quoted(char *data) { - int i, ln = strlen(data), nb; - - /* We allocate twice as much space as needed to deal with worse-case - of having to escape everything. */ - char *result = calloc(ln*2+3, sizeof(char)); - char *presult = result; - - *presult++ = '"'; - for (nb=0, i=0; i < ln; i++) - { - if (data[i] == '\\') - nb += 1; - else if (data[i] == '"') - { - for (; nb > 0; nb--) - *presult++ = '\\'; - *presult++ = '\\'; - } - else - nb = 0; - *presult++ = data[i]; - } - - for (; nb > 0; nb--) /* Deal w trailing slashes */ - *presult++ = '\\'; - - *presult++ = '"'; - *presult++ = 0; - return result; -} - - - - - - - - - - -char *loadable_exe(char *exename) { - /* HINSTANCE hPython; DLL handle for python executable */ - char *result; - - /* hPython = LoadLibraryEx(exename, NULL, LOAD_WITH_ALTERED_SEARCH_PATH); - if (!hPython) return NULL; */ - - /* Return the absolute filename for spawnv */ - result = calloc(MAX_PATH, sizeof(char)); - strncpy(result, exename, MAX_PATH); - /*if (result) GetModuleFileNameA(hPython, result, MAX_PATH); - - FreeLibrary(hPython); */ - return result; -} - - -char *find_exe(char *exename, char *script) { - char drive[_MAX_DRIVE], dir[_MAX_DIR], fname[_MAX_FNAME], ext[_MAX_EXT]; - char path[_MAX_PATH], c, *result; - - /* convert slashes to backslashes for uniform search below */ - result = exename; - while (c = *result++) if (c=='/') result[-1] = '\\'; - - _splitpath(exename, drive, dir, fname, ext); - if (drive[0] || dir[0]=='\\') { - return loadable_exe(exename); /* absolute path, use directly */ - } - /* Use the script's parent directory, which should be the Python home - (This should only be used for bdist_wininst-installed 
scripts, because - easy_install-ed scripts use the absolute path to python[w].exe - */ - _splitpath(script, drive, dir, fname, ext); - result = dir + strlen(dir) -1; - if (*result == '\\') result--; - while (*result != '\\' && result>=dir) *result-- = 0; - _makepath(path, drive, dir, exename, NULL); - return loadable_exe(path); -} - - -char **parse_argv(char *cmdline, int *argc) -{ - /* Parse a command line in-place using MS C rules */ - - char **result = calloc(strlen(cmdline), sizeof(char *)); - char *output = cmdline; - char c; - int nb = 0; - int iq = 0; - *argc = 0; - - result[0] = output; - while (isspace(*cmdline)) cmdline++; /* skip leading spaces */ - - do { - c = *cmdline++; - if (!c || (isspace(c) && !iq)) { - while (nb) {*output++ = '\\'; nb--; } - *output++ = 0; - result[++*argc] = output; - if (!c) return result; - while (isspace(*cmdline)) cmdline++; /* skip leading spaces */ - if (!*cmdline) return result; /* avoid empty arg if trailing ws */ - continue; - } - if (c == '\\') - ++nb; /* count \'s */ - else { - if (c == '"') { - if (!(nb & 1)) { iq = !iq; c = 0; } /* skip " unless odd # of \ */ - nb = nb >> 1; /* cut \'s in half */ - } - while (nb) {*output++ = '\\'; nb--; } - if (c) *output++ = c; - } - } while (1); -} - -void pass_control_to_child(DWORD control_type) { - /* - * distribute-issue207 - * passes the control event to child process (Python) - */ - if (!child_pid) { - return; - } - GenerateConsoleCtrlEvent(child_pid,0); -} - -BOOL control_handler(DWORD control_type) { - /* - * distribute-issue207 - * control event handler callback function - */ - switch (control_type) { - case CTRL_C_EVENT: - pass_control_to_child(0); - break; - } - return TRUE; -} - -int create_and_wait_for_subprocess(char* command) { - /* - * distribute-issue207 - * launches child process (Python) - */ - DWORD return_value = 0; - LPSTR commandline = command; - STARTUPINFOA s_info; - PROCESS_INFORMATION p_info; - ZeroMemory(&p_info, sizeof(p_info)); - ZeroMemory(&s_info, 
sizeof(s_info)); - s_info.cb = sizeof(STARTUPINFO); - // set-up control handler callback funciotn - SetConsoleCtrlHandler((PHANDLER_ROUTINE) control_handler, TRUE); - if (!CreateProcessA(NULL, commandline, NULL, NULL, TRUE, 0, NULL, NULL, &s_info, &p_info)) { - fprintf(stderr, "failed to create process.\n"); - return 0; - } - child_pid = p_info.dwProcessId; - // wait for Python to exit - WaitForSingleObject(p_info.hProcess, INFINITE); - if (!GetExitCodeProcess(p_info.hProcess, &return_value)) { - fprintf(stderr, "failed to get exit code from process.\n"); - return 0; - } - return return_value; -} - -char* join_executable_and_args(char *executable, char **args, int argc) -{ - /* - * distribute-issue207 - * CreateProcess needs a long string of the executable and command-line arguments, - * so we need to convert it from the args that was built - */ - int len,counter; - char* cmdline; - - len=strlen(executable)+2; - for (counter=1; counterscript && *end != '.') - *end-- = '\0'; - *end-- = '\0'; - strcat(script, (GUI ? "-script.pyw" : "-script.py")); - - /* figure out the target python executable */ - - scriptf = open(script, O_RDONLY); - if (scriptf == -1) { - return fail("Cannot open %s\n", script); - } - end = python + read(scriptf, python, sizeof(python)); - close(scriptf); - - ptr = python-1; - while(++ptr < end && *ptr && *ptr!='\n' && *ptr!='\r') {;} - - *ptr-- = '\0'; - - if (strncmp(python, "#!", 2)) { - /* default to python.exe if no #! header */ - strcpy(python, "#!python.exe"); - } - - parsedargs = parse_argv(python+2, &parsedargc); - - /* Using spawnv() can fail strangely if you e.g. 
find the Cygwin - Python, so we'll make sure Windows can find and load it */ - - ptr = find_exe(parsedargs[0], script); - if (!ptr) { - return fail("Cannot find Python executable %s\n", parsedargs[0]); - } - - /* printf("Python executable: %s\n", ptr); */ - - /* Argument array needs to be - parsedargc + argc, plus 1 for null sentinel */ - - newargs = (char **)calloc(parsedargc + argc + 1, sizeof(char *)); - newargsp = newargs; - - *newargsp++ = quoted(ptr); - for (i = 1; i= (3, 3) and sys.implementation.name == "cpython": - import importlib._bootstrap as importlib_bootstrap -else: - importlib_bootstrap = None - -try: - import parser -except ImportError: - pass - -def _bypass_ensure_directory(name, mode=0x1FF): # 0777 - # Sandbox-bypassing version of ensure_directory() - if not WRITE_SUPPORT: - raise IOError('"os.mkdir" not supported on this platform.') - dirname, filename = split(name) - if dirname and filename and not isdir(dirname): - _bypass_ensure_directory(dirname) - mkdir(dirname, mode) - - -_state_vars = {} - -def _declare_state(vartype, **kw): - g = globals() - for name, val in kw.items(): - g[name] = val - _state_vars[name] = vartype - -def __getstate__(): - state = {} - g = globals() - for k, v in _state_vars.items(): - state[k] = g['_sget_'+v](g[k]) - return state - -def __setstate__(state): - g = globals() - for k, v in state.items(): - g['_sset_'+_state_vars[k]](k, g[k], v) - return state - -def _sget_dict(val): - return val.copy() - -def _sset_dict(key, ob, state): - ob.clear() - ob.update(state) - -def _sget_object(val): - return val.__getstate__() - -def _sset_object(key, ob, state): - ob.__setstate__(state) - -_sget_none = _sset_none = lambda *args: None - - -def get_supported_platform(): - """Return this platform's maximum compatible version. - - distutils.util.get_platform() normally reports the minimum version - of Mac OS X that would be required to *use* extensions produced by - distutils. 
But what we want when checking compatibility is to know the - version of Mac OS X that we are *running*. To allow usage of packages that - explicitly require a newer version of Mac OS X, we must also know the - current version of the OS. - - If this condition occurs for any other platform with a version in its - platform strings, this function should be extended accordingly. - """ - plat = get_build_platform() - m = macosVersionString.match(plat) - if m is not None and sys.platform == "darwin": - try: - plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) - except ValueError: - pass # not Mac OS X - return plat - -__all__ = [ - # Basic resource access and distribution/entry point discovery - 'require', 'run_script', 'get_provider', 'get_distribution', - 'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points', - 'resource_string', 'resource_stream', 'resource_filename', - 'resource_listdir', 'resource_exists', 'resource_isdir', - - # Environmental control - 'declare_namespace', 'working_set', 'add_activation_listener', - 'find_distributions', 'set_extraction_path', 'cleanup_resources', - 'get_default_cache', - - # Primary implementation classes - 'Environment', 'WorkingSet', 'ResourceManager', - 'Distribution', 'Requirement', 'EntryPoint', - - # Exceptions - 'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra', - 'ExtractionError', - - # Parsing functions and string utilities - 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', - 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', - 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', - - # filesystem utilities - 'ensure_directory', 'normalize_path', - - # Distribution "precedence" constants - 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', - - # "Provider" interfaces, implementations, and registration/lookup APIs - 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', - 
'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', - 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', - 'register_finder', 'register_namespace_handler', 'register_loader_type', - 'fixup_namespace_packages', 'get_importer', - - # Deprecated/backward compatibility only - 'run_main', 'AvailableDistributions', -] - -class ResolutionError(Exception): - """Abstract base for dependency resolution errors""" - def __repr__(self): - return self.__class__.__name__+repr(self.args) - -class VersionConflict(ResolutionError): - """An already-installed version conflicts with the requested version""" - -class DistributionNotFound(ResolutionError): - """A requested distribution was not found""" - -class UnknownExtra(ResolutionError): - """Distribution doesn't have an "extra feature" of the given name""" -_provider_factories = {} - -PY_MAJOR = sys.version[:3] -EGG_DIST = 3 -BINARY_DIST = 2 -SOURCE_DIST = 1 -CHECKOUT_DIST = 0 -DEVELOP_DIST = -1 - -def register_loader_type(loader_type, provider_factory): - """Register `provider_factory` to make providers for `loader_type` - - `loader_type` is the type or class of a PEP 302 ``module.__loader__``, - and `provider_factory` is a function that, passed a *module* object, - returns an ``IResourceProvider`` for that module. 
- """ - _provider_factories[loader_type] = provider_factory - -def get_provider(moduleOrReq): - """Return an IResourceProvider for the named module or requirement""" - if isinstance(moduleOrReq,Requirement): - return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] - try: - module = sys.modules[moduleOrReq] - except KeyError: - __import__(moduleOrReq) - module = sys.modules[moduleOrReq] - loader = getattr(module, '__loader__', None) - return _find_adapter(_provider_factories, loader)(module) - -def _macosx_vers(_cache=[]): - if not _cache: - import platform - version = platform.mac_ver()[0] - # fallback for MacPorts - if version == '': - import plistlib - plist = '/System/Library/CoreServices/SystemVersion.plist' - if os.path.exists(plist): - if hasattr(plistlib, 'readPlist'): - plist_content = plistlib.readPlist(plist) - if 'ProductVersion' in plist_content: - version = plist_content['ProductVersion'] - - _cache.append(version.split('.')) - return _cache[0] - -def _macosx_arch(machine): - return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine) - -def get_build_platform(): - """Return this platform's string for platform-specific distributions - - XXX Currently this is the same as ``distutils.util.get_platform()``, but it - needs some hacks for Linux and Mac OS X. 
- """ - try: - # Python 2.7 or >=3.2 - from sysconfig import get_platform - except ImportError: - from distutils.util import get_platform - - plat = get_platform() - if sys.platform == "darwin" and not plat.startswith('macosx-'): - try: - version = _macosx_vers() - machine = os.uname()[4].replace(" ", "_") - return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), - _macosx_arch(machine)) - except ValueError: - # if someone is running a non-Mac darwin system, this will fall - # through to the default implementation - pass - return plat - -macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") -darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") -get_platform = get_build_platform # XXX backward compat - - -def compatible_platforms(provided,required): - """Can code for the `provided` platform run on the `required` platform? - - Returns true if either platform is ``None``, or the platforms are equal. - - XXX Needs compatibility checks for Linux and other unixy OSes. - """ - if provided is None or required is None or provided==required: - return True # easy case - - # Mac OS X special cases - reqMac = macosVersionString.match(required) - if reqMac: - provMac = macosVersionString.match(provided) - - # is this a Mac package? - if not provMac: - # this is backwards compatibility for packages built before - # setuptools 0.6. All packages built after this point will - # use the new macosx designation. - provDarwin = darwinVersionString.match(provided) - if provDarwin: - dversion = int(provDarwin.group(1)) - macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) - if dversion == 7 and macosversion >= "10.3" or \ - dversion == 8 and macosversion >= "10.4": - - #import warnings - #warnings.warn("Mac eggs should be rebuilt to " - # "use the macosx designation instead of darwin.", - # category=DeprecationWarning) - return True - return False # egg isn't macosx or legacy darwin - - # are they the same major version and machine type? 
- if provMac.group(1) != reqMac.group(1) or \ - provMac.group(3) != reqMac.group(3): - return False - - # is the required OS major update >= the provided one? - if int(provMac.group(2)) > int(reqMac.group(2)): - return False - - return True - - # XXX Linux and other platforms' special cases should go here - return False - - -def run_script(dist_spec, script_name): - """Locate distribution `dist_spec` and run its `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - require(dist_spec)[0].run_script(script_name, ns) - -run_main = run_script # backward compatibility - -def get_distribution(dist): - """Return a current distribution object for a Requirement or string""" - if isinstance(dist,basestring): dist = Requirement.parse(dist) - if isinstance(dist,Requirement): dist = get_provider(dist) - if not isinstance(dist,Distribution): - raise TypeError("Expected string, Requirement, or Distribution", dist) - return dist - -def load_entry_point(dist, group, name): - """Return `name` entry point of `group` for `dist` or raise ImportError""" - return get_distribution(dist).load_entry_point(group, name) - -def get_entry_map(dist, group=None): - """Return the entry point map for `group`, or the full entry map""" - return get_distribution(dist).get_entry_map(group) - -def get_entry_info(dist, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return get_distribution(dist).get_entry_info(group, name) - - -class IMetadataProvider: - - def has_metadata(name): - """Does the package's distribution contain the named metadata?""" - - def get_metadata(name): - """The named metadata resource as a string""" - - def get_metadata_lines(name): - """Yield named metadata resource as list of non-blank non-comment lines - - Leading and trailing whitespace is stripped from each line, and lines - with ``#`` as the first non-blank character are omitted.""" - - def metadata_isdir(name): - """Is the 
named metadata a directory? (like ``os.path.isdir()``)""" - - def metadata_listdir(name): - """List of metadata names in the directory (like ``os.listdir()``)""" - - def run_script(script_name, namespace): - """Execute the named script in the supplied namespace dictionary""" - - -class IResourceProvider(IMetadataProvider): - """An object that provides access to package resources""" - - def get_resource_filename(manager, resource_name): - """Return a true filesystem path for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_stream(manager, resource_name): - """Return a readable file-like object for `resource_name` - - `manager` must be an ``IResourceManager``""" - - def get_resource_string(manager, resource_name): - """Return a string containing the contents of `resource_name` - - `manager` must be an ``IResourceManager``""" - - def has_resource(resource_name): - """Does the package contain the named resource?""" - - def resource_isdir(resource_name): - """Is the named resource a directory? (like ``os.path.isdir()``)""" - - def resource_listdir(resource_name): - """List of resource names in the directory (like ``os.listdir()``)""" - - -class WorkingSet(object): - """A collection of active distributions on sys.path (or a similar list)""" - - def __init__(self, entries=None): - """Create working set from list of path entries (default=sys.path)""" - self.entries = [] - self.entry_keys = {} - self.by_key = {} - self.callbacks = [] - - if entries is None: - entries = sys.path - - for entry in entries: - self.add_entry(entry) - - def add_entry(self, entry): - """Add a path item to ``.entries``, finding any distributions on it - - ``find_distributions(entry, True)`` is used to find distributions - corresponding to the path entry, and they are added. `entry` is - always appended to ``.entries``, even if it is already present. 
- (This is because ``sys.path`` can contain the same value more than - once, and the ``.entries`` of the ``sys.path`` WorkingSet should always - equal ``sys.path``.) - """ - self.entry_keys.setdefault(entry, []) - self.entries.append(entry) - for dist in find_distributions(entry, True): - self.add(dist, entry, False) - - def __contains__(self,dist): - """True if `dist` is the active distribution for its project""" - return self.by_key.get(dist.key) == dist - - def find(self, req): - """Find a distribution matching requirement `req` - - If there is an active distribution for the requested project, this - returns it as long as it meets the version requirement specified by - `req`. But, if there is an active distribution for the project and it - does *not* meet the `req` requirement, ``VersionConflict`` is raised. - If there is no active distribution for the requested project, ``None`` - is returned. - """ - dist = self.by_key.get(req.key) - if dist is not None and dist not in req: - raise VersionConflict(dist,req) # XXX add more info - else: - return dist - - def iter_entry_points(self, group, name=None): - """Yield entry point objects from `group` matching `name` - - If `name` is None, yields all entry points in `group` from all - distributions in the working set, otherwise only ones matching - both `group` and `name` are yielded (in distribution order). 
- """ - for dist in self: - entries = dist.get_entry_map(group) - if name is None: - for ep in entries.values(): - yield ep - elif name in entries: - yield entries[name] - - def run_script(self, requires, script_name): - """Locate distribution for `requires` and run `script_name` script""" - ns = sys._getframe(1).f_globals - name = ns['__name__'] - ns.clear() - ns['__name__'] = name - self.require(requires)[0].run_script(script_name, ns) - - def __iter__(self): - """Yield distributions for non-duplicate projects in the working set - - The yield order is the order in which the items' path entries were - added to the working set. - """ - seen = {} - for item in self.entries: - if item not in self.entry_keys: - # workaround a cache issue - continue - - for key in self.entry_keys[item]: - if key not in seen: - seen[key]=1 - yield self.by_key[key] - - def add(self, dist, entry=None, insert=True, replace=False): - """Add `dist` to working set, associated with `entry` - - If `entry` is unspecified, it defaults to the ``.location`` of `dist`. - On exit from this routine, `entry` is added to the end of the working - set's ``.entries`` (if it wasn't already present). - - `dist` is only added to the working set if it's for a project that - doesn't already have a distribution in the set, unless `replace=True`. - If it's added, any callbacks registered with the ``subscribe()`` method - will be called. 
- """ - if insert: - dist.insert_on(self.entries, entry) - - if entry is None: - entry = dist.location - keys = self.entry_keys.setdefault(entry,[]) - keys2 = self.entry_keys.setdefault(dist.location,[]) - if not replace and dist.key in self.by_key: - return # ignore hidden distros - - self.by_key[dist.key] = dist - if dist.key not in keys: - keys.append(dist.key) - if dist.key not in keys2: - keys2.append(dist.key) - self._added_new(dist) - - def resolve(self, requirements, env=None, installer=None, - replace_conflicting=False): - """List all distributions needed to (recursively) meet `requirements` - - `requirements` must be a sequence of ``Requirement`` objects. `env`, - if supplied, should be an ``Environment`` instance. If - not supplied, it defaults to all distributions available within any - entry or distribution in the working set. `installer`, if supplied, - will be invoked with each requirement that cannot be met by an - already-installed distribution; it should return a ``Distribution`` or - ``None``. - - Unless `replace_conflicting=True`, raises a VersionConflict exception if - any requirements are found on the path that have the correct name but - the wrong version. Otherwise, if an `installer` is supplied it will be - invoked to obtain the correct version of the requirement and activate - it. 
- """ - - requirements = list(requirements)[::-1] # set up the stack - processed = {} # set of processed requirements - best = {} # key -> dist - to_activate = [] - - while requirements: - req = requirements.pop(0) # process dependencies breadth-first - if req in processed: - # Ignore cyclic or redundant dependencies - continue - dist = best.get(req.key) - if dist is None: - # Find the best distribution and add it to the map - dist = self.by_key.get(req.key) - if dist is None or (dist not in req and replace_conflicting): - ws = self - if env is None: - if dist is None: - env = Environment(self.entries) - else: - # Use an empty environment and workingset to avoid - # any further conflicts with the conflicting - # distribution - env = Environment([]) - ws = WorkingSet([]) - dist = best[req.key] = env.best_match(req, ws, installer) - if dist is None: - #msg = ("The '%s' distribution was not found on this " - # "system, and is required by this application.") - #raise DistributionNotFound(msg % req) - - # unfortunately, zc.buildout uses a str(err) - # to get the name of the distribution here.. - raise DistributionNotFound(req) - to_activate.append(dist) - if dist not in req: - # Oops, the "best" so far conflicts with a dependency - raise VersionConflict(dist,req) # XXX put more info here - requirements.extend(dist.requires(req.extras)[::-1]) - processed[req] = True - - return to_activate # return list of distros to activate - - def find_plugins(self, plugin_env, full_env=None, installer=None, - fallback=True): - """Find all activatable distributions in `plugin_env` - - Example usage:: - - distributions, errors = working_set.find_plugins( - Environment(plugin_dirlist) - ) - map(working_set.add, distributions) # add plugins+libs to sys.path - print 'Could not load', errors # display errors - - The `plugin_env` should be an ``Environment`` instance that contains - only distributions that are in the project's "plugin directory" or - directories. 
The `full_env`, if supplied, should be an ``Environment`` - contains all currently-available distributions. If `full_env` is not - supplied, one is created automatically from the ``WorkingSet`` this - method is called on, which will typically mean that every directory on - ``sys.path`` will be scanned for distributions. - - `installer` is a standard installer callback as used by the - ``resolve()`` method. The `fallback` flag indicates whether we should - attempt to resolve older versions of a plugin if the newest version - cannot be resolved. - - This method returns a 2-tuple: (`distributions`, `error_info`), where - `distributions` is a list of the distributions found in `plugin_env` - that were loadable, along with any other distributions that are needed - to resolve their dependencies. `error_info` is a dictionary mapping - unloadable plugin distributions to an exception instance describing the - error that occurred. Usually this will be a ``DistributionNotFound`` or - ``VersionConflict`` instance. 
- """ - - plugin_projects = list(plugin_env) - plugin_projects.sort() # scan project names in alphabetic order - - error_info = {} - distributions = {} - - if full_env is None: - env = Environment(self.entries) - env += plugin_env - else: - env = full_env + plugin_env - - shadow_set = self.__class__([]) - list(map(shadow_set.add, self)) # put all our entries in shadow_set - - for project_name in plugin_projects: - - for dist in plugin_env[project_name]: - - req = [dist.as_requirement()] - - try: - resolvees = shadow_set.resolve(req, env, installer) - - except ResolutionError: - v = sys.exc_info()[1] - error_info[dist] = v # save error info - if fallback: - continue # try the next older version of project - else: - break # give up on this project, keep going - - else: - list(map(shadow_set.add, resolvees)) - distributions.update(dict.fromkeys(resolvees)) - - # success, no need to try any more versions of this project - break - - distributions = list(distributions) - distributions.sort() - - return distributions, error_info - - def require(self, *requirements): - """Ensure that distributions matching `requirements` are activated - - `requirements` must be a string or a (possibly-nested) sequence - thereof, specifying the distributions and versions required. The - return value is a sequence of the distributions that needed to be - activated to fulfill the requirements; all relevant distributions are - included, even if they were already activated in this working set. 
- """ - needed = self.resolve(parse_requirements(requirements)) - - for dist in needed: - self.add(dist) - - return needed - - def subscribe(self, callback): - """Invoke `callback` for all distributions (including existing ones)""" - if callback in self.callbacks: - return - self.callbacks.append(callback) - for dist in self: - callback(dist) - - def _added_new(self, dist): - for callback in self.callbacks: - callback(dist) - - def __getstate__(self): - return ( - self.entries[:], self.entry_keys.copy(), self.by_key.copy(), - self.callbacks[:] - ) - - def __setstate__(self, e_k_b_c): - entries, keys, by_key, callbacks = e_k_b_c - self.entries = entries[:] - self.entry_keys = keys.copy() - self.by_key = by_key.copy() - self.callbacks = callbacks[:] - - -class Environment(object): - """Searchable snapshot of distributions on a search path""" - - def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR): - """Snapshot distributions available on a search path - - Any distributions found on `search_path` are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. - - `platform` is an optional string specifying the name of the platform - that platform-specific distributions must be compatible with. If - unspecified, it defaults to the current platform. `python` is an - optional string naming the desired version of Python (e.g. ``'3.3'``); - it defaults to the current version. - - You may explicitly set `platform` (and/or `python`) to ``None`` if you - wish to map *all* distributions, not just those compatible with the - running platform or Python version. - """ - self._distmap = {} - self._cache = {} - self.platform = platform - self.python = python - self.scan(search_path) - - def can_add(self, dist): - """Is distribution `dist` acceptable for this environment? 
- - The distribution must match the platform and python version - requirements specified when this environment was created, or False - is returned. - """ - return (self.python is None or dist.py_version is None - or dist.py_version==self.python) \ - and compatible_platforms(dist.platform,self.platform) - - def remove(self, dist): - """Remove `dist` from the environment""" - self._distmap[dist.key].remove(dist) - - def scan(self, search_path=None): - """Scan `search_path` for distributions usable in this environment - - Any distributions found are added to the environment. - `search_path` should be a sequence of ``sys.path`` items. If not - supplied, ``sys.path`` is used. Only distributions conforming to - the platform/python version defined at initialization are added. - """ - if search_path is None: - search_path = sys.path - - for item in search_path: - for dist in find_distributions(item): - self.add(dist) - - def __getitem__(self,project_name): - """Return a newest-to-oldest list of distributions for `project_name` - """ - try: - return self._cache[project_name] - except KeyError: - project_name = project_name.lower() - if project_name not in self._distmap: - return [] - - if project_name not in self._cache: - dists = self._cache[project_name] = self._distmap[project_name] - _sort_dists(dists) - - return self._cache[project_name] - - def add(self,dist): - """Add `dist` if we ``can_add()`` it and it isn't already added""" - if self.can_add(dist) and dist.has_version(): - dists = self._distmap.setdefault(dist.key,[]) - if dist not in dists: - dists.append(dist) - if dist.key in self._cache: - _sort_dists(self._cache[dist.key]) - - def best_match(self, req, working_set, installer=None): - """Find distribution best matching `req` and usable on `working_set` - - This calls the ``find(req)`` method of the `working_set` to see if a - suitable distribution is already active. 
(This may raise - ``VersionConflict`` if an unsuitable version of the project is already - active in the specified `working_set`.) If a suitable distribution - isn't active, this method returns the newest distribution in the - environment that meets the ``Requirement`` in `req`. If no suitable - distribution is found, and `installer` is supplied, then the result of - calling the environment's ``obtain(req, installer)`` method will be - returned. - """ - dist = working_set.find(req) - if dist is not None: - return dist - for dist in self[req.key]: - if dist in req: - return dist - return self.obtain(req, installer) # try and download/install - - def obtain(self, requirement, installer=None): - """Obtain a distribution matching `requirement` (e.g. via download) - - Obtain a distro that matches requirement (e.g. via download). In the - base ``Environment`` class, this routine just returns - ``installer(requirement)``, unless `installer` is None, in which case - None is returned instead. This method is a hook that allows subclasses - to attempt other ways of obtaining a distribution before falling back - to the `installer` argument.""" - if installer is not None: - return installer(requirement) - - def __iter__(self): - """Yield the unique project names of the available distributions""" - for key in self._distmap.keys(): - if self[key]: yield key - - def __iadd__(self, other): - """In-place addition of a distribution or environment""" - if isinstance(other,Distribution): - self.add(other) - elif isinstance(other,Environment): - for project in other: - for dist in other[project]: - self.add(dist) - else: - raise TypeError("Can't add %r to environment" % (other,)) - return self - - def __add__(self, other): - """Add an environment or distribution to an environment""" - new = self.__class__([], platform=None, python=None) - for env in self, other: - new += env - return new - - -AvailableDistributions = Environment # XXX backward compatibility - - -class 
ExtractionError(RuntimeError): - """An error occurred extracting a resource - - The following attributes are available from instances of this exception: - - manager - The resource manager that raised this exception - - cache_path - The base directory for resource extraction - - original_error - The exception instance that caused extraction to fail - """ - - -class ResourceManager: - """Manage resource extraction and packages""" - extraction_path = None - - def __init__(self): - self.cached_files = {} - - def resource_exists(self, package_or_requirement, resource_name): - """Does the named resource exist?""" - return get_provider(package_or_requirement).has_resource(resource_name) - - def resource_isdir(self, package_or_requirement, resource_name): - """Is the named resource an existing directory?""" - return get_provider(package_or_requirement).resource_isdir( - resource_name - ) - - def resource_filename(self, package_or_requirement, resource_name): - """Return a true filesystem path for specified resource""" - return get_provider(package_or_requirement).get_resource_filename( - self, resource_name - ) - - def resource_stream(self, package_or_requirement, resource_name): - """Return a readable file-like object for specified resource""" - return get_provider(package_or_requirement).get_resource_stream( - self, resource_name - ) - - def resource_string(self, package_or_requirement, resource_name): - """Return specified resource as a string""" - return get_provider(package_or_requirement).get_resource_string( - self, resource_name - ) - - def resource_listdir(self, package_or_requirement, resource_name): - """List the contents of the named resource directory""" - return get_provider(package_or_requirement).resource_listdir( - resource_name - ) - - def extraction_error(self): - """Give an error message for problems extracting file(s)""" - - old_exc = sys.exc_info()[1] - cache_path = self.extraction_path or get_default_cache() - - err = ExtractionError("""Can't extract 
file(s) to egg cache - -The following error occurred while trying to extract file(s) to the Python egg -cache: - - %s - -The Python egg cache directory is currently set to: - - %s - -Perhaps your account does not have write access to this directory? You can -change the cache directory by setting the PYTHON_EGG_CACHE environment -variable to point to an accessible directory. -""" % (old_exc, cache_path) - ) - err.manager = self - err.cache_path = cache_path - err.original_error = old_exc - raise err - - def get_cache_path(self, archive_name, names=()): - """Return absolute location in cache for `archive_name` and `names` - - The parent directory of the resulting path will be created if it does - not already exist. `archive_name` should be the base filename of the - enclosing egg (which may not be the name of the enclosing zipfile!), - including its ".egg" extension. `names`, if provided, should be a - sequence of path name parts "under" the egg's extraction location. - - This method should only be called by resource providers that need to - obtain an extraction location, and only for names they intend to - extract, as it tracks the generated names for possible cleanup later. - """ - extract_path = self.extraction_path or get_default_cache() - target_path = os.path.join(extract_path, archive_name+'-tmp', *names) - try: - _bypass_ensure_directory(target_path) - except: - self.extraction_error() - - self._warn_unsafe_extraction_path(extract_path) - - self.cached_files[target_path] = 1 - return target_path - - @staticmethod - def _warn_unsafe_extraction_path(path): - """ - If the default extraction path is overridden and set to an insecure - location, such as /tmp, it opens up an opportunity for an attacker to - replace an extracted file with an unauthorized payload. Warn the user - if a known insecure location is used. - - See Distribute #375 for more details. 
- """ - if os.name == 'nt' and not path.startswith(os.environ['windir']): - # On Windows, permissions are generally restrictive by default - # and temp directories are not writable by other users, so - # bypass the warning. - return - mode = os.stat(path).st_mode - if mode & stat.S_IWOTH or mode & stat.S_IWGRP: - msg = ("%s is writable by group/others and vulnerable to attack " - "when " - "used with get_resource_filename. Consider a more secure " - "location (set with .set_extraction_path or the " - "PYTHON_EGG_CACHE environment variable)." % path) - warnings.warn(msg, UserWarning) - - def postprocess(self, tempname, filename): - """Perform any platform-specific postprocessing of `tempname` - - This is where Mac header rewrites should be done; other platforms don't - have anything special they should do. - - Resource providers should call this method ONLY after successfully - extracting a compressed resource. They must NOT call it on resources - that are already in the filesystem. - - `tempname` is the current (temporary) name of the file, and `filename` - is the name it will be renamed to by the caller after this routine - returns. - """ - - if os.name == 'posix': - # Make the resource executable - mode = ((os.stat(tempname).st_mode) | 0x16D) & 0xFFF # 0555, 07777 - os.chmod(tempname, mode) - - def set_extraction_path(self, path): - """Set the base path where resources will be extracted to, if needed. - - If you do not call this routine before any extractions take place, the - path defaults to the return value of ``get_default_cache()``. (Which - is based on the ``PYTHON_EGG_CACHE`` environment variable, with various - platform-specific fallbacks. See that routine's documentation for more - details.) - - Resources are extracted to subdirectories of this path based upon - information given by the ``IResourceProvider``. You may set this to a - temporary directory, but then you must call ``cleanup_resources()`` to - delete the extracted files when done. 
There is no guarantee that - ``cleanup_resources()`` will be able to remove all extracted files. - - (Note: you may not change the extraction path for a given resource - manager once resources have been extracted, unless you first call - ``cleanup_resources()``.) - """ - if self.cached_files: - raise ValueError( - "Can't change extraction path, files already extracted" - ) - - self.extraction_path = path - - def cleanup_resources(self, force=False): - """ - Delete all extracted resource files and directories, returning a list - of the file and directory names that could not be successfully removed. - This function does not have any concurrency protection, so it should - generally only be called when the extraction path is a temporary - directory exclusive to a single process. This method is not - automatically called; you must call it explicitly or register it as an - ``atexit`` function if you wish to ensure cleanup of a temporary - directory used for extractions. - """ - # XXX - -def get_default_cache(): - """Determine the default cache location - - This returns the ``PYTHON_EGG_CACHE`` environment variable, if set. - Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the - "Application Data" directory. On all other systems, it's "~/.python-eggs". - """ - try: - return os.environ['PYTHON_EGG_CACHE'] - except KeyError: - pass - - if os.name!='nt': - return os.path.expanduser('~/.python-eggs') - - app_data = 'Application Data' # XXX this may be locale-specific! 
- app_homes = [ - (('APPDATA',), None), # best option, should be locale-safe - (('USERPROFILE',), app_data), - (('HOMEDRIVE','HOMEPATH'), app_data), - (('HOMEPATH',), app_data), - (('HOME',), None), - (('WINDIR',), app_data), # 95/98/ME - ] - - for keys, subdir in app_homes: - dirname = '' - for key in keys: - if key in os.environ: - dirname = os.path.join(dirname, os.environ[key]) - else: - break - else: - if subdir: - dirname = os.path.join(dirname,subdir) - return os.path.join(dirname, 'Python-Eggs') - else: - raise RuntimeError( - "Please set the PYTHON_EGG_CACHE enviroment variable" - ) - -def safe_name(name): - """Convert an arbitrary string to a standard distribution name - - Any runs of non-alphanumeric/. characters are replaced with a single '-'. - """ - return re.sub('[^A-Za-z0-9.]+', '-', name) - - -def safe_version(version): - """Convert an arbitrary string to a standard version string - - Spaces become dots, and all other non-alphanumeric characters become - dashes, with runs of multiple dashes condensed to a single dash. - """ - version = version.replace(' ','.') - return re.sub('[^A-Za-z0-9.]+', '-', version) - - -def safe_extra(extra): - """Convert an arbitrary string to a standard 'extra' name - - Any runs of non-alphanumeric characters are replaced with a single '_', - and the result is always lowercased. - """ - return re.sub('[^A-Za-z0-9.]+', '_', extra).lower() - - -def to_filename(name): - """Convert a project or version name to its filename-escaped form - - Any '-' characters are currently replaced with '_'. 
- """ - return name.replace('-','_') - - -class MarkerEvaluation(object): - values = { - 'os_name': lambda: os.name, - 'sys_platform': lambda: sys.platform, - 'python_full_version': lambda: sys.version.split()[0], - 'python_version': lambda:'%s.%s' % (sys.version_info[0], sys.version_info[1]), - 'platform_version': platform.version, - 'platform_machine': platform.machine, - 'python_implementation': platform.python_implementation, - } - - @classmethod - def is_invalid_marker(cls, text): - """ - Validate text as a PEP 426 environment marker; return an exception - if invalid or False otherwise. - """ - try: - cls.evaluate_marker(text) - except SyntaxError: - return cls.normalize_exception(sys.exc_info()[1]) - return False - - @staticmethod - def normalize_exception(exc): - """ - Given a SyntaxError from a marker evaluation, normalize the error message: - - Remove indications of filename and line number. - - Replace platform-specific error messages with standard error messages. - """ - subs = { - 'unexpected EOF while parsing': 'invalid syntax', - 'parenthesis is never closed': 'invalid syntax', - } - exc.filename = None - exc.lineno = None - exc.msg = subs.get(exc.msg, exc.msg) - return exc - - @classmethod - def and_test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! - return functools.reduce(operator.and_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) - - @classmethod - def test(cls, nodelist): - # MUST NOT short-circuit evaluation, or invalid syntax can be skipped! 
- return functools.reduce(operator.or_, [cls.interpret(nodelist[i]) for i in range(1,len(nodelist),2)]) - - @classmethod - def atom(cls, nodelist): - t = nodelist[1][0] - if t == token.LPAR: - if nodelist[2][0] == token.RPAR: - raise SyntaxError("Empty parentheses") - return cls.interpret(nodelist[2]) - raise SyntaxError("Language feature not supported in environment markers") - - @classmethod - def comparison(cls, nodelist): - if len(nodelist)>4: - raise SyntaxError("Chained comparison not allowed in environment markers") - comp = nodelist[2][1] - cop = comp[1] - if comp[0] == token.NAME: - if len(nodelist[2]) == 3: - if cop == 'not': - cop = 'not in' - else: - cop = 'is not' - try: - cop = cls.get_op(cop) - except KeyError: - raise SyntaxError(repr(cop)+" operator not allowed in environment markers") - return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3])) - - @classmethod - def get_op(cls, op): - ops = { - symbol.test: cls.test, - symbol.and_test: cls.and_test, - symbol.atom: cls.atom, - symbol.comparison: cls.comparison, - 'not in': lambda x, y: x not in y, - 'in': lambda x, y: x in y, - '==': operator.eq, - '!=': operator.ne, - } - if hasattr(symbol, 'or_test'): - ops[symbol.or_test] = cls.test - return ops[op] - - @classmethod - def evaluate_marker(cls, text, extra=None): - """ - Evaluate a PEP 426 environment marker on CPython 2.4+. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. - - This implementation uses the 'parser' module, which is not implemented on - Jython and has been superseded by the 'ast' module in Python 2.6 and - later. - """ - return cls.interpret(parser.expr(text).totuple(1)[1]) - - @classmethod - def _markerlib_evaluate(cls, text): - """ - Evaluate a PEP 426 environment marker using markerlib. - Return a boolean indicating the marker result in this environment. - Raise SyntaxError if marker is invalid. 
- """ - import _markerlib - # markerlib implements Metadata 1.2 (PEP 345) environment markers. - # Translate the variables to Metadata 2.0 (PEP 426). - env = _markerlib.default_environment() - for key in env.keys(): - new_key = key.replace('.', '_') - env[new_key] = env.pop(key) - try: - result = _markerlib.interpret(text, env) - except NameError: - e = sys.exc_info()[1] - raise SyntaxError(e.args[0]) - return result - - if 'parser' not in globals(): - # Fall back to less-complete _markerlib implementation if 'parser' module - # is not available. - evaluate_marker = _markerlib_evaluate - - @classmethod - def interpret(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - try: - op = cls.get_op(nodelist[0]) - except KeyError: - raise SyntaxError("Comparison or logical expression expected") - return op(nodelist) - - @classmethod - def evaluate(cls, nodelist): - while len(nodelist)==2: nodelist = nodelist[1] - kind = nodelist[0] - name = nodelist[1] - if kind==token.NAME: - try: - op = cls.values[name] - except KeyError: - raise SyntaxError("Unknown name %r" % name) - return op() - if kind==token.STRING: - s = nodelist[1] - if s[:1] not in "'\"" or s.startswith('"""') or s.startswith("'''") \ - or '\\' in s: - raise SyntaxError( - "Only plain strings allowed in environment markers") - return s[1:-1] - raise SyntaxError("Language feature not supported in environment markers") - -invalid_marker = MarkerEvaluation.is_invalid_marker -evaluate_marker = MarkerEvaluation.evaluate_marker - -class NullProvider: - """Try to implement resources and metadata for arbitrary PEP 302 loaders""" - - egg_name = None - egg_info = None - loader = None - - def __init__(self, module): - self.loader = getattr(module, '__loader__', None) - self.module_path = os.path.dirname(getattr(module, '__file__', '')) - - def get_resource_filename(self, manager, resource_name): - return self._fn(self.module_path, resource_name) - - def get_resource_stream(self, manager, resource_name): - 
return BytesIO(self.get_resource_string(manager, resource_name)) - - def get_resource_string(self, manager, resource_name): - return self._get(self._fn(self.module_path, resource_name)) - - def has_resource(self, resource_name): - return self._has(self._fn(self.module_path, resource_name)) - - def has_metadata(self, name): - return self.egg_info and self._has(self._fn(self.egg_info,name)) - - if sys.version_info <= (3,): - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)) - else: - def get_metadata(self, name): - if not self.egg_info: - return "" - return self._get(self._fn(self.egg_info,name)).decode("utf-8") - - def get_metadata_lines(self, name): - return yield_lines(self.get_metadata(name)) - - def resource_isdir(self,resource_name): - return self._isdir(self._fn(self.module_path, resource_name)) - - def metadata_isdir(self,name): - return self.egg_info and self._isdir(self._fn(self.egg_info,name)) - - def resource_listdir(self,resource_name): - return self._listdir(self._fn(self.module_path,resource_name)) - - def metadata_listdir(self,name): - if self.egg_info: - return self._listdir(self._fn(self.egg_info,name)) - return [] - - def run_script(self,script_name,namespace): - script = 'scripts/'+script_name - if not self.has_metadata(script): - raise ResolutionError("No script named %r" % script_name) - script_text = self.get_metadata(script).replace('\r\n','\n') - script_text = script_text.replace('\r','\n') - script_filename = self._fn(self.egg_info,script) - namespace['__file__'] = script_filename - if os.path.exists(script_filename): - execfile(script_filename, namespace, namespace) - else: - from linecache import cache - cache[script_filename] = ( - len(script_text), 0, script_text.split('\n'), script_filename - ) - script_code = compile(script_text,script_filename,'exec') - exec(script_code, namespace, namespace) - - def _has(self, path): - raise NotImplementedError( - "Can't perform this 
operation for unregistered loader type" - ) - - def _isdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _listdir(self, path): - raise NotImplementedError( - "Can't perform this operation for unregistered loader type" - ) - - def _fn(self, base, resource_name): - if resource_name: - return os.path.join(base, *resource_name.split('/')) - return base - - def _get(self, path): - if hasattr(self.loader, 'get_data'): - return self.loader.get_data(path) - raise NotImplementedError( - "Can't perform this operation for loaders without 'get_data()'" - ) - -register_loader_type(object, NullProvider) - - -class EggProvider(NullProvider): - """Provider based on a virtual filesystem""" - - def __init__(self,module): - NullProvider.__init__(self,module) - self._setup_prefix() - - def _setup_prefix(self): - # we assume here that our metadata may be nested inside a "basket" - # of multiple eggs; that's why we use module_path instead of .archive - path = self.module_path - old = None - while path!=old: - if path.lower().endswith('.egg'): - self.egg_name = os.path.basename(path) - self.egg_info = os.path.join(path, 'EGG-INFO') - self.egg_root = path - break - old = path - path, base = os.path.split(path) - -class DefaultProvider(EggProvider): - """Provides access to package resources in the filesystem""" - - def _has(self, path): - return os.path.exists(path) - - def _isdir(self,path): - return os.path.isdir(path) - - def _listdir(self,path): - return os.listdir(path) - - def get_resource_stream(self, manager, resource_name): - return open(self._fn(self.module_path, resource_name), 'rb') - - def _get(self, path): - stream = open(path, 'rb') - try: - return stream.read() - finally: - stream.close() - -register_loader_type(type(None), DefaultProvider) - -if importlib_bootstrap is not None: - register_loader_type(importlib_bootstrap.SourceFileLoader, DefaultProvider) - - -class EmptyProvider(NullProvider): - 
"""Provider that returns nothing for all requests""" - - _isdir = _has = lambda self,path: False - _get = lambda self,path: '' - _listdir = lambda self,path: [] - module_path = None - - def __init__(self): - pass - -empty_provider = EmptyProvider() - - -def build_zipmanifest(path): - """ - This builds a similar dictionary to the zipimport directory - caches. However instead of tuples, ZipInfo objects are stored. - - The translation of the tuple is as follows: - * [0] - zipinfo.filename on stock pythons this needs "/" --> os.sep - on pypy it is the same (one reason why distribute did work - in some cases on pypy and win32). - * [1] - zipinfo.compress_type - * [2] - zipinfo.compress_size - * [3] - zipinfo.file_size - * [4] - len(utf-8 encoding of filename) if zipinfo & 0x800 - len(ascii encoding of filename) otherwise - * [5] - (zipinfo.date_time[0] - 1980) << 9 | - zipinfo.date_time[1] << 5 | zipinfo.date_time[2] - * [6] - (zipinfo.date_time[3] - 1980) << 11 | - zipinfo.date_time[4] << 5 | (zipinfo.date_time[5] // 2) - * [7] - zipinfo.CRC - """ - zipinfo = dict() - zfile = zipfile.ZipFile(path) - #Got ZipFile has not __exit__ on python 3.1 - try: - for zitem in zfile.namelist(): - zpath = zitem.replace('/', os.sep) - zipinfo[zpath] = zfile.getinfo(zitem) - assert zipinfo[zpath] is not None - finally: - zfile.close() - return zipinfo - - -class ZipProvider(EggProvider): - """Resource support for zips and eggs""" - - eagers = None - - def __init__(self, module): - EggProvider.__init__(self,module) - self.zipinfo = build_zipmanifest(self.loader.archive) - self.zip_pre = self.loader.archive+os.sep - - def _zipinfo_name(self, fspath): - # Convert a virtual filename (full path to file) into a zipfile subpath - # usable with the zipimport directory cache for our target archive - if fspath.startswith(self.zip_pre): - return fspath[len(self.zip_pre):] - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.zip_pre) - ) - - def _parts(self,zip_path): - # Convert 
a zipfile subpath into an egg-relative path part list - fspath = self.zip_pre+zip_path # pseudo-fs path - if fspath.startswith(self.egg_root+os.sep): - return fspath[len(self.egg_root)+1:].split(os.sep) - raise AssertionError( - "%s is not a subpath of %s" % (fspath,self.egg_root) - ) - - def get_resource_filename(self, manager, resource_name): - if not self.egg_name: - raise NotImplementedError( - "resource_filename() only supported for .egg, not .zip" - ) - # no need to lock for extraction, since we use temp names - zip_path = self._resource_to_zip(resource_name) - eagers = self._get_eager_resources() - if '/'.join(self._parts(zip_path)) in eagers: - for name in eagers: - self._extract_resource(manager, self._eager_to_zip(name)) - return self._extract_resource(manager, zip_path) - - @staticmethod - def _get_date_and_size(zip_stat): - size = zip_stat.file_size - date_time = zip_stat.date_time + (0, 0, -1) # ymdhms+wday, yday, dst - #1980 offset already done - timestamp = time.mktime(date_time) - return timestamp, size - - def _extract_resource(self, manager, zip_path): - - if zip_path in self._index(): - for name in self._index()[zip_path]: - last = self._extract_resource( - manager, os.path.join(zip_path, name) - ) - return os.path.dirname(last) # return the extracted directory name - - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - - if not WRITE_SUPPORT: - raise IOError('"os.rename" and "os.unlink" are not supported ' - 'on this platform') - try: - - real_path = manager.get_cache_path( - self.egg_name, self._parts(zip_path) - ) - - if self._is_current(real_path, zip_path): - return real_path - - outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) - os.write(outf, self.loader.get_data(zip_path)) - os.close(outf) - utime(tmpnam, (timestamp,timestamp)) - manager.postprocess(tmpnam, real_path) - - try: - rename(tmpnam, real_path) - - except os.error: - if os.path.isfile(real_path): - if self._is_current(real_path, zip_path): - 
# the file became current since it was checked above, - # so proceed. - return real_path - elif os.name=='nt': # Windows, del old file and retry - unlink(real_path) - rename(tmpnam, real_path) - return real_path - raise - - except os.error: - manager.extraction_error() # report a user-friendly error - - return real_path - - def _is_current(self, file_path, zip_path): - """ - Return True if the file_path is current for this zip_path - """ - timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) - if not os.path.isfile(file_path): - return False - stat = os.stat(file_path) - if stat.st_size!=size or stat.st_mtime!=timestamp: - return False - # check that the contents match - zip_contents = self.loader.get_data(zip_path) - f = open(file_path, 'rb') - file_contents = f.read() - f.close() - return zip_contents == file_contents - - def _get_eager_resources(self): - if self.eagers is None: - eagers = [] - for name in ('native_libs.txt', 'eager_resources.txt'): - if self.has_metadata(name): - eagers.extend(self.get_metadata_lines(name)) - self.eagers = eagers - return self.eagers - - def _index(self): - try: - return self._dirindex - except AttributeError: - ind = {} - for path in self.zipinfo: - parts = path.split(os.sep) - while parts: - parent = os.sep.join(parts[:-1]) - if parent in ind: - ind[parent].append(parts[-1]) - break - else: - ind[parent] = [parts.pop()] - self._dirindex = ind - return ind - - def _has(self, fspath): - zip_path = self._zipinfo_name(fspath) - return zip_path in self.zipinfo or zip_path in self._index() - - def _isdir(self,fspath): - return self._zipinfo_name(fspath) in self._index() - - def _listdir(self,fspath): - return list(self._index().get(self._zipinfo_name(fspath), ())) - - def _eager_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.egg_root,resource_name)) - - def _resource_to_zip(self,resource_name): - return self._zipinfo_name(self._fn(self.module_path,resource_name)) - 
-register_loader_type(zipimport.zipimporter, ZipProvider) - - -class FileMetadata(EmptyProvider): - """Metadata handler for standalone PKG-INFO files - - Usage:: - - metadata = FileMetadata("/path/to/PKG-INFO") - - This provider rejects all data and metadata requests except for PKG-INFO, - which is treated as existing, and will be the contents of the file at - the provided location. - """ - - def __init__(self,path): - self.path = path - - def has_metadata(self,name): - return name=='PKG-INFO' - - def get_metadata(self,name): - if name=='PKG-INFO': - f = open(self.path,'rU') - metadata = f.read() - f.close() - return metadata - raise KeyError("No metadata except PKG-INFO is available") - - def get_metadata_lines(self,name): - return yield_lines(self.get_metadata(name)) - - -class PathMetadata(DefaultProvider): - """Metadata provider for egg directories - - Usage:: - - # Development eggs: - - egg_info = "/path/to/PackageName.egg-info" - base_dir = os.path.dirname(egg_info) - metadata = PathMetadata(base_dir, egg_info) - dist_name = os.path.splitext(os.path.basename(egg_info))[0] - dist = Distribution(basedir,project_name=dist_name,metadata=metadata) - - # Unpacked egg directories: - - egg_path = "/path/to/PackageName-ver-pyver-etc.egg" - metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) - dist = Distribution.from_filename(egg_path, metadata=metadata) - """ - - def __init__(self, path, egg_info): - self.module_path = path - self.egg_info = egg_info - - -class EggMetadata(ZipProvider): - """Metadata provider for .egg files""" - - def __init__(self, importer): - """Create a metadata provider from a zipimporter""" - - self.zipinfo = build_zipmanifest(importer.archive) - self.zip_pre = importer.archive+os.sep - self.loader = importer - if importer.prefix: - self.module_path = os.path.join(importer.archive, importer.prefix) - else: - self.module_path = importer.archive - self._setup_prefix() - -_declare_state('dict', _distribution_finders = {}) - -def 
register_finder(importer_type, distribution_finder): - """Register `distribution_finder` to find distributions in sys.path items - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `distribution_finder` is a callable that, passed a path - item and the importer instance, yields ``Distribution`` instances found on - that path item. See ``pkg_resources.find_on_path`` for an example.""" - _distribution_finders[importer_type] = distribution_finder - - -def find_distributions(path_item, only=False): - """Yield distributions accessible via `path_item`""" - importer = get_importer(path_item) - finder = _find_adapter(_distribution_finders, importer) - return finder(importer, path_item, only) - -def find_eggs_in_zip(importer, path_item, only=False): - """ - Find eggs in zip files; possibly multiple nested eggs. - """ - if importer.archive.endswith('.whl'): - # wheels are not supported with this finder - # they don't have PKG-INFO metadata, and won't ever contain eggs - return - metadata = EggMetadata(importer) - if metadata.has_metadata('PKG-INFO'): - yield Distribution.from_filename(path_item, metadata=metadata) - if only: - return # don't yield nested distros - for subitem in metadata.resource_listdir('/'): - if subitem.endswith('.egg'): - subpath = os.path.join(path_item, subitem) - for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): - yield dist - -register_finder(zipimport.zipimporter, find_eggs_in_zip) - -def find_nothing(importer, path_item, only=False): - return () -register_finder(object,find_nothing) - -def find_on_path(importer, path_item, only=False): - """Yield distributions accessible on a sys.path directory""" - path_item = _normalize_cached(path_item) - - if os.path.isdir(path_item) and os.access(path_item, os.R_OK): - if path_item.lower().endswith('.egg'): - # unpacked egg - yield Distribution.from_filename( - path_item, metadata=PathMetadata( - path_item, os.path.join(path_item,'EGG-INFO') - ) - 
) - else: - # scan for .egg and .egg-info in directory - for entry in os.listdir(path_item): - lower = entry.lower() - if lower.endswith('.egg-info') or lower.endswith('.dist-info'): - fullpath = os.path.join(path_item, entry) - if os.path.isdir(fullpath): - # egg-info directory, allow getting metadata - metadata = PathMetadata(path_item, fullpath) - else: - metadata = FileMetadata(fullpath) - yield Distribution.from_location( - path_item,entry,metadata,precedence=DEVELOP_DIST - ) - elif not only and lower.endswith('.egg'): - for dist in find_distributions(os.path.join(path_item, entry)): - yield dist - elif not only and lower.endswith('.egg-link'): - entry_file = open(os.path.join(path_item, entry)) - try: - entry_lines = entry_file.readlines() - finally: - entry_file.close() - for line in entry_lines: - if not line.strip(): continue - for item in find_distributions(os.path.join(path_item,line.rstrip())): - yield item - break -register_finder(pkgutil.ImpImporter,find_on_path) - -if importlib_bootstrap is not None: - register_finder(importlib_bootstrap.FileFinder, find_on_path) - -_declare_state('dict', _namespace_handlers={}) -_declare_state('dict', _namespace_packages={}) - - -def register_namespace_handler(importer_type, namespace_handler): - """Register `namespace_handler` to declare namespace packages - - `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item - handler), and `namespace_handler` is a callable like this:: - - def namespace_handler(importer,path_entry,moduleName,module): - # return a path_entry to use for child packages - - Namespace handlers are only called if the importer object has already - agreed that it can handle the relevant path item, and they should only - return a subpath if the module __path__ does not already contain an - equivalent subpath. For an example namespace handler, see - ``pkg_resources.file_ns_handler``. 
- """ - _namespace_handlers[importer_type] = namespace_handler - -def _handle_ns(packageName, path_item): - """Ensure that named package includes a subpath of path_item (if needed)""" - - importer = get_importer(path_item) - if importer is None: - return None - loader = importer.find_module(packageName) - if loader is None: - return None - module = sys.modules.get(packageName) - if module is None: - module = sys.modules[packageName] = imp.new_module(packageName) - module.__path__ = [] - _set_parent_ns(packageName) - elif not hasattr(module,'__path__'): - raise TypeError("Not a package:", packageName) - handler = _find_adapter(_namespace_handlers, importer) - subpath = handler(importer, path_item, packageName, module) - if subpath is not None: - path = module.__path__ - path.append(subpath) - loader.load_module(packageName) - for path_item in path: - if path_item not in module.__path__: - module.__path__.append(path_item) - return subpath - -def declare_namespace(packageName): - """Declare that package 'packageName' is a namespace package""" - - imp.acquire_lock() - try: - if packageName in _namespace_packages: - return - - path, parent = sys.path, None - if '.' 
in packageName: - parent = '.'.join(packageName.split('.')[:-1]) - declare_namespace(parent) - if parent not in _namespace_packages: - __import__(parent) - try: - path = sys.modules[parent].__path__ - except AttributeError: - raise TypeError("Not a package:", parent) - - # Track what packages are namespaces, so when new path items are added, - # they can be updated - _namespace_packages.setdefault(parent,[]).append(packageName) - _namespace_packages.setdefault(packageName,[]) - - for path_item in path: - # Ensure all the parent's path items are reflected in the child, - # if they apply - _handle_ns(packageName, path_item) - - finally: - imp.release_lock() - -def fixup_namespace_packages(path_item, parent=None): - """Ensure that previously-declared namespace packages include path_item""" - imp.acquire_lock() - try: - for package in _namespace_packages.get(parent,()): - subpath = _handle_ns(package, path_item) - if subpath: fixup_namespace_packages(subpath,package) - finally: - imp.release_lock() - -def file_ns_handler(importer, path_item, packageName, module): - """Compute an ns-package subpath for a filesystem or zipfile importer""" - - subpath = os.path.join(path_item, packageName.split('.')[-1]) - normalized = _normalize_cached(subpath) - for item in module.__path__: - if _normalize_cached(item)==normalized: - break - else: - # Only return the path if it's not already there - return subpath - -register_namespace_handler(pkgutil.ImpImporter,file_ns_handler) -register_namespace_handler(zipimport.zipimporter,file_ns_handler) - -if importlib_bootstrap is not None: - register_namespace_handler(importlib_bootstrap.FileFinder, file_ns_handler) - - -def null_ns_handler(importer, path_item, packageName, module): - return None - -register_namespace_handler(object,null_ns_handler) - - -def normalize_path(filename): - """Normalize a file/dir name for comparison purposes""" - return os.path.normcase(os.path.realpath(filename)) - -def _normalize_cached(filename,_cache={}): - 
try: - return _cache[filename] - except KeyError: - _cache[filename] = result = normalize_path(filename) - return result - -def _set_parent_ns(packageName): - parts = packageName.split('.') - name = parts.pop() - if parts: - parent = '.'.join(parts) - setattr(sys.modules[parent], name, sys.modules[packageName]) - - -def yield_lines(strs): - """Yield non-empty/non-comment lines of a ``basestring`` or sequence""" - if isinstance(strs,basestring): - for s in strs.splitlines(): - s = s.strip() - if s and not s.startswith('#'): # skip blank lines/comments - yield s - else: - for ss in strs: - for s in yield_lines(ss): - yield s - -LINE_END = re.compile(r"\s*(#.*)?$").match # whitespace and comment -CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match # line continuation -DISTRO = re.compile(r"\s*((\w|[-.])+)").match # Distribution or extra -VERSION = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match # ver. info -COMMA = re.compile(r"\s*,").match # comma between items -OBRACKET = re.compile(r"\s*\[").match -CBRACKET = re.compile(r"\s*\]").match -MODULE = re.compile(r"\w+(\.\w+)*$").match -EGG_NAME = re.compile( - r"(?P[^-]+)" - r"( -(?P[^-]+) (-py(?P[^-]+) (-(?P.+))? )? )?", - re.VERBOSE | re.IGNORECASE -).match - -component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) -replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get - -def _parse_version_parts(s): - for part in component_re.split(s): - part = replace(part,part) - if not part or part=='.': - continue - if part[:1] in '0123456789': - yield part.zfill(8) # pad for numeric comparison - else: - yield '*'+part - - yield '*final' # ensure that alpha/beta/candidate are before final - -def parse_version(s): - """Convert a version string to a chronologically-sortable key - - This is a rough cross between distutils' StrictVersion and LooseVersion; - if you give it versions that would work with StrictVersion, then it behaves - the same; otherwise it acts like a slightly-smarter LooseVersion. 
It is - *possible* to create pathological version coding schemes that will fool - this parser, but they should be very rare in practice. - - The returned value will be a tuple of strings. Numeric portions of the - version are padded to 8 digits so they will compare numerically, but - without relying on how numbers compare relative to strings. Dots are - dropped, but dashes are retained. Trailing zeros between alpha segments - or dashes are suppressed, so that e.g. "2.4.0" is considered the same as - "2.4". Alphanumeric parts are lower-cased. - - The algorithm assumes that strings like "-" and any alpha string that - alphabetically follows "final" represents a "patch level". So, "2.4-1" - is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is - considered newer than "2.4-1", which in turn is newer than "2.4". - - Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that - come before "final" alphabetically) are assumed to be pre-release versions, - so that the version "2.4" is considered newer than "2.4a1". - - Finally, to handle miscellaneous cases, the strings "pre", "preview", and - "rc" are treated as if they were "c", i.e. as though they were release - candidates, and therefore are not as new as a version string that does not - contain them, and "dev" is replaced with an '@' so that it sorts lower than - than any other pre-release tag. 
- """ - parts = [] - for part in _parse_version_parts(s.lower()): - if part.startswith('*'): - if part<'*final': # remove '-' before a prerelease tag - while parts and parts[-1]=='*final-': parts.pop() - # remove trailing zeros from each series of numeric parts - while parts and parts[-1]=='00000000': - parts.pop() - parts.append(part) - return tuple(parts) -class EntryPoint(object): - """Object representing an advertised importable object""" - - def __init__(self, name, module_name, attrs=(), extras=(), dist=None): - if not MODULE(module_name): - raise ValueError("Invalid module name", module_name) - self.name = name - self.module_name = module_name - self.attrs = tuple(attrs) - self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras - self.dist = dist - - def __str__(self): - s = "%s = %s" % (self.name, self.module_name) - if self.attrs: - s += ':' + '.'.join(self.attrs) - if self.extras: - s += ' [%s]' % ','.join(self.extras) - return s - - def __repr__(self): - return "EntryPoint.parse(%r)" % str(self) - - def load(self, require=True, env=None, installer=None): - if require: self.require(env, installer) - entry = __import__(self.module_name, globals(),globals(), ['__name__']) - for attr in self.attrs: - try: - entry = getattr(entry,attr) - except AttributeError: - raise ImportError("%r has no %r attribute" % (entry,attr)) - return entry - - def require(self, env=None, installer=None): - if self.extras and not self.dist: - raise UnknownExtra("Can't require() without a distribution", self) - list(map(working_set.add, - working_set.resolve(self.dist.requires(self.extras),env,installer))) - - @classmethod - def parse(cls, src, dist=None): - """Parse a single entry point from string `src` - - Entry point syntax follows the form:: - - name = some.module:some.attr [extra1,extra2] - - The entry name and module name are required, but the ``:attrs`` and - ``[extras]`` parts are optional - """ - try: - attrs = extras = () - name,value = src.split('=',1) - if 
'[' in value: - value,extras = value.split('[',1) - req = Requirement.parse("x["+extras) - if req.specs: raise ValueError - extras = req.extras - if ':' in value: - value,attrs = value.split(':',1) - if not MODULE(attrs.rstrip()): - raise ValueError - attrs = attrs.rstrip().split('.') - except ValueError: - raise ValueError( - "EntryPoint must be in 'name=module:attrs [extras]' format", - src - ) - else: - return cls(name.strip(), value.strip(), attrs, extras, dist) - - @classmethod - def parse_group(cls, group, lines, dist=None): - """Parse an entry point group""" - if not MODULE(group): - raise ValueError("Invalid group name", group) - this = {} - for line in yield_lines(lines): - ep = cls.parse(line, dist) - if ep.name in this: - raise ValueError("Duplicate entry point", group, ep.name) - this[ep.name]=ep - return this - - @classmethod - def parse_map(cls, data, dist=None): - """Parse a map of entry point groups""" - if isinstance(data,dict): - data = data.items() - else: - data = split_sections(data) - maps = {} - for group, lines in data: - if group is None: - if not lines: - continue - raise ValueError("Entry points must be listed in groups") - group = group.strip() - if group in maps: - raise ValueError("Duplicate group name", group) - maps[group] = cls.parse_group(group, lines, dist) - return maps - - -def _remove_md5_fragment(location): - if not location: - return '' - parsed = urlparse(location) - if parsed[-1].startswith('md5='): - return urlunparse(parsed[:-1] + ('',)) - return location - - -class Distribution(object): - """Wrap an actual or potential sys.path entry w/metadata""" - PKG_INFO = 'PKG-INFO' - - def __init__(self, location=None, metadata=None, project_name=None, - version=None, py_version=PY_MAJOR, platform=None, - precedence=EGG_DIST): - self.project_name = safe_name(project_name or 'Unknown') - if version is not None: - self._version = safe_version(version) - self.py_version = py_version - self.platform = platform - self.location = 
location - self.precedence = precedence - self._provider = metadata or empty_provider - - @classmethod - def from_location(cls,location,basename,metadata=None,**kw): - project_name, version, py_version, platform = [None]*4 - basename, ext = os.path.splitext(basename) - if ext.lower() in _distributionImpl: - # .dist-info gets much metadata differently - match = EGG_NAME(basename) - if match: - project_name, version, py_version, platform = match.group( - 'name','ver','pyver','plat' - ) - cls = _distributionImpl[ext.lower()] - return cls( - location, metadata, project_name=project_name, version=version, - py_version=py_version, platform=platform, **kw - ) - - hashcmp = property( - lambda self: ( - getattr(self,'parsed_version',()), - self.precedence, - self.key, - _remove_md5_fragment(self.location), - self.py_version, - self.platform - ) - ) - def __hash__(self): return hash(self.hashcmp) - def __lt__(self, other): - return self.hashcmp < other.hashcmp - def __le__(self, other): - return self.hashcmp <= other.hashcmp - def __gt__(self, other): - return self.hashcmp > other.hashcmp - def __ge__(self, other): - return self.hashcmp >= other.hashcmp - def __eq__(self, other): - if not isinstance(other, self.__class__): - # It's not a Distribution, so they are not equal - return False - return self.hashcmp == other.hashcmp - def __ne__(self, other): - return not self == other - - # These properties have to be lazy so that we don't have to load any - # metadata until/unless it's actually needed. 
(i.e., some distributions - # may not know their name or version without loading PKG-INFO) - - @property - def key(self): - try: - return self._key - except AttributeError: - self._key = key = self.project_name.lower() - return key - - @property - def parsed_version(self): - try: - return self._parsed_version - except AttributeError: - self._parsed_version = pv = parse_version(self.version) - return pv - - @property - def version(self): - try: - return self._version - except AttributeError: - for line in self._get_metadata(self.PKG_INFO): - if line.lower().startswith('version:'): - self._version = safe_version(line.split(':',1)[1].strip()) - return self._version - else: - raise ValueError( - "Missing 'Version:' header and/or %s file" % self.PKG_INFO, self - ) - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - dm = self.__dep_map = {None: []} - for name in 'requires.txt', 'depends.txt': - for extra,reqs in split_sections(self._get_metadata(name)): - if extra: - if ':' in extra: - extra, marker = extra.split(':',1) - if invalid_marker(marker): - reqs=[] # XXX warn - elif not evaluate_marker(marker): - reqs=[] - extra = safe_extra(extra) or None - dm.setdefault(extra,[]).extend(parse_requirements(reqs)) - return dm - - def requires(self,extras=()): - """List of Requirements needed for this distro if `extras` are used""" - dm = self._dep_map - deps = [] - deps.extend(dm.get(None,())) - for ext in extras: - try: - deps.extend(dm[safe_extra(ext)]) - except KeyError: - raise UnknownExtra( - "%s has no such extra feature %r" % (self, ext) - ) - return deps - - def _get_metadata(self,name): - if self.has_metadata(name): - for line in self.get_metadata_lines(name): - yield line - - def activate(self,path=None): - """Ensure distribution is importable on `path` (default=sys.path)""" - if path is None: path = sys.path - self.insert_on(path) - if path is sys.path: - fixup_namespace_packages(self.location) - list(map(declare_namespace, 
self._get_metadata('namespace_packages.txt'))) - - def egg_name(self): - """Return what this distribution's standard .egg filename should be""" - filename = "%s-%s-py%s" % ( - to_filename(self.project_name), to_filename(self.version), - self.py_version or PY_MAJOR - ) - - if self.platform: - filename += '-'+self.platform - return filename - - def __repr__(self): - if self.location: - return "%s (%s)" % (self,self.location) - else: - return str(self) - - def __str__(self): - try: version = getattr(self,'version',None) - except ValueError: version = None - version = version or "[unknown version]" - return "%s %s" % (self.project_name,version) - - def __getattr__(self,attr): - """Delegate all unrecognized public attributes to .metadata provider""" - if attr.startswith('_'): - raise AttributeError(attr) - return getattr(self._provider, attr) - - @classmethod - def from_filename(cls,filename,metadata=None, **kw): - return cls.from_location( - _normalize_cached(filename), os.path.basename(filename), metadata, - **kw - ) - - def as_requirement(self): - """Return a ``Requirement`` that matches this distribution exactly""" - return Requirement.parse('%s==%s' % (self.project_name, self.version)) - - def load_entry_point(self, group, name): - """Return the `name` entry point of `group` or raise ImportError""" - ep = self.get_entry_info(group,name) - if ep is None: - raise ImportError("Entry point %r not found" % ((group,name),)) - return ep.load() - - def get_entry_map(self, group=None): - """Return the entry point map for `group`, or the full entry map""" - try: - ep_map = self._ep_map - except AttributeError: - ep_map = self._ep_map = EntryPoint.parse_map( - self._get_metadata('entry_points.txt'), self - ) - if group is not None: - return ep_map.get(group,{}) - return ep_map - - def get_entry_info(self, group, name): - """Return the EntryPoint object for `group`+`name`, or ``None``""" - return self.get_entry_map(group).get(name) - - def insert_on(self, path, loc = None): - 
"""Insert self.location in path before its nearest parent directory""" - - loc = loc or self.location - if not loc: - return - - nloc = _normalize_cached(loc) - bdir = os.path.dirname(nloc) - npath= [(p and _normalize_cached(p) or p) for p in path] - - for p, item in enumerate(npath): - if item==nloc: - break - elif item==bdir and self.precedence==EGG_DIST: - # if it's an .egg, give it precedence over its directory - if path is sys.path: - self.check_version_conflict() - path.insert(p, loc) - npath.insert(p, nloc) - break - else: - if path is sys.path: - self.check_version_conflict() - path.append(loc) - return - - # p is the spot where we found or inserted loc; now remove duplicates - while 1: - try: - np = npath.index(nloc, p+1) - except ValueError: - break - else: - del npath[np], path[np] - p = np # ha! - - return - - def check_version_conflict(self): - if self.key=='setuptools': - return # ignore the inevitable setuptools self-conflicts :( - - nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) - loc = normalize_path(self.location) - for modname in self._get_metadata('top_level.txt'): - if (modname not in sys.modules or modname in nsp - or modname in _namespace_packages): - continue - if modname in ('pkg_resources', 'setuptools', 'site'): - continue - fn = getattr(sys.modules[modname], '__file__', None) - if fn and (normalize_path(fn).startswith(loc) or - fn.startswith(self.location)): - continue - issue_warning( - "Module %s was already imported from %s, but %s is being added" - " to sys.path" % (modname, fn, self.location), - ) - - def has_version(self): - try: - self.version - except ValueError: - issue_warning("Unbuilt egg for "+repr(self)) - return False - return True - - def clone(self,**kw): - """Copy this distribution, substituting in any changed keyword args""" - for attr in ( - 'project_name', 'version', 'py_version', 'platform', 'location', - 'precedence' - ): - kw.setdefault(attr, getattr(self,attr,None)) - kw.setdefault('metadata', 
self._provider) - return self.__class__(**kw) - - @property - def extras(self): - return [dep for dep in self._dep_map if dep] - - -class DistInfoDistribution(Distribution): - """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" - PKG_INFO = 'METADATA' - EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") - - @property - def _parsed_pkg_info(self): - """Parse and cache metadata""" - try: - return self._pkg_info - except AttributeError: - from email.parser import Parser - self._pkg_info = Parser().parsestr(self.get_metadata(self.PKG_INFO)) - return self._pkg_info - - @property - def _dep_map(self): - try: - return self.__dep_map - except AttributeError: - self.__dep_map = self._compute_dependencies() - return self.__dep_map - - def _preparse_requirement(self, requires_dist): - """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz') - Split environment marker, add == prefix to version specifiers as - necessary, and remove parenthesis. - """ - parts = requires_dist.split(';', 1) + [''] - distvers = parts[0].strip() - mark = parts[1].strip() - distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers) - distvers = distvers.replace('(', '').replace(')', '') - return (distvers, mark) - - def _compute_dependencies(self): - """Recompute this distribution's dependencies.""" - from _markerlib import compile as compile_marker - dm = self.__dep_map = {None: []} - - reqs = [] - # Including any condition expressions - for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: - distvers, mark = self._preparse_requirement(req) - parsed = next(parse_requirements(distvers)) - parsed.marker_fn = compile_marker(mark) - reqs.append(parsed) - - def reqs_for_extra(extra): - for req in reqs: - if req.marker_fn(override={'extra':extra}): - yield req - - common = frozenset(reqs_for_extra(None)) - dm[None].extend(common) - - for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: - extra = safe_extra(extra.strip()) - dm[extra] = 
list(frozenset(reqs_for_extra(extra)) - common) - - return dm - - -_distributionImpl = { - '.egg': Distribution, - '.egg-info': Distribution, - '.dist-info': DistInfoDistribution, - } - - -def issue_warning(*args,**kw): - level = 1 - g = globals() - try: - # find the first stack frame that is *not* code in - # the pkg_resources module, to use for the warning - while sys._getframe(level).f_globals is g: - level += 1 - except ValueError: - pass - from warnings import warn - warn(stacklevel = level+1, *args, **kw) - - -def parse_requirements(strs): - """Yield ``Requirement`` objects for each specification in `strs` - - `strs` must be an instance of ``basestring``, or a (possibly-nested) - iterable thereof. - """ - # create a steppable iterator, so we can handle \-continuations - lines = iter(yield_lines(strs)) - - def scan_list(ITEM,TERMINATOR,line,p,groups,item_name): - - items = [] - - while not TERMINATOR(line,p): - if CONTINUE(line,p): - try: - line = next(lines) - p = 0 - except StopIteration: - raise ValueError( - "\\ must not appear on the last nonblank line" - ) - - match = ITEM(line,p) - if not match: - raise ValueError("Expected "+item_name+" in",line,"at",line[p:]) - - items.append(match.group(*groups)) - p = match.end() - - match = COMMA(line,p) - if match: - p = match.end() # skip the comma - elif not TERMINATOR(line,p): - raise ValueError( - "Expected ',' or end-of-list in",line,"at",line[p:] - ) - - match = TERMINATOR(line,p) - if match: p = match.end() # skip the terminator, if any - return line, p, items - - for line in lines: - match = DISTRO(line) - if not match: - raise ValueError("Missing distribution spec", line) - project_name = match.group(1) - p = match.end() - extras = [] - - match = OBRACKET(line,p) - if match: - p = match.end() - line, p, extras = scan_list( - DISTRO, CBRACKET, line, p, (1,), "'extra' name" - ) - - line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec") - specs = [(op,safe_version(val)) for op,val in specs] 
- yield Requirement(project_name, specs, extras) - - -def _sort_dists(dists): - tmp = [(dist.hashcmp,dist) for dist in dists] - tmp.sort() - dists[::-1] = [d for hc,d in tmp] - - -class Requirement: - def __init__(self, project_name, specs, extras): - """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" - self.unsafe_name, project_name = project_name, safe_name(project_name) - self.project_name, self.key = project_name, project_name.lower() - index = [(parse_version(v),state_machine[op],op,v) for op,v in specs] - index.sort() - self.specs = [(op,ver) for parsed,trans,op,ver in index] - self.index, self.extras = index, tuple(map(safe_extra,extras)) - self.hashCmp = ( - self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]), - frozenset(self.extras) - ) - self.__hash = hash(self.hashCmp) - - def __str__(self): - specs = ','.join([''.join(s) for s in self.specs]) - extras = ','.join(self.extras) - if extras: extras = '[%s]' % extras - return '%s%s%s' % (self.project_name, extras, specs) - - def __eq__(self,other): - return isinstance(other,Requirement) and self.hashCmp==other.hashCmp - - def __contains__(self,item): - if isinstance(item,Distribution): - if item.key != self.key: return False - if self.index: item = item.parsed_version # only get if we need it - elif isinstance(item,basestring): - item = parse_version(item) - last = None - compare = lambda a, b: (a > b) - (a < b) # -1, 0, 1 - for parsed,trans,op,ver in self.index: - action = trans[compare(item,parsed)] # Indexing: 0, 1, -1 - if action=='F': - return False - elif action=='T': - return True - elif action=='+': - last = True - elif action=='-' or last is None: last = False - if last is None: last = True # no rules encountered - return last - - def __hash__(self): - return self.__hash - - def __repr__(self): return "Requirement.parse(%r)" % str(self) - - @staticmethod - def parse(s): - reqs = list(parse_requirements(s)) - if reqs: - if len(reqs)==1: - return reqs[0] - raise 
ValueError("Expected only one requirement", s) - raise ValueError("No requirements found", s) - -state_machine = { - # =>< - '<': '--T', - '<=': 'T-T', - '>': 'F+F', - '>=': 'T+F', - '==': 'T..', - '!=': 'F++', -} - - -def _get_mro(cls): - """Get an mro for a type or classic class""" - if not isinstance(cls,type): - class cls(cls,object): pass - return cls.__mro__[1:] - return cls.__mro__ - -def _find_adapter(registry, ob): - """Return an adapter factory for `ob` from `registry`""" - for t in _get_mro(getattr(ob, '__class__', type(ob))): - if t in registry: - return registry[t] - - -def ensure_directory(path): - """Ensure that the parent directory of `path` exists""" - dirname = os.path.dirname(path) - if not os.path.isdir(dirname): - os.makedirs(dirname) - -def split_sections(s): - """Split a string or iterable thereof into (section,content) pairs - - Each ``section`` is a stripped version of the section header ("[section]") - and each ``content`` is a list of stripped lines excluding blank lines and - comment-only lines. If there are any such lines before the first section - header, they're returned in a first ``section`` of ``None``. 
- """ - section = None - content = [] - for line in yield_lines(s): - if line.startswith("["): - if line.endswith("]"): - if section or content: - yield section, content - section = line[1:-1].strip() - content = [] - else: - raise ValueError("Invalid section heading", line) - else: - content.append(line) - - # wrap up last segment - yield section, content - -def _mkstemp(*args,**kw): - from tempfile import mkstemp - old_open = os.open - try: - os.open = os_open # temporarily bypass sandboxing - return mkstemp(*args,**kw) - finally: - os.open = old_open # and then put it back - - -# Set up global resource manager (deliberately not state-saved) -_manager = ResourceManager() -def _initialize(g): - for name in dir(_manager): - if not name.startswith('_'): - g[name] = getattr(_manager, name) -_initialize(globals()) - -# Prepare the master working set and make the ``require()`` API available -_declare_state('object', working_set = WorkingSet()) -try: - # Does the main program list any requirements? 
- from __main__ import __requires__ -except ImportError: - pass # No: just use the default working set based on sys.path -else: - # Yes: ensure the requirements are met, by prefixing sys.path if necessary - try: - working_set.require(__requires__) - except VersionConflict: # try it without defaults already on sys.path - working_set = WorkingSet([]) # by starting with an empty path - for dist in working_set.resolve( - parse_requirements(__requires__), Environment() - ): - working_set.add(dist) - for entry in sys.path: # add any missing entries from sys.path - if entry not in working_set.entries: - working_set.add_entry(entry) - sys.path[:] = working_set.entries # then copy back to sys.path - -require = working_set.require -iter_entry_points = working_set.iter_entry_points -add_activation_listener = working_set.subscribe -run_script = working_set.run_script -run_main = run_script # backward compatibility -# Activate all distributions already on sys.path, and ensure that -# all distributions added to the working set in the future (e.g. by -# calling ``require()``) will get activated as well. -add_activation_listener(lambda dist: dist.activate()) -working_set.entries=[] -list(map(working_set.add_entry,sys.path)) # match order diff --git a/libs/setuptools-2.2/release.py b/libs/setuptools-2.2/release.py deleted file mode 100644 index d2f8277..0000000 --- a/libs/setuptools-2.2/release.py +++ /dev/null @@ -1,91 +0,0 @@ -""" -Setuptools is released using 'jaraco.packaging.release'. 
To make a release, -install jaraco.packaging and run 'python -m jaraco.packaging.release' -""" - -import re -import os -import subprocess - -import pkg_resources - -pkg_resources.require('jaraco.packaging>=2.0') -pkg_resources.require('wheel') - - -def before_upload(): - _linkify('CHANGES.txt', 'CHANGES (links).txt') - BootstrapBookmark.add() - - -def after_push(): - os.remove('CHANGES (links).txt') - BootstrapBookmark.push() - -files_with_versions = ( - 'ez_setup.py', 'setuptools/version.py', -) - -# bdist_wheel must be included or pip will break -dist_commands = 'sdist', 'bdist_wheel' - -test_info = "Travis-CI tests: http://travis-ci.org/#!/jaraco/setuptools" - -os.environ["SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES"] = "1" - -link_patterns = [ - r"(Issue )?#(?P\d+)", - r"Pull Request ?#(?P\d+)", - r"Distribute #(?P\d+)", - r"Buildout #(?P\d+)", - r"Old Setuptools #(?P\d+)", - r"Jython #(?P\d+)", - r"Python #(?P\d+)", -] - -issue_urls = dict( - pull_request='https://bitbucket.org' - '/pypa/setuptools/pull-request/{pull_request}', - issue='https://bitbucket.org/pypa/setuptools/issue/{issue}', - distribute='https://bitbucket.org/tarek/distribute/issue/{distribute}', - buildout='https://github.com/buildout/buildout/issues/{buildout}', - old_setuptools='http://bugs.python.org/setuptools/issue{old_setuptools}', - jython='http://bugs.jython.org/issue{jython}', - python='http://bugs.python.org/issue{python}', -) - - -def _linkify(source, dest): - pattern = '|'.join(link_patterns) - with open(source) as source: - out = re.sub(pattern, replacer, source.read()) - with open(dest, 'w') as dest: - dest.write(out) - - -def replacer(match): - text = match.group(0) - match_dict = match.groupdict() - for key in match_dict: - if match_dict[key]: - url = issue_urls[key].format(**match_dict) - return "`{text} <{url}>`_".format(text=text, url=url) - -class BootstrapBookmark: - name = 'bootstrap' - - @classmethod - def add(cls): - cmd = ['hg', 'bookmark', '-i', cls.name, '-f'] - 
subprocess.Popen(cmd) - - @classmethod - def push(cls): - """ - Push the bootstrap bookmark - """ - push_command = ['hg', 'push', '-B', cls.name] - # don't use check_call here because mercurial will return a non-zero - # code even if it succeeds at pushing the bookmark (because there are - # no changesets to be pushed). !dm mercurial - subprocess.call(push_command) diff --git a/libs/setuptools-2.2/setup.cfg b/libs/setuptools-2.2/setup.cfg deleted file mode 100644 index 4ed20bf..0000000 --- a/libs/setuptools-2.2/setup.cfg +++ /dev/null @@ -1,24 +0,0 @@ -[egg_info] -tag_build = -tag_date = 0 -tag_svn_revision = 0 - -[aliases] -release = egg_info -RDb '' -source = register sdist binary -binary = bdist_egg upload --show-response - -[build_sphinx] -source-dir = docs/ -build-dir = docs/build -all_files = 1 - -[upload_docs] -upload-dir = docs/build/html - -[sdist] -formats = gztar - -[wheel] -universal = 1 - diff --git a/libs/setuptools-2.2/setup.py b/libs/setuptools-2.2/setup.py deleted file mode 100644 index dc4391c..0000000 --- a/libs/setuptools-2.2/setup.py +++ /dev/null @@ -1,202 +0,0 @@ -#!/usr/bin/env python -"""Distutils setup file, used to install or test 'setuptools'""" -import io -import os -import sys -import textwrap - -# Allow to run setup.py from another directory. 
-os.chdir(os.path.dirname(os.path.abspath(__file__))) - -src_root = None - -from distutils.util import convert_path - -command_ns = {} -init_path = convert_path('setuptools/command/__init__.py') -with open(init_path) as init_file: - exec(init_file.read(), command_ns) - -SETUP_COMMANDS = command_ns['__all__'] - -main_ns = {} -ver_path = convert_path('setuptools/version.py') -with open(ver_path) as ver_file: - exec(ver_file.read(), main_ns) - -import setuptools -from setuptools.command.build_py import build_py as _build_py -from setuptools.command.test import test as _test - -scripts = [] - -def _gen_console_scripts(): - yield "easy_install = setuptools.command.easy_install:main" - - # Gentoo distributions manage the python-version-specific scripts - # themselves, so those platforms define an environment variable to - # suppress the creation of the version-specific scripts. - var_names = ( - 'SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT', - 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT', - ) - if any(os.environ.get(var) not in (None, "", "0") for var in var_names): - return - yield ("easy_install-{shortver} = setuptools.command.easy_install:main" - .format(shortver=sys.version[:3])) - -console_scripts = list(_gen_console_scripts()) - - -# specific command that is used to generate windows .exe files -class build_py(_build_py): - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - -class test(_test): - """Specific test class to avoid rewriting the entry_points.txt""" - def run(self): - entry_points = os.path.join('setuptools.egg-info', 'entry_points.txt') - - if not os.path.exists(entry_points): - _test.run(self) - return # even 
though _test.run will raise SystemExit - - # save the content - with open(entry_points) as f: - ep_content = f.read() - - # run the test - try: - _test.run(self) - finally: - # restore the file - with open(entry_points, 'w') as f: - f.write(ep_content) - - -readme_file = io.open('README.txt', encoding='utf-8') - -# the release script adds hyperlinks to issues -if os.path.exists('CHANGES (links).txt'): - changes_file = open('CHANGES (links).txt') -else: - # but if the release script has not run, fall back to the source file - changes_file = open('CHANGES.txt') -with readme_file: - with changes_file: - long_description = readme_file.read() + '\n' + changes_file.read() - -package_data = {'setuptools': ['site-patch.py']} -force_windows_specific_files = ( - os.environ.get("SETUPTOOLS_INSTALL_WINDOWS_SPECIFIC_FILES") - not in (None, "", "0") -) -if sys.platform == 'win32' or force_windows_specific_files: - package_data.setdefault('setuptools', []).extend(['*.exe']) - package_data.setdefault('setuptools.command', []).extend(['*.xml']) - -setup_params = dict( - name="setuptools", - version=main_ns['__version__'], - description="Easily download, build, install, upgrade, and uninstall " - "Python packages", - author="Python Packaging Authority", - author_email="distutils-sig@python.org", - license="PSF or ZPL", - long_description = long_description, - keywords = "CPAN PyPI distutils eggs package management", - url = "https://pypi.python.org/pypi/setuptools", - test_suite = 'setuptools.tests', - src_root = src_root, - packages = setuptools.find_packages(), - package_data = package_data, - - py_modules = ['pkg_resources', 'easy_install'], - - zip_safe = (sys.version>="2.5"), # <2.5 needs unzipped for -m to work - - cmdclass = {'test': test}, - entry_points = { - "distutils.commands": [ - "%(cmd)s = setuptools.command.%(cmd)s:%(cmd)s" % locals() - for cmd in SETUP_COMMANDS - ], - "distutils.setup_keywords": [ - "eager_resources = setuptools.dist:assert_string_list", - 
"namespace_packages = setuptools.dist:check_nsp", - "extras_require = setuptools.dist:check_extras", - "install_requires = setuptools.dist:check_requirements", - "tests_require = setuptools.dist:check_requirements", - "entry_points = setuptools.dist:check_entry_points", - "test_suite = setuptools.dist:check_test_suite", - "zip_safe = setuptools.dist:assert_bool", - "package_data = setuptools.dist:check_package_data", - "exclude_package_data = setuptools.dist:check_package_data", - "include_package_data = setuptools.dist:assert_bool", - "packages = setuptools.dist:check_packages", - "dependency_links = setuptools.dist:assert_string_list", - "test_loader = setuptools.dist:check_importable", - "use_2to3 = setuptools.dist:assert_bool", - "convert_2to3_doctests = setuptools.dist:assert_string_list", - "use_2to3_fixers = setuptools.dist:assert_string_list", - "use_2to3_exclude_fixers = setuptools.dist:assert_string_list", - ], - "egg_info.writers": [ - "PKG-INFO = setuptools.command.egg_info:write_pkg_info", - "requires.txt = setuptools.command.egg_info:write_requirements", - "entry_points.txt = setuptools.command.egg_info:write_entries", - "eager_resources.txt = setuptools.command.egg_info:overwrite_arg", - "namespace_packages.txt = setuptools.command.egg_info:overwrite_arg", - "top_level.txt = setuptools.command.egg_info:write_toplevel_names", - "depends.txt = setuptools.command.egg_info:warn_depends_obsolete", - "dependency_links.txt = setuptools.command.egg_info:overwrite_arg", - ], - "console_scripts": console_scripts, - - "setuptools.file_finders": - ["svn_cvs = setuptools.command.sdist:_default_revctrl"], - - "setuptools.installation": - ['eggsecutable = setuptools.command.easy_install:bootstrap'], - }, - - - classifiers = textwrap.dedent(""" - Development Status :: 5 - Production/Stable - Intended Audience :: Developers - License :: OSI Approved :: Python Software Foundation License - License :: OSI Approved :: Zope Public License - Operating System :: OS 
Independent - Programming Language :: Python :: 2.6 - Programming Language :: Python :: 2.7 - Programming Language :: Python :: 3 - Programming Language :: Python :: 3.1 - Programming Language :: Python :: 3.2 - Programming Language :: Python :: 3.3 - Topic :: Software Development :: Libraries :: Python Modules - Topic :: System :: Archiving :: Packaging - Topic :: System :: Systems Administration - Topic :: Utilities - """).strip().splitlines(), - extras_require = { - "ssl:sys_platform=='win32'": "wincertstore==0.1", - "certs": "certifi==0.0.8", - }, - dependency_links = [ - 'https://pypi.python.org/packages/source/c/certifi/certifi-0.0.8.tar.gz#md5=dc5f5e7f0b5fc08d27654b17daa6ecec', - 'https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.1.zip#md5=2f9accbebe8f7b4c06ac7aa83879b81c', - ], - scripts = [], - # tests_require = "setuptools[ssl]", -) - -if __name__ == '__main__': - dist = setuptools.setup(**setup_params) diff --git a/libs/setuptools-2.2/setuptools.egg-info/PKG-INFO b/libs/setuptools-2.2/setuptools.egg-info/PKG-INFO deleted file mode 100644 index 816326d..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/PKG-INFO +++ /dev/null @@ -1,1793 +0,0 @@ -Metadata-Version: 1.1 -Name: setuptools -Version: 2.2 -Summary: Easily download, build, install, upgrade, and uninstall Python packages -Home-page: https://pypi.python.org/pypi/setuptools -Author: Python Packaging Authority -Author-email: distutils-sig@python.org -License: PSF or ZPL -Description: =============================== - Installing and Using Setuptools - =============================== - - .. contents:: **Table of Contents** - - - ------------------------- - Installation Instructions - ------------------------- - - Upgrading from Distribute - ========================= - - Currently, Distribute disallows installing Setuptools 0.7+ over Distribute. - You must first uninstall any active version of Distribute first (see - `Uninstalling`_). 
- - Upgrading from Setuptools 0.6 - ============================= - - Upgrading from prior versions of Setuptools is supported. Initial reports - good success in this regard. - - Windows - ======= - - The recommended way to install setuptools on Windows is to download - `ez_setup.py`_ and run it. The script will download the appropriate .egg - file and install it for you. - - .. _ez_setup.py: https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - - For best results, uninstall previous versions FIRST (see `Uninstalling`_). - - Once installation is complete, you will find an ``easy_install`` program in - your Python ``Scripts`` subdirectory. For simple invocation and best results, - add this directory to your ``PATH`` environment variable, if it is not already - present. - - - Unix-based Systems including Mac OS X - ===================================== - - Download `ez_setup.py`_ and run it using the target Python version. The script - will download the appropriate version and install it for you:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | python - - Note that you will may need to invoke the command with superuser privileges to - install to the system Python:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py -O - | sudo python - - Alternatively, on Python 2.6 and later, Setuptools may be installed to a - user-local path:: - - > wget https://bitbucket.org/pypa/setuptools/raw/bootstrap/ez_setup.py - > python ez_setup.py --user - - - Python 2.4 and Python 2.5 support - ================================= - - Setuptools 2.0 and later requires Python 2.6 or later. To install setuptools - on Python 2.4 or Python 2.5, use the bootstrap script for Setuptools 1.x: - https://bitbucket.org/pypa/setuptools/raw/bootstrap-py24/ez_setup.py. 
- - - Advanced Installation - ===================== - - For more advanced installation options, such as installing to custom - locations or prefixes, download and extract the source - tarball from `Setuptools on PyPI `_ - and run setup.py with any supported distutils and Setuptools options. - For example:: - - setuptools-x.x$ python setup.py install --prefix=/opt/setuptools - - Use ``--help`` to get a full options list, but we recommend consulting - the `EasyInstall manual`_ for detailed instructions, especially `the section - on custom installation locations`_. - - .. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall - .. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations - - - Downloads - ========= - - All setuptools downloads can be found at `the project's home page in the Python - Package Index`_. Scroll to the very bottom of the page to find the links. - - .. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools - - In addition to the PyPI downloads, the development version of ``setuptools`` - is available from the `Bitbucket repo`_, and in-development versions of the - `0.6 branch`_ are available as well. - - .. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev - .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 - - Uninstalling - ============ - - On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` - installer, simply use the uninstall feature of "Add/Remove Programs" in the - Control Panel. - - Otherwise, to uninstall Setuptools or Distribute, regardless of the Python - version, delete all ``setuptools*`` and ``distribute*`` files and - directories from your system's ``site-packages`` directory - (and any other ``sys.path`` directories) FIRST. 
- - If you are upgrading or otherwise plan to re-install Setuptools or Distribute, - nothing further needs to be done. If you want to completely remove Setuptools, - you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts - and associated executables installed to the Python scripts directory. - - -------------------------------- - Using Setuptools and EasyInstall - -------------------------------- - - Here are some of the available manuals, tutorials, and other resources for - learning about Setuptools, Python Eggs, and EasyInstall: - - * `The EasyInstall user's guide and reference manual`_ - * `The setuptools Developer's Guide`_ - * `The pkg_resources API reference`_ - * `Package Compatibility Notes`_ (user-maintained) - * `The Internal Structure of Python Eggs`_ - - Questions, comments, and bug reports should be directed to the `distutils-sig - mailing list`_. If you have written (or know of) any tutorials, documentation, - plug-ins, or other resources for setuptools users, please let us know about - them there, so this reference list can be updated. If you have working, - *tested* patches to correct problems or add features, you may submit them to - the `setuptools bug tracker`_. - - .. _setuptools bug tracker: https://bitbucket.org/pypa/setuptools/issues - .. _Package Compatibility Notes: https://pythonhosted.org/setuptools/PackageNotes - .. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html - .. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html - .. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html - .. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html - .. 
_distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ - - - ------- - Credits - ------- - - * The original design for the ``.egg`` format and the ``pkg_resources`` API was - co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first - version of ``pkg_resources``, and supplied the OS X operating system version - compatibility algorithm. - - * Ian Bicking implemented many early "creature comfort" features of - easy_install, including support for downloading via Sourceforge and - Subversion repositories. Ian's comments on the Web-SIG about WSGI - application deployment also inspired the concept of "entry points" in eggs, - and he has given talks at PyCon and elsewhere to inform and educate the - community about eggs and setuptools. - - * Jim Fulton contributed time and effort to build automated tests of various - aspects of ``easy_install``, and supplied the doctests for the command-line - ``.exe`` wrappers on Windows. - - * Phillip J. Eby is the seminal author of setuptools, and - first proposed the idea of an importable binary distribution format for - Python application plug-ins. - - * Significant parts of the implementation of setuptools were funded by the Open - Source Applications Foundation, to provide a plug-in infrastructure for the - Chandler PIM application. In addition, many OSAF staffers (such as Mike - "Code Bear" Taylor) contributed their time and stress as guinea pigs for the - use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) - - * Tarek Ziadé is the principal author of the Distribute fork, which - re-invigorated the community on the project, encouraged renewed innovation, - and addressed many defects. - - * Since the merge with Distribute, Jason R. Coombs is the - maintainer of setuptools. The project is maintained in coordination with - the Python Packaging Authority (PyPA) and the larger Python community. - - .. 
_files: - - ======= - CHANGES - ======= - - --- - 2.2 - --- - - * `Issue #141 `_: Restored fix for allowing setup_requires dependencies to - override installed dependencies during setup. - * `Issue #128 `_: Fixed issue where only the first dependency link was honored - in a distribution where multiple dependency links were supplied. - - ----- - 2.1.2 - ----- - - * `Issue #144 `_: Read long_description using codecs module to avoid errors - installing on systems where LANG=C. - - ----- - 2.1.1 - ----- - - * `Issue #139 `_: Fix regression in re_finder for CVS repos (and maybe Git repos - as well). - - --- - 2.1 - --- - - * `Issue #129 `_: Suppress inspection of ``*.whl`` files when searching for files - in a zip-imported file. - * `Issue #131 `_: Fix RuntimeError when constructing an egg fetcher. - - ----- - 2.0.2 - ----- - - * Fix NameError during installation with Python implementations (e.g. Jython) - not containing parser module. - * Fix NameError in ``sdist:re_finder``. - - ----- - 2.0.1 - ----- - - * `Issue #124 `_: Fixed error in list detection in upload_docs. - - --- - 2.0 - --- - - * `Issue #121 `_: Exempt lib2to3 pickled grammars from DirectorySandbox. - * `Issue #41 `_: Dropped support for Python 2.4 and Python 2.5. Clients requiring - setuptools for those versions of Python should use setuptools 1.x. - * Removed ``setuptools.command.easy_install.HAS_USER_SITE``. Clients - expecting this boolean variable should use ``site.ENABLE_USER_SITE`` - instead. - * Removed ``pkg_resources.ImpWrapper``. Clients that expected this class - should use ``pkgutil.ImpImporter`` instead. - - ----- - 1.4.2 - ----- - - * `Issue #116 `_: Correct TypeError when reading a local package index on Python - 3. - - ----- - 1.4.1 - ----- - - * `Issue #114 `_: Use ``sys.getfilesystemencoding`` for decoding config in - ``bdist_wininst`` distributions. - - * `Issue #105 `_ and `Issue #113 `_: Establish a more robust technique for - determining the terminal encoding:: - - 1. 
Try ``getpreferredencoding`` - 2. If that returns US_ASCII or None, try the encoding from - ``getdefaultlocale``. If that encoding was a "fallback" because Python - could not figure it out from the environment or OS, encoding remains - unresolved. - 3. If the encoding is resolved, then make sure Python actually implements - the encoding. - 4. On the event of an error or unknown codec, revert to fallbacks - (UTF-8 on Darwin, ASCII on everything else). - 5. On the encoding is 'mac-roman' on Darwin, use UTF-8 as 'mac-roman' was - a bug on older Python releases. - - On a side note, it would seem that the encoding only matters for when SVN - does not yet support ``--xml`` and when getting repository and svn version - numbers. The ``--xml`` technique should yield UTF-8 according to some - messages on the SVN mailing lists. So if the version numbers are always - 7-bit ASCII clean, it may be best to only support the file parsing methods - for legacy SVN releases and support for SVN without the subprocess command - would simple go away as support for the older SVNs does. - - --- - 1.4 - --- - - * `Issue #27 `_: ``easy_install`` will now use credentials from .pypirc if - present for connecting to the package index. - * `Pull Request #21 `_: Omit unwanted newlines in ``package_index._encode_auth`` - when the username/password pair length indicates wrapping. - - ----- - 1.3.2 - ----- - - * `Issue #99 `_: Fix filename encoding issues in SVN support. - - ----- - 1.3.1 - ----- - - * Remove exuberant warning in SVN support when SVN is not used. - - --- - 1.3 - --- - - * Address security vulnerability in SSL match_hostname check as reported in - `Python #17997 `_. - * Prefer `backports.ssl_match_hostname - `_ for backport - implementation if present. - * Correct NameError in ``ssl_support`` module (``socket.error``). - - --- - 1.2 - --- - - * `Issue #26 `_: Add support for SVN 1.7. Special thanks to Philip Thiem for the - contribution. 
- * `Issue #93 `_: Wheels are now distributed with every release. Note that as - reported in `Issue #108 `_, as of Pip 1.4, scripts aren't installed properly - from wheels. Therefore, if using Pip to install setuptools from a wheel, - the ``easy_install`` command will not be available. - * Setuptools "natural" launcher support, introduced in 1.0, is now officially - supported. - - ----- - 1.1.7 - ----- - - * Fixed behavior of NameError handling in 'script template (dev).py' (script - launcher for 'develop' installs). - * ``ez_setup.py`` now ensures partial downloads are cleaned up following - a failed download. - * `Distribute #363 `_ and `Issue #55 `_: Skip an sdist test that fails on locales - other than UTF-8. - - ----- - 1.1.6 - ----- - - * `Distribute #349 `_: ``sandbox.execfile`` now opens the target file in binary - mode, thus honoring a BOM in the file when compiled. - - ----- - 1.1.5 - ----- - - * `Issue #69 `_: Second attempt at fix (logic was reversed). - - ----- - 1.1.4 - ----- - - * `Issue #77 `_: Fix error in upload command (Python 2.4). - - ----- - 1.1.3 - ----- - - * Fix NameError in previous patch. - - ----- - 1.1.2 - ----- - - * `Issue #69 `_: Correct issue where 404 errors are returned for URLs with - fragments in them (such as #egg=). - - ----- - 1.1.1 - ----- - - * `Issue #75 `_: Add ``--insecure`` option to ez_setup.py to accommodate - environments where a trusted SSL connection cannot be validated. - * `Issue #76 `_: Fix AttributeError in upload command with Python 2.4. - - --- - 1.1 - --- - - * `Issue #71 `_ (`Distribute #333 `_): EasyInstall now puts less emphasis on the - condition when a host is blocked via ``--allow-hosts``. - * `Issue #72 `_: Restored Python 2.4 compatibility in ``ez_setup.py``. 
- - --- - 1.0 - --- - - * `Issue #60 `_: On Windows, Setuptools supports deferring to another launcher, - such as Vinay Sajip's `pylauncher `_ - (included with Python 3.3) to launch console and GUI scripts and not install - its own launcher executables. This experimental functionality is currently - only enabled if the ``SETUPTOOLS_LAUNCHER`` environment variable is set to - "natural". In the future, this behavior may become default, but only after - it has matured and seen substantial adoption. The ``SETUPTOOLS_LAUNCHER`` - also accepts "executable" to force the default behavior of creating launcher - executables. - * `Issue #63 `_: Bootstrap script (ez_setup.py) now prefers Powershell, curl, or - wget for retrieving the Setuptools tarball for improved security of the - install. The script will still fall back to a simple ``urlopen`` on - platforms that do not have these tools. - * `Issue #65 `_: Deprecated the ``Features`` functionality. - * `Issue #52 `_: In ``VerifyingHTTPSConn``, handle a tunnelled (proxied) - connection. - - Backward-Incompatible Changes - ============================= - - This release includes a couple of backward-incompatible changes, but most if - not all users will find 1.0 a drop-in replacement for 0.9. - - * `Issue #50 `_: Normalized API of environment marker support. Specifically, - removed line number and filename from SyntaxErrors when returned from - `pkg_resources.invalid_marker`. Any clients depending on the specific - string representation of exceptions returned by that function may need to - be updated to account for this change. - * `Issue #50 `_: SyntaxErrors generated by `pkg_resources.invalid_marker` are - normalized for cross-implementation consistency. - * Removed ``--ignore-conflicts-at-my-risk`` and ``--delete-conflicting`` - options to easy_install. These options have been deprecated since 0.6a11. - - ----- - 0.9.8 - ----- - - * `Issue #53 `_: Fix NameErrors in `_vcs_split_rev_from_url`. 
- - ----- - 0.9.7 - ----- - - * `Issue #49 `_: Correct AttributeError on PyPy where a hashlib.HASH object does - not have a `.name` attribute. - * `Issue #34 `_: Documentation now refers to bootstrap script in code repository - referenced by bookmark. - * Add underscore-separated keys to environment markers (markerlib). - - ----- - 0.9.6 - ----- - - * `Issue #44 `_: Test failure on Python 2.4 when MD5 hash doesn't have a `.name` - attribute. - - ----- - 0.9.5 - ----- - - * `Python #17980 `_: Fix security vulnerability in SSL certificate validation. - - ----- - 0.9.4 - ----- - - * `Issue #43 `_: Fix issue (introduced in 0.9.1) with version resolution when - upgrading over other releases of Setuptools. - - ----- - 0.9.3 - ----- - - * `Issue #42 `_: Fix new ``AttributeError`` introduced in last fix. - - ----- - 0.9.2 - ----- - - * `Issue #42 `_: Fix regression where blank checksums would trigger an - ``AttributeError``. - - ----- - 0.9.1 - ----- - - * `Distribute #386 `_: Allow other positional and keyword arguments to os.open. - * Corrected dependency on certifi mis-referenced in 0.9. - - --- - 0.9 - --- - - * `package_index` now validates hashes other than MD5 in download links. - - --- - 0.8 - --- - - * Code base now runs on Python 2.4 - Python 3.3 without Python 2to3 - conversion. - - ----- - 0.7.8 - ----- - - * `Distribute #375 `_: Yet another fix for yet another regression. - - ----- - 0.7.7 - ----- - - * `Distribute #375 `_: Repair AttributeError created in last release (redo). - * `Issue #30 `_: Added test for get_cache_path. - - ----- - 0.7.6 - ----- - - * `Distribute #375 `_: Repair AttributeError created in last release. - - ----- - 0.7.5 - ----- - - * `Issue #21 `_: Restore Python 2.4 compatibility in ``test_easy_install``. - * `Distribute #375 `_: Merged additional warning from Distribute 0.6.46. 
- * Now honor the environment variable - ``SETUPTOOLS_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT`` in addition to the now - deprecated ``DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT``. - - ----- - 0.7.4 - ----- - - * `Issue #20 `_: Fix comparison of parsed SVN version on Python 3. - - ----- - 0.7.3 - ----- - - * `Issue #1 `_: Disable installation of Windows-specific files on non-Windows systems. - * Use new sysconfig module with Python 2.7 or >=3.2. - - ----- - 0.7.2 - ----- - - * `Issue #14 `_: Use markerlib when the `parser` module is not available. - * `Issue #10 `_: ``ez_setup.py`` now uses HTTPS to download setuptools from PyPI. - - ----- - 0.7.1 - ----- - - * Fix NameError (`Issue #3 `_) again - broken in bad merge. - - --- - 0.7 - --- - - * Merged Setuptools and Distribute. See docs/merge.txt for details. - - Added several features that were slated for setuptools 0.6c12: - - * Index URL now defaults to HTTPS. - * Added experimental environment marker support. Now clients may designate a - PEP-426 environment marker for "extra" dependencies. Setuptools uses this - feature in ``setup.py`` for optional SSL and certificate validation support - on older platforms. Based on Distutils-SIG discussions, the syntax is - somewhat tentative. There should probably be a PEP with a firmer spec before - the feature should be considered suitable for use. - * Added support for SSL certificate validation when installing packages from - an HTTPS service. - - ----- - 0.7b4 - ----- - - * `Issue #3 `_: Fixed NameError in SSL support. - - ------ - 0.6.49 - ------ - - * Move warning check in ``get_cache_path`` to follow the directory creation - to avoid errors when the cache path does not yet exist. Fixes the error - reported in `Distribute #375 `_. - - ------ - 0.6.48 - ------ - - * Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46 (redo). 
- - ------ - 0.6.47 - ------ - - * Correct AttributeError in ``ResourceManager.get_cache_path`` introduced in - 0.6.46. - - ------ - 0.6.46 - ------ - - * `Distribute #375 `_: Issue a warning if the PYTHON_EGG_CACHE or otherwise - customized egg cache location specifies a directory that's group- or - world-writable. - - ------ - 0.6.45 - ------ - - * `Distribute #379 `_: ``distribute_setup.py`` now traps VersionConflict as well, - restoring ability to upgrade from an older setuptools version. - - ------ - 0.6.44 - ------ - - * ``distribute_setup.py`` has been updated to allow Setuptools 0.7 to - satisfy use_setuptools. - - ------ - 0.6.43 - ------ - - * `Distribute #378 `_: Restore support for Python 2.4 Syntax (regression in 0.6.42). - - ------ - 0.6.42 - ------ - - * External links finder no longer yields duplicate links. - * `Distribute #337 `_: Moved site.py to setuptools/site-patch.py (graft of very old - patch from setuptools trunk which inspired PR `#31 `_). - - ------ - 0.6.41 - ------ - - * `Distribute #27 `_: Use public api for loading resources from zip files rather than - the private method `_zip_directory_cache`. - * Added a new function ``easy_install.get_win_launcher`` which may be used by - third-party libraries such as buildout to get a suitable script launcher. - - ------ - 0.6.40 - ------ - - * `Distribute #376 `_: brought back cli.exe and gui.exe that were deleted in the - previous release. - - ------ - 0.6.39 - ------ - - * Add support for console launchers on ARM platforms. - * Fix possible issue in GUI launchers where the subsystem was not supplied to - the linker. - * Launcher build script now refactored for robustness. - * `Distribute #375 `_: Resources extracted from a zip egg to the file system now also - check the contents of the file against the zip contents during each - invocation of get_resource_filename. - - ------ - 0.6.38 - ------ - - * `Distribute #371 `_: The launcher manifest file is now installed properly. 
- - ------ - 0.6.37 - ------ - - * `Distribute #143 `_: Launcher scripts, including easy_install itself, are now - accompanied by a manifest on 32-bit Windows environments to avoid the - Installer Detection Technology and thus undesirable UAC elevation described - in `this Microsoft article - `_. - - ------ - 0.6.36 - ------ - - * `Pull Request #35 `_: In `Buildout #64 `_, it was reported that - under Python 3, installation of distutils scripts could attempt to copy - the ``__pycache__`` directory as a file, causing an error, apparently only - under Windows. Easy_install now skips all directories when processing - metadata scripts. - - ------ - 0.6.35 - ------ - - - Note this release is backward-incompatible with distribute 0.6.23-0.6.34 in - how it parses version numbers. - - * `Distribute #278 `_: Restored compatibility with distribute 0.6.22 and setuptools - 0.6. Updated the documentation to match more closely with the version - parsing as intended in setuptools 0.6. - - ------ - 0.6.34 - ------ - - * `Distribute #341 `_: 0.6.33 fails to build under Python 2.4. - - ------ - 0.6.33 - ------ - - * Fix 2 errors with Jython 2.5. - * Fix 1 failure with Jython 2.5 and 2.7. - * Disable workaround for Jython scripts on Linux systems. - * `Distribute #336 `_: `setup.py` no longer masks failure exit code when tests fail. - * Fix issue in pkg_resources where try/except around a platform-dependent - import would trigger hook load failures on Mercurial. See pull request 32 - for details. - * `Distribute #341 `_: Fix a ResourceWarning. - - ------ - 0.6.32 - ------ - - * Fix test suite with Python 2.6. - * Fix some DeprecationWarnings and ResourceWarnings. - * `Distribute #335 `_: Backed out `setup_requires` superceding installed requirements - until regression can be addressed. - - ------ - 0.6.31 - ------ - - * `Distribute #303 `_: Make sure the manifest only ever contains UTF-8 in Python 3. 
- * `Distribute #329 `_: Properly close files created by tests for compatibility with - Jython. - * Work around `Jython #1980 `_ and `Jython #1981 `_. - * `Distribute #334 `_: Provide workaround for packages that reference `sys.__stdout__` - such as numpy does. This change should address - `virtualenv `#359 `_ `_ as long - as the system encoding is UTF-8 or the IO encoding is specified in the - environment, i.e.:: - - PYTHONIOENCODING=utf8 pip install numpy - - * Fix for encoding issue when installing from Windows executable on Python 3. - * `Distribute #323 `_: Allow `setup_requires` requirements to supercede installed - requirements. Added some new keyword arguments to existing pkg_resources - methods. Also had to updated how __path__ is handled for namespace packages - to ensure that when a new egg distribution containing a namespace package is - placed on sys.path, the entries in __path__ are found in the same order they - would have been in had that egg been on the path when pkg_resources was - first imported. - - ------ - 0.6.30 - ------ - - * `Distribute #328 `_: Clean up temporary directories in distribute_setup.py. - * Fix fatal bug in distribute_setup.py. - - ------ - 0.6.29 - ------ - - * `Pull Request #14 `_: Honor file permissions in zip files. - * `Distribute #327 `_: Merged pull request `#24 `_ to fix a dependency problem with pip. - * Merged pull request `#23 `_ to fix https://github.com/pypa/virtualenv/issues/301. - * If Sphinx is installed, the `upload_docs` command now runs `build_sphinx` - to produce uploadable documentation. - * `Distribute #326 `_: `upload_docs` provided mangled auth credentials under Python 3. - * `Distribute #320 `_: Fix check for "createable" in distribute_setup.py. - * `Distribute #305 `_: Remove a warning that was triggered during normal operations. - * `Distribute #311 `_: Print metadata in UTF-8 independent of platform. - * `Distribute #303 `_: Read manifest file with UTF-8 encoding under Python 3. 
- * `Distribute #301 `_: Allow to run tests of namespace packages when using 2to3. - * `Distribute #304 `_: Prevent import loop in site.py under Python 3.3. - * `Distribute #283 `_: Reenable scanning of `*.pyc` / `*.pyo` files on Python 3.3. - * `Distribute #299 `_: The develop command didn't work on Python 3, when using 2to3, - as the egg link would go to the Python 2 source. Linking to the 2to3'd code - in build/lib makes it work, although you will have to rebuild the module - before testing it. - * `Distribute #306 `_: Even if 2to3 is used, we build in-place under Python 2. - * `Distribute #307 `_: Prints the full path when .svn/entries is broken. - * `Distribute #313 `_: Support for sdist subcommands (Python 2.7) - * `Distribute #314 `_: test_local_index() would fail an OS X. - * `Distribute #310 `_: Non-ascii characters in a namespace __init__.py causes errors. - * `Distribute #218 `_: Improved documentation on behavior of `package_data` and - `include_package_data`. Files indicated by `package_data` are now included - in the manifest. - * `distribute_setup.py` now allows a `--download-base` argument for retrieving - distribute from a specified location. - - ------ - 0.6.28 - ------ - - * `Distribute #294 `_: setup.py can now be invoked from any directory. - * Scripts are now installed honoring the umask. - * Added support for .dist-info directories. - * `Distribute #283 `_: Fix and disable scanning of `*.pyc` / `*.pyo` files on - Python 3.3. - - ------ - 0.6.27 - ------ - - * Support current snapshots of CPython 3.3. - * Distribute now recognizes README.rst as a standard, default readme file. - * Exclude 'encodings' modules when removing modules from sys.modules. - Workaround for `#285 `_. - * `Distribute #231 `_: Don't fiddle with system python when used with buildout - (bootstrap.py) - - ------ - 0.6.26 - ------ - - * `Distribute #183 `_: Symlinked files are now extracted from source distributions. 
- * `Distribute #227 `_: Easy_install fetch parameters are now passed during the - installation of a source distribution; now fulfillment of setup_requires - dependencies will honor the parameters passed to easy_install. - - ------ - 0.6.25 - ------ - - * `Distribute #258 `_: Workaround a cache issue - * `Distribute #260 `_: distribute_setup.py now accepts the --user parameter for - Python 2.6 and later. - * `Distribute #262 `_: package_index.open_with_auth no longer throws LookupError - on Python 3. - * `Distribute #269 `_: AttributeError when an exception occurs reading Manifest.in - on late releases of Python. - * `Distribute #272 `_: Prevent TypeError when namespace package names are unicode - and single-install-externally-managed is used. Also fixes PIP issue - 449. - * `Distribute #273 `_: Legacy script launchers now install with Python2/3 support. - - ------ - 0.6.24 - ------ - - * `Distribute #249 `_: Added options to exclude 2to3 fixers - - ------ - 0.6.23 - ------ - - * `Distribute #244 `_: Fixed a test - * `Distribute #243 `_: Fixed a test - * `Distribute #239 `_: Fixed a test - * `Distribute #240 `_: Fixed a test - * `Distribute #241 `_: Fixed a test - * `Distribute #237 `_: Fixed a test - * `Distribute #238 `_: easy_install now uses 64bit executable wrappers on 64bit Python - * `Distribute #208 `_: Fixed parsed_versions, it now honors post-releases as noted in the documentation - * `Distribute #207 `_: Windows cli and gui wrappers pass CTRL-C to child python process - * `Distribute #227 `_: easy_install now passes its arguments to setup.py bdist_egg - * `Distribute #225 `_: Fixed a NameError on Python 2.5, 2.4 - - ------ - 0.6.21 - ------ - - * `Distribute #225 `_: FIxed a regression on py2.4 - - ------ - 0.6.20 - ------ - - * `Distribute #135 `_: Include url in warning when processing URLs in package_index. - * `Distribute #212 `_: Fix issue where easy_instal fails on Python 3 on windows installer. - * `Distribute #213 `_: Fix typo in documentation. 
- - ------ - 0.6.19 - ------ - - * `Distribute #206 `_: AttributeError: 'HTTPMessage' object has no attribute 'getheaders' - - ------ - 0.6.18 - ------ - - * `Distribute #210 `_: Fixed a regression introduced by `Distribute #204 `_ fix. - - ------ - 0.6.17 - ------ - - * Support 'DISTRIBUTE_DISABLE_VERSIONED_EASY_INSTALL_SCRIPT' environment - variable to allow to disable installation of easy_install-${version} script. - * Support Python >=3.1.4 and >=3.2.1. - * `Distribute #204 `_: Don't try to import the parent of a namespace package in - declare_namespace - * `Distribute #196 `_: Tolerate responses with multiple Content-Length headers - * `Distribute #205 `_: Sandboxing doesn't preserve working_set. Leads to setup_requires - problems. - - ------ - 0.6.16 - ------ - - * Builds sdist gztar even on Windows (avoiding `Distribute #193 `_). - * `Distribute #192 `_: Fixed metadata omitted on Windows when package_dir - specified with forward-slash. - * `Distribute #195 `_: Cython build support. - * `Distribute #200 `_: Issues with recognizing 64-bit packages on Windows. - - ------ - 0.6.15 - ------ - - * Fixed typo in bdist_egg - * Several issues under Python 3 has been solved. - * `Distribute #146 `_: Fixed missing DLL files after easy_install of windows exe package. - - ------ - 0.6.14 - ------ - - * `Distribute #170 `_: Fixed unittest failure. Thanks to Toshio. - * `Distribute #171 `_: Fixed race condition in unittests cause deadlocks in test suite. - * `Distribute #143 `_: Fixed a lookup issue with easy_install. - Thanks to David and Zooko. 
- * `Distribute #174 `_: Fixed the edit mode when its used with setuptools itself - - ------ - 0.6.13 - ------ - - * `Distribute #160 `_: 2.7 gives ValueError("Invalid IPv6 URL") - * `Distribute #150 `_: Fixed using ~/.local even in a --no-site-packages virtualenv - * `Distribute #163 `_: scan index links before external links, and don't use the md5 when - comparing two distributions - - ------ - 0.6.12 - ------ - - * `Distribute #149 `_: Fixed various failures on 2.3/2.4 - - ------ - 0.6.11 - ------ - - * Found another case of SandboxViolation - fixed - * `Distribute #15 `_ and `Distribute #48 `_: Introduced a socket timeout of 15 seconds on url openings - * Added indexsidebar.html into MANIFEST.in - * `Distribute #108 `_: Fixed TypeError with Python3.1 - * `Distribute #121 `_: Fixed --help install command trying to actually install. - * `Distribute #112 `_: Added an os.makedirs so that Tarek's solution will work. - * `Distribute #133 `_: Added --no-find-links to easy_install - * Added easy_install --user - * `Distribute #100 `_: Fixed develop --user not taking '.' in PYTHONPATH into account - * `Distribute #134 `_: removed spurious UserWarnings. Patch by VanLindberg - * `Distribute #138 `_: cant_write_to_target error when setup_requires is used. - * `Distribute #147 `_: respect the sys.dont_write_bytecode flag - - ------ - 0.6.10 - ------ - - * Reverted change made for the DistributionNotFound exception because - zc.buildout uses the exception message to get the name of the - distribution. - - ----- - 0.6.9 - ----- - - * `Distribute #90 `_: unknown setuptools version can be added in the working set - * `Distribute #87 `_: setupt.py doesn't try to convert distribute_setup.py anymore - Initial Patch by arfrever. - * `Distribute #89 `_: added a side bar with a download link to the doc. - * `Distribute #86 `_: fixed missing sentence in pkg_resources doc. - * Added a nicer error message when a DistributionNotFound is raised. 
- * `Distribute #80 `_: test_develop now works with Python 3.1 - * `Distribute #93 `_: upload_docs now works if there is an empty sub-directory. - * `Distribute #70 `_: exec bit on non-exec files - * `Distribute #99 `_: now the standalone easy_install command doesn't uses a - "setup.cfg" if any exists in the working directory. It will use it - only if triggered by ``install_requires`` from a setup.py call - (install, develop, etc). - * `Distribute #101 `_: Allowing ``os.devnull`` in Sandbox - * `Distribute #92 `_: Fixed the "no eggs" found error with MacPort - (platform.mac_ver() fails) - * `Distribute #103 `_: test_get_script_header_jython_workaround not run - anymore under py3 with C or POSIX local. Contributed by Arfrever. - * `Distribute #104 `_: remvoved the assertion when the installation fails, - with a nicer message for the end user. - * `Distribute #100 `_: making sure there's no SandboxViolation when - the setup script patches setuptools. - - ----- - 0.6.8 - ----- - - * Added "check_packages" in dist. (added in Setuptools 0.6c11) - * Fixed the DONT_PATCH_SETUPTOOLS state. - - ----- - 0.6.7 - ----- - - * `Distribute #58 `_: Added --user support to the develop command - * `Distribute #11 `_: Generated scripts now wrap their call to the script entry point - in the standard "if name == 'main'" - * Added the 'DONT_PATCH_SETUPTOOLS' environment variable, so virtualenv - can drive an installation that doesn't patch a global setuptools. - * Reviewed unladen-swallow specific change from - http://code.google.com/p/unladen-swallow/source/detail?spec=svn875&r=719 - and determined that it no longer applies. Distribute should work fine with - Unladen Swallow 2009Q3. - * `Distribute #21 `_: Allow PackageIndex.open_url to gracefully handle all cases of a - httplib.HTTPException instead of just InvalidURL and BadStatusLine. - * Removed virtual-python.py from this distribution and updated documentation - to point to the actively maintained virtualenv instead. 
- * `Distribute #64 `_: use_setuptools no longer rebuilds the distribute egg every - time it is run - * use_setuptools now properly respects the requested version - * use_setuptools will no longer try to import a distribute egg for the - wrong Python version - * `Distribute #74 `_: no_fake should be True by default. - * `Distribute #72 `_: avoid a bootstrapping issue with easy_install -U - - ----- - 0.6.6 - ----- - - * Unified the bootstrap file so it works on both py2.x and py3k without 2to3 - (patch by Holger Krekel) - - ----- - 0.6.5 - ----- - - * `Distribute #65 `_: cli.exe and gui.exe are now generated at build time, - depending on the platform in use. - - * `Distribute #67 `_: Fixed doc typo (PEP 381/382) - - * Distribute no longer shadows setuptools if we require a 0.7-series - setuptools. And an error is raised when installing a 0.7 setuptools with - distribute. - - * When run from within buildout, no attempt is made to modify an existing - setuptools egg, whether in a shared egg directory or a system setuptools. - - * Fixed a hole in sandboxing allowing builtin file to write outside of - the sandbox. - - ----- - 0.6.4 - ----- - - * Added the generation of `distribute_setup_3k.py` during the release. - This closes `Distribute #52 `_. - - * Added an upload_docs command to easily upload project documentation to - PyPI's https://pythonhosted.org. This close issue `Distribute #56 `_. - - * Fixed a bootstrap bug on the use_setuptools() API. - - ----- - 0.6.3 - ----- - - setuptools - ========== - - * Fixed a bunch of calls to file() that caused crashes on Python 3. - - bootstrapping - ============= - - * Fixed a bug in sorting that caused bootstrap to fail on Python 3. - - ----- - 0.6.2 - ----- - - setuptools - ========== - - * Added Python 3 support; see docs/python3.txt. - This closes `Old Setuptools #39 `_. - - * Added option to run 2to3 automatically when installing on Python 3. - This closes issue `Distribute #31 `_. 
- - * Fixed invalid usage of requirement.parse, that broke develop -d. - This closes `Old Setuptools #44 `_. - - * Fixed script launcher for 64-bit Windows. - This closes `Old Setuptools #2 `_. - - * KeyError when compiling extensions. - This closes `Old Setuptools #41 `_. - - bootstrapping - ============= - - * Fixed bootstrap not working on Windows. This closes issue `Distribute #49 `_. - - * Fixed 2.6 dependencies. This closes issue `Distribute #50 `_. - - * Make sure setuptools is patched when running through easy_install - This closes `Old Setuptools #40 `_. - - ----- - 0.6.1 - ----- - - setuptools - ========== - - * package_index.urlopen now catches BadStatusLine and malformed url errors. - This closes `Distribute #16 `_ and `Distribute #18 `_. - - * zip_ok is now False by default. This closes `Old Setuptools #33 `_. - - * Fixed invalid URL error catching. `Old Setuptools #20 `_. - - * Fixed invalid bootstraping with easy_install installation (`Distribute #40 `_). - Thanks to Florian Schulze for the help. - - * Removed buildout/bootstrap.py. A new repository will create a specific - bootstrap.py script. - - - bootstrapping - ============= - - * The boostrap process leave setuptools alone if detected in the system - and --root or --prefix is provided, but is not in the same location. - This closes `Distribute #10 `_. - - --- - 0.6 - --- - - setuptools - ========== - - * Packages required at build time where not fully present at install time. - This closes `Distribute #12 `_. - - * Protected against failures in tarfile extraction. This closes `Distribute #10 `_. - - * Made Jython api_tests.txt doctest compatible. This closes `Distribute #7 `_. - - * sandbox.py replaced builtin type file with builtin function open. This - closes `Distribute #6 `_. - - * Immediately close all file handles. This closes `Distribute #3 `_. - - * Added compatibility with Subversion 1.6. This references `Distribute #1 `_. 
- - pkg_resources - ============= - - * Avoid a call to /usr/bin/sw_vers on OSX and use the official platform API - instead. Based on a patch from ronaldoussoren. This closes issue `#5 `_. - - * Fixed a SandboxViolation for mkdir that could occur in certain cases. - This closes `Distribute #13 `_. - - * Allow to find_on_path on systems with tight permissions to fail gracefully. - This closes `Distribute #9 `_. - - * Corrected inconsistency between documentation and code of add_entry. - This closes `Distribute #8 `_. - - * Immediately close all file handles. This closes `Distribute #3 `_. - - easy_install - ============ - - * Immediately close all file handles. This closes `Distribute #3 `_. - - ----- - 0.6c9 - ----- - - * Fixed a missing files problem when using Windows source distributions on - non-Windows platforms, due to distutils not handling manifest file line - endings correctly. - - * Updated Pyrex support to work with Pyrex 0.9.6 and higher. - - * Minor changes for Jython compatibility, including skipping tests that can't - work on Jython. - - * Fixed not installing eggs in ``install_requires`` if they were also used for - ``setup_requires`` or ``tests_require``. - - * Fixed not fetching eggs in ``install_requires`` when running tests. - - * Allow ``ez_setup.use_setuptools()`` to upgrade existing setuptools - installations when called from a standalone ``setup.py``. - - * Added a warning if a namespace package is declared, but its parent package - is not also declared as a namespace. - - * Support Subversion 1.5 - - * Removed use of deprecated ``md5`` module if ``hashlib`` is available - - * Fixed ``bdist_wininst upload`` trying to upload the ``.exe`` twice - - * Fixed ``bdist_egg`` putting a ``native_libs.txt`` in the source package's - ``.egg-info``, when it should only be in the built egg's ``EGG-INFO``. - - * Ensure that _full_name is set on all shared libs before extensions are - checked for shared lib usage. 
(Fixes a bug in the experimental shared - library build support.) - - * Fix to allow unpacked eggs containing native libraries to fail more - gracefully under Google App Engine (with an ``ImportError`` loading the - C-based module, instead of getting a ``NameError``). - - ----- - 0.6c7 - ----- - - * Fixed ``distutils.filelist.findall()`` crashing on broken symlinks, and - ``egg_info`` command failing on new, uncommitted SVN directories. - - * Fix import problems with nested namespace packages installed via - ``--root`` or ``--single-version-externally-managed``, due to the - parent package not having the child package as an attribute. - - ----- - 0.6c6 - ----- - - * Added ``--egg-path`` option to ``develop`` command, allowing you to force - ``.egg-link`` files to use relative paths (allowing them to be shared across - platforms on a networked drive). - - * Fix not building binary RPMs correctly. - - * Fix "eggsecutables" (such as setuptools' own egg) only being runnable with - bash-compatible shells. - - * Fix ``#!`` parsing problems in Windows ``.exe`` script wrappers, when there - was whitespace inside a quoted argument or at the end of the ``#!`` line - (a regression introduced in 0.6c4). - - * Fix ``test`` command possibly failing if an older version of the project - being tested was installed on ``sys.path`` ahead of the test source - directory. - - * Fix ``find_packages()`` treating ``ez_setup`` and directories with ``.`` in - their names as packages. - - ----- - 0.6c5 - ----- - - * Fix uploaded ``bdist_rpm`` packages being described as ``bdist_egg`` - packages under Python versions less than 2.5. - - * Fix uploaded ``bdist_wininst`` packages being described as suitable for - "any" version by Python 2.5, even if a ``--target-version`` was specified. - - ----- - 0.6c4 - ----- - - * Overhauled Windows script wrapping to support ``bdist_wininst`` better. 
- Scripts installed with ``bdist_wininst`` will always use ``#!python.exe`` or - ``#!pythonw.exe`` as the executable name (even when built on non-Windows - platforms!), and the wrappers will look for the executable in the script's - parent directory (which should find the right version of Python). - - * Fix ``upload`` command not uploading files built by ``bdist_rpm`` or - ``bdist_wininst`` under Python 2.3 and 2.4. - - * Add support for "eggsecutable" headers: a ``#!/bin/sh`` script that is - prepended to an ``.egg`` file to allow it to be run as a script on Unix-ish - platforms. (This is mainly so that setuptools itself can have a single-file - installer on Unix, without doing multiple downloads, dealing with firewalls, - etc.) - - * Fix problem with empty revision numbers in Subversion 1.4 ``entries`` files - - * Use cross-platform relative paths in ``easy-install.pth`` when doing - ``develop`` and the source directory is a subdirectory of the installation - target directory. - - * Fix a problem installing eggs with a system packaging tool if the project - contained an implicit namespace package; for example if the ``setup()`` - listed a namespace package ``foo.bar`` without explicitly listing ``foo`` - as a namespace package. - - ----- - 0.6c3 - ----- - - * Fixed breakages caused by Subversion 1.4's new "working copy" format - - ----- - 0.6c2 - ----- - - * The ``ez_setup`` module displays the conflicting version of setuptools (and - its installation location) when a script requests a version that's not - available. - - * Running ``setup.py develop`` on a setuptools-using project will now install - setuptools if needed, instead of only downloading the egg. - - ----- - 0.6c1 - ----- - - * Fixed ``AttributeError`` when trying to download a ``setup_requires`` - dependency when a distribution lacks a ``dependency_links`` setting. 
- - * Made ``zip-safe`` and ``not-zip-safe`` flag files contain a single byte, so - as to play better with packaging tools that complain about zero-length - files. - - * Made ``setup.py develop`` respect the ``--no-deps`` option, which it - previously was ignoring. - - * Support ``extra_path`` option to ``setup()`` when ``install`` is run in - backward-compatibility mode. - - * Source distributions now always include a ``setup.cfg`` file that explicitly - sets ``egg_info`` options such that they produce an identical version number - to the source distribution's version number. (Previously, the default - version number could be different due to the use of ``--tag-date``, or if - the version was overridden on the command line that built the source - distribution.) - - ----- - 0.6b4 - ----- - - * Fix ``register`` not obeying name/version set by ``egg_info`` command, if - ``egg_info`` wasn't explicitly run first on the same command line. - - * Added ``--no-date`` and ``--no-svn-revision`` options to ``egg_info`` - command, to allow suppressing tags configured in ``setup.cfg``. - - * Fixed redundant warnings about missing ``README`` file(s); it should now - appear only if you are actually a source distribution. - - ----- - 0.6b3 - ----- - - * Fix ``bdist_egg`` not including files in subdirectories of ``.egg-info``. - - * Allow ``.py`` files found by the ``include_package_data`` option to be - automatically included. Remove duplicate data file matches if both - ``include_package_data`` and ``package_data`` are used to refer to the same - files. - - ----- - 0.6b1 - ----- - - * Strip ``module`` from the end of compiled extension modules when computing - the name of a ``.py`` loader/wrapper. (Python's import machinery ignores - this suffix when searching for an extension module.) 
- - ------ - 0.6a11 - ------ - - * Added ``test_loader`` keyword to support custom test loaders - - * Added ``setuptools.file_finders`` entry point group to allow implementing - revision control plugins. - - * Added ``--identity`` option to ``upload`` command. - - * Added ``dependency_links`` to allow specifying URLs for ``--find-links``. - - * Enhanced test loader to scan packages as well as modules, and call - ``additional_tests()`` if present to get non-unittest tests. - - * Support namespace packages in conjunction with system packagers, by omitting - the installation of any ``__init__.py`` files for namespace packages, and - adding a special ``.pth`` file to create a working package in - ``sys.modules``. - - * Made ``--single-version-externally-managed`` automatic when ``--root`` is - used, so that most system packagers won't require special support for - setuptools. - - * Fixed ``setup_requires``, ``tests_require``, etc. not using ``setup.cfg`` or - other configuration files for their option defaults when installing, and - also made the install use ``--multi-version`` mode so that the project - directory doesn't need to support .pth files. - - * ``MANIFEST.in`` is now forcibly closed when any errors occur while reading - it. Previously, the file could be left open and the actual error would be - masked by problems trying to remove the open file on Windows systems. - - ------ - 0.6a10 - ------ - - * Fixed the ``develop`` command ignoring ``--find-links``. - - ----- - 0.6a9 - ----- - - * The ``sdist`` command no longer uses the traditional ``MANIFEST`` file to - create source distributions. ``MANIFEST.in`` is still read and processed, - as are the standard defaults and pruning. But the manifest is built inside - the project's ``.egg-info`` directory as ``SOURCES.txt``, and it is rebuilt - every time the ``egg_info`` command is run. 
- - * Added the ``include_package_data`` keyword to ``setup()``, allowing you to - automatically include any package data listed in revision control or - ``MANIFEST.in`` - - * Added the ``exclude_package_data`` keyword to ``setup()``, allowing you to - trim back files included via the ``package_data`` and - ``include_package_data`` options. - - * Fixed ``--tag-svn-revision`` not working when run from a source - distribution. - - * Added warning for namespace packages with missing ``declare_namespace()`` - - * Added ``tests_require`` keyword to ``setup()``, so that e.g. packages - requiring ``nose`` to run unit tests can make this dependency optional - unless the ``test`` command is run. - - * Made all commands that use ``easy_install`` respect its configuration - options, as this was causing some problems with ``setup.py install``. - - * Added an ``unpack_directory()`` driver to ``setuptools.archive_util``, so - that you can process a directory tree through a processing filter as if it - were a zipfile or tarfile. - - * Added an internal ``install_egg_info`` command to use as part of old-style - ``install`` operations, that installs an ``.egg-info`` directory with the - package. - - * Added a ``--single-version-externally-managed`` option to the ``install`` - command so that you can more easily wrap a "flat" egg in a system package. - - * Enhanced ``bdist_rpm`` so that it installs single-version eggs that - don't rely on a ``.pth`` file. The ``--no-egg`` option has been removed, - since all RPMs are now built in a more backwards-compatible format. - - * Support full roundtrip translation of eggs to and from ``bdist_wininst`` - format. Running ``bdist_wininst`` on a setuptools-based package wraps the - egg in an .exe that will safely install it as an egg (i.e., with metadata - and entry-point wrapper scripts), and ``easy_install`` can turn the .exe - back into an ``.egg`` file or directory and install it as such. 
- - - ----- - 0.6a8 - ----- - - * Fixed some problems building extensions when Pyrex was installed, especially - with Python 2.4 and/or packages using SWIG. - - * Made ``develop`` command accept all the same options as ``easy_install``, - and use the ``easy_install`` command's configuration settings as defaults. - - * Made ``egg_info --tag-svn-revision`` fall back to extracting the revision - number from ``PKG-INFO`` in case it is being run on a source distribution of - a snapshot taken from a Subversion-based project. - - * Automatically detect ``.dll``, ``.so`` and ``.dylib`` files that are being - installed as data, adding them to ``native_libs.txt`` automatically. - - * Fixed some problems with fresh checkouts of projects that don't include - ``.egg-info/PKG-INFO`` under revision control and put the project's source - code directly in the project directory. If such a package had any - requirements that get processed before the ``egg_info`` command can be run, - the setup scripts would fail with a "Missing 'Version:' header and/or - PKG-INFO file" error, because the egg runtime interpreted the unbuilt - metadata in a directory on ``sys.path`` (i.e. the current directory) as - being a corrupted egg. Setuptools now monkeypatches the distribution - metadata cache to pretend that the egg has valid version information, until - it has a chance to make it actually be so (via the ``egg_info`` command). - - ----- - 0.6a5 - ----- - - * Fixed missing gui/cli .exe files in distribution. Fixed bugs in tests. - - ----- - 0.6a3 - ----- - - * Added ``gui_scripts`` entry point group to allow installing GUI scripts - on Windows and other platforms. (The special handling is only for Windows; - other platforms are treated the same as for ``console_scripts``.) - - ----- - 0.6a2 - ----- - - * Added ``console_scripts`` entry point group to allow installing scripts - without the need to create separate script files. 
On Windows, console - scripts get an ``.exe`` wrapper so you can just type their name. On other - platforms, the scripts are written without a file extension. - - ----- - 0.6a1 - ----- - - * Added support for building "old-style" RPMs that don't install an egg for - the target package, using a ``--no-egg`` option. - - * The ``build_ext`` command now works better when using the ``--inplace`` - option and multiple Python versions. It now makes sure that all extensions - match the current Python version, even if newer copies were built for a - different Python version. - - * The ``upload`` command no longer attaches an extra ``.zip`` when uploading - eggs, as PyPI now supports egg uploads without trickery. - - * The ``ez_setup`` script/module now displays a warning before downloading - the setuptools egg, and attempts to check the downloaded egg against an - internal MD5 checksum table. - - * Fixed the ``--tag-svn-revision`` option of ``egg_info`` not finding the - latest revision number; it was using the revision number of the directory - containing ``setup.py``, not the highest revision number in the project. - - * Added ``eager_resources`` setup argument - - * The ``sdist`` command now recognizes Subversion "deleted file" entries and - does not include them in source distributions. - - * ``setuptools`` now embeds itself more thoroughly into the distutils, so that - other distutils extensions (e.g. py2exe, py2app) will subclass setuptools' - versions of things, rather than the native distutils ones. - - * Added ``entry_points`` and ``setup_requires`` arguments to ``setup()``; - ``setup_requires`` allows you to automatically find and download packages - that are needed in order to *build* your project (as opposed to running it). - - * ``setuptools`` now finds its commands, ``setup()`` argument validators, and - metadata writers using entry points, so that they can be extended by - third-party packages. See `Creating distutils Extensions - `_ - for more details. 
- - * The vestigial ``depends`` command has been removed. It was never finished - or documented, and never would have worked without EasyInstall - which it - pre-dated and was never compatible with. - - ------ - 0.5a12 - ------ - - * The zip-safety scanner now checks for modules that might be used with - ``python -m``, and marks them as unsafe for zipping, since Python 2.4 can't - handle ``-m`` on zipped modules. - - ------ - 0.5a11 - ------ - - * Fix breakage of the "develop" command that was caused by the addition of - ``--always-unzip`` to the ``easy_install`` command. - - ----- - 0.5a9 - ----- - - * Include ``svn:externals`` directories in source distributions as well as - normal subversion-controlled files and directories. - - * Added ``exclude=patternlist`` option to ``setuptools.find_packages()`` - - * Changed --tag-svn-revision to include an "r" in front of the revision number - for better readability. - - * Added ability to build eggs without including source files (except for any - scripts, of course), using the ``--exclude-source-files`` option to - ``bdist_egg``. - - * ``setup.py install`` now automatically detects when an "unmanaged" package - or module is going to be on ``sys.path`` ahead of a package being installed, - thereby preventing the newer version from being imported. If this occurs, - a warning message is output to ``sys.stderr``, but installation proceeds - anyway. The warning message informs the user what files or directories - need deleting, and advises them they can also use EasyInstall (with the - ``--delete-conflicting`` option) to do it automatically. - - * The ``egg_info`` command now adds a ``top_level.txt`` file to the metadata - directory that lists all top-level modules and packages in the distribution. - This is used by the ``easy_install`` command to find possibly-conflicting - "unmanaged" packages when installing the distribution. - - * Added ``zip_safe`` and ``namespace_packages`` arguments to ``setup()``. 
- Added package analysis to determine zip-safety if the ``zip_safe`` flag - is not given, and advise the author regarding what code might need changing. - - * Fixed the swapped ``-d`` and ``-b`` options of ``bdist_egg``. - - ----- - 0.5a8 - ----- - - * The "egg_info" command now always sets the distribution metadata to "safe" - forms of the distribution name and version, so that distribution files will - be generated with parseable names (i.e., ones that don't include '-' in the - name or version). Also, this means that if you use the various ``--tag`` - options of "egg_info", any distributions generated will use the tags in the - version, not just egg distributions. - - * Added support for defining command aliases in distutils configuration files, - under the "[aliases]" section. To prevent recursion and to allow aliases to - call the command of the same name, a given alias can be expanded only once - per command-line invocation. You can define new aliases with the "alias" - command, either for the local, global, or per-user configuration. - - * Added "rotate" command to delete old distribution files, given a set of - patterns to match and the number of files to keep. (Keeps the most - recently-modified distribution files matching each pattern.) - - * Added "saveopts" command that saves all command-line options for the current - invocation to the local, global, or per-user configuration file. Useful for - setting defaults without having to hand-edit a configuration file. - - * Added a "setopt" command that sets a single option in a specified distutils - configuration file. - - ----- - 0.5a7 - ----- - - * Added "upload" support for egg and source distributions, including a bug - fix for "upload" and a temporary workaround for lack of .egg support in - PyPI. 
- - ----- - 0.5a6 - ----- - - * Beefed up the "sdist" command so that if you don't have a MANIFEST.in, it - will include all files under revision control (CVS or Subversion) in the - current directory, and it will regenerate the list every time you create a - source distribution, not just when you tell it to. This should make the - default "do what you mean" more often than the distutils' default behavior - did, while still retaining the old behavior in the presence of MANIFEST.in. - - * Fixed the "develop" command always updating .pth files, even if you - specified ``-n`` or ``--dry-run``. - - * Slightly changed the format of the generated version when you use - ``--tag-build`` on the "egg_info" command, so that you can make tagged - revisions compare *lower* than the version specified in setup.py (e.g. by - using ``--tag-build=dev``). - - ----- - 0.5a5 - ----- - - * Added ``develop`` command to ``setuptools``-based packages. This command - installs an ``.egg-link`` pointing to the package's source directory, and - script wrappers that ``execfile()`` the source versions of the package's - scripts. This lets you put your development checkout(s) on sys.path without - having to actually install them. (To uninstall the link, use - use ``setup.py develop --uninstall``.) - - * Added ``egg_info`` command to ``setuptools``-based packages. This command - just creates or updates the "projectname.egg-info" directory, without - building an egg. (It's used by the ``bdist_egg``, ``test``, and ``develop`` - commands.) - - * Enhanced the ``test`` command so that it doesn't install the package, but - instead builds any C extensions in-place, updates the ``.egg-info`` - metadata, adds the source directory to ``sys.path``, and runs the tests - directly on the source. This avoids an "unmanaged" installation of the - package to ``site-packages`` or elsewhere. 
- - * Made ``easy_install`` a standard ``setuptools`` command, moving it from - the ``easy_install`` module to ``setuptools.command.easy_install``. Note - that if you were importing or extending it, you must now change your imports - accordingly. ``easy_install.py`` is still installed as a script, but not as - a module. - - ----- - 0.5a4 - ----- - - * Setup scripts using setuptools can now list their dependencies directly in - the setup.py file, without having to manually create a ``depends.txt`` file. - The ``install_requires`` and ``extras_require`` arguments to ``setup()`` - are used to create a dependencies file automatically. If you are manually - creating ``depends.txt`` right now, please switch to using these setup - arguments as soon as practical, because ``depends.txt`` support will be - removed in the 0.6 release cycle. For documentation on the new arguments, - see the ``setuptools.dist.Distribution`` class. - - * Setup scripts using setuptools now always install using ``easy_install`` - internally, for ease of uninstallation and upgrading. - - ----- - 0.5a1 - ----- - - * Added support for "self-installation" bootstrapping. Packages can now - include ``ez_setup.py`` in their source distribution, and add the following - to their ``setup.py``, in order to automatically bootstrap installation of - setuptools as part of their setup process:: - - from ez_setup import use_setuptools - use_setuptools() - - from setuptools import setup - # etc... - - ----- - 0.4a2 - ----- - - * Added ``ez_setup.py`` installer/bootstrap script to make initial setuptools - installation easier, and to allow distributions using setuptools to avoid - having to include setuptools in their source distribution. - - * All downloads are now managed by the ``PackageIndex`` class (which is now - subclassable and replaceable), so that embedders can more easily override - download logic, give download progress reports, etc. 
The class has also - been moved to the new ``setuptools.package_index`` module. - - * The ``Installer`` class no longer handles downloading, manages a temporary - directory, or tracks the ``zip_ok`` option. Downloading is now handled - by ``PackageIndex``, and ``Installer`` has become an ``easy_install`` - command class based on ``setuptools.Command``. - - * There is a new ``setuptools.sandbox.run_setup()`` API to invoke a setup - script in a directory sandbox, and a new ``setuptools.archive_util`` module - with an ``unpack_archive()`` API. These were split out of EasyInstall to - allow reuse by other tools and applications. - - * ``setuptools.Command`` now supports reinitializing commands using keyword - arguments to set/reset options. Also, ``Command`` subclasses can now set - their ``command_consumes_arguments`` attribute to ``True`` in order to - receive an ``args`` option containing the rest of the command line. - - ----- - 0.3a2 - ----- - - * Added new options to ``bdist_egg`` to allow tagging the egg's version number - with a subversion revision number, the current date, or an explicit tag - value. Run ``setup.py bdist_egg --help`` to get more information. - - * Misc. bug fixes - - ----- - 0.3a1 - ----- - - * Initial release. 
- -Keywords: CPAN PyPI distutils eggs package management -Platform: UNKNOWN -Classifier: Development Status :: 5 - Production/Stable -Classifier: Intended Audience :: Developers -Classifier: License :: OSI Approved :: Python Software Foundation License -Classifier: License :: OSI Approved :: Zope Public License -Classifier: Operating System :: OS Independent -Classifier: Programming Language :: Python :: 2.6 -Classifier: Programming Language :: Python :: 2.7 -Classifier: Programming Language :: Python :: 3 -Classifier: Programming Language :: Python :: 3.1 -Classifier: Programming Language :: Python :: 3.2 -Classifier: Programming Language :: Python :: 3.3 -Classifier: Topic :: Software Development :: Libraries :: Python Modules -Classifier: Topic :: System :: Archiving :: Packaging -Classifier: Topic :: System :: Systems Administration -Classifier: Topic :: Utilities diff --git a/libs/setuptools-2.2/setuptools.egg-info/SOURCES.txt b/libs/setuptools-2.2/setuptools.egg-info/SOURCES.txt deleted file mode 100644 index 78f8361..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/SOURCES.txt +++ /dev/null @@ -1,153 +0,0 @@ -CHANGES (links).txt -CHANGES.txt -CONTRIBUTORS.txt -DEVGUIDE.txt -MANIFEST.in -README.txt -easy_install.py -ez_setup.py -launcher.c -pkg_resources.py -release.py -setup.cfg -setup.py -_markerlib/__init__.py -_markerlib/markers.py -docs/Makefile -docs/conf.py -docs/development.txt -docs/easy_install.txt -docs/formats.txt -docs/index.txt -docs/merge-faq.txt -docs/merge.txt -docs/pkg_resources.txt -docs/python3.txt -docs/releases.txt -docs/roadmap.txt -docs/setuptools.txt -docs/using.txt -docs/_templates/indexsidebar.html -docs/_theme/nature/theme.conf -docs/_theme/nature/static/nature.css_t -docs/_theme/nature/static/pygments.css -setuptools/__init__.py -setuptools/archive_util.py -setuptools/cli-32.exe -setuptools/cli-64.exe -setuptools/cli-arm-32.exe -setuptools/cli.exe -setuptools/compat.py -setuptools/depends.py -setuptools/dist.py 
-setuptools/extension.py -setuptools/gui-32.exe -setuptools/gui-64.exe -setuptools/gui-arm-32.exe -setuptools/gui.exe -setuptools/lib2to3_ex.py -setuptools/package_index.py -setuptools/py26compat.py -setuptools/py27compat.py -setuptools/py31compat.py -setuptools/sandbox.py -setuptools/script template (dev).py -setuptools/script template.py -setuptools/site-patch.py -setuptools/ssl_support.py -setuptools/svn_utils.py -setuptools/version.py -setuptools.egg-info/PKG-INFO -setuptools.egg-info/SOURCES.txt -setuptools.egg-info/dependency_links.txt -setuptools.egg-info/entry_points.txt -setuptools.egg-info/requires.txt -setuptools.egg-info/requires.txt.orig -setuptools.egg-info/top_level.txt -setuptools.egg-info/zip-safe -setuptools/command/__init__.py -setuptools/command/alias.py -setuptools/command/bdist_egg.py -setuptools/command/bdist_rpm.py -setuptools/command/bdist_wininst.py -setuptools/command/build_ext.py -setuptools/command/build_py.py -setuptools/command/develop.py -setuptools/command/easy_install.py -setuptools/command/egg_info.py -setuptools/command/install.py -setuptools/command/install_egg_info.py -setuptools/command/install_lib.py -setuptools/command/install_scripts.py -setuptools/command/launcher manifest.xml -setuptools/command/register.py -setuptools/command/rotate.py -setuptools/command/saveopts.py -setuptools/command/sdist.py -setuptools/command/setopt.py -setuptools/command/test.py -setuptools/command/upload_docs.py -setuptools/tests/__init__.py -setuptools/tests/doctest.py -setuptools/tests/entries-v10 -setuptools/tests/environment.py -setuptools/tests/py26compat.py -setuptools/tests/script-with-bom.py -setuptools/tests/server.py -setuptools/tests/test_bdist_egg.py -setuptools/tests/test_build_ext.py -setuptools/tests/test_develop.py -setuptools/tests/test_dist_info.py -setuptools/tests/test_easy_install.py -setuptools/tests/test_egg_info.py -setuptools/tests/test_markerlib.py -setuptools/tests/test_packageindex.py 
-setuptools/tests/test_resources.py -setuptools/tests/test_sandbox.py -setuptools/tests/test_sdist.py -setuptools/tests/test_svn.py -setuptools/tests/test_test.py -setuptools/tests/test_upload_docs.py -setuptools/tests/win_script_wrapper.txt -setuptools/tests/indexes/test_links_priority/external.html -setuptools/tests/indexes/test_links_priority/simple/foobar/index.html -setuptools/tests/svn_data/dummy.zip -setuptools/tests/svn_data/dummy13.zip -setuptools/tests/svn_data/dummy14.zip -setuptools/tests/svn_data/dummy15.zip -setuptools/tests/svn_data/dummy16.zip -setuptools/tests/svn_data/dummy17.zip -setuptools/tests/svn_data/dummy18.zip -setuptools/tests/svn_data/svn13_example.zip -setuptools/tests/svn_data/svn13_ext_list.txt -setuptools/tests/svn_data/svn13_ext_list.xml -setuptools/tests/svn_data/svn13_info.xml -setuptools/tests/svn_data/svn14_example.zip -setuptools/tests/svn_data/svn14_ext_list.txt -setuptools/tests/svn_data/svn14_ext_list.xml -setuptools/tests/svn_data/svn14_info.xml -setuptools/tests/svn_data/svn15_example.zip -setuptools/tests/svn_data/svn15_ext_list.txt -setuptools/tests/svn_data/svn15_ext_list.xml -setuptools/tests/svn_data/svn15_info.xml -setuptools/tests/svn_data/svn16_example.zip -setuptools/tests/svn_data/svn16_ext_list.txt -setuptools/tests/svn_data/svn16_ext_list.xml -setuptools/tests/svn_data/svn16_info.xml -setuptools/tests/svn_data/svn17_example.zip -setuptools/tests/svn_data/svn17_ext_list.txt -setuptools/tests/svn_data/svn17_ext_list.xml -setuptools/tests/svn_data/svn17_info.xml -setuptools/tests/svn_data/svn18_example.zip -setuptools/tests/svn_data/svn18_ext_list.txt -setuptools/tests/svn_data/svn18_ext_list.xml -setuptools/tests/svn_data/svn18_info.xml -tests/api_tests.txt -tests/manual_test.py -tests/test_ez_setup.py -tests/test_pkg_resources.py -tests/shlib_test/hello.c -tests/shlib_test/hello.pyx -tests/shlib_test/hellolib.c -tests/shlib_test/setup.py -tests/shlib_test/test_hello.py \ No newline at end of file diff --git 
a/libs/setuptools-2.2/setuptools.egg-info/dependency_links.txt b/libs/setuptools-2.2/setuptools.egg-info/dependency_links.txt deleted file mode 100644 index b1c9a2c..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/dependency_links.txt +++ /dev/null @@ -1,2 +0,0 @@ -https://pypi.python.org/packages/source/c/certifi/certifi-0.0.8.tar.gz#md5=dc5f5e7f0b5fc08d27654b17daa6ecec -https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.1.zip#md5=2f9accbebe8f7b4c06ac7aa83879b81c diff --git a/libs/setuptools-2.2/setuptools.egg-info/entry_points.txt b/libs/setuptools-2.2/setuptools.egg-info/entry_points.txt deleted file mode 100644 index 7c33ce1..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/entry_points.txt +++ /dev/null @@ -1,62 +0,0 @@ -[egg_info.writers] -eager_resources.txt = setuptools.command.egg_info:overwrite_arg -requires.txt = setuptools.command.egg_info:write_requirements -dependency_links.txt = setuptools.command.egg_info:overwrite_arg -top_level.txt = setuptools.command.egg_info:write_toplevel_names -PKG-INFO = setuptools.command.egg_info:write_pkg_info -entry_points.txt = setuptools.command.egg_info:write_entries -namespace_packages.txt = setuptools.command.egg_info:overwrite_arg -depends.txt = setuptools.command.egg_info:warn_depends_obsolete - -[distutils.commands] -setopt = setuptools.command.setopt:setopt -install_egg_info = setuptools.command.install_egg_info:install_egg_info -register = setuptools.command.register:register -develop = setuptools.command.develop:develop -install_lib = setuptools.command.install_lib:install_lib -bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm -build_py = setuptools.command.build_py:build_py -sdist = setuptools.command.sdist:sdist -test = setuptools.command.test:test -alias = setuptools.command.alias:alias -easy_install = setuptools.command.easy_install:easy_install -install = setuptools.command.install:install -build_ext = setuptools.command.build_ext:build_ext -egg_info = 
setuptools.command.egg_info:egg_info -saveopts = setuptools.command.saveopts:saveopts -bdist_egg = setuptools.command.bdist_egg:bdist_egg -bdist_wininst = setuptools.command.bdist_wininst:bdist_wininst -install_scripts = setuptools.command.install_scripts:install_scripts -upload_docs = setuptools.command.upload_docs:upload_docs -rotate = setuptools.command.rotate:rotate - -[distutils.setup_keywords] -namespace_packages = setuptools.dist:check_nsp -test_suite = setuptools.dist:check_test_suite -use_2to3_fixers = setuptools.dist:assert_string_list -include_package_data = setuptools.dist:assert_bool -packages = setuptools.dist:check_packages -tests_require = setuptools.dist:check_requirements -dependency_links = setuptools.dist:assert_string_list -use_2to3_exclude_fixers = setuptools.dist:assert_string_list -extras_require = setuptools.dist:check_extras -entry_points = setuptools.dist:check_entry_points -test_loader = setuptools.dist:check_importable -exclude_package_data = setuptools.dist:check_package_data -install_requires = setuptools.dist:check_requirements -eager_resources = setuptools.dist:assert_string_list -convert_2to3_doctests = setuptools.dist:assert_string_list -zip_safe = setuptools.dist:assert_bool -package_data = setuptools.dist:check_package_data -use_2to3 = setuptools.dist:assert_bool - -[setuptools.file_finders] -svn_cvs = setuptools.command.sdist:_default_revctrl - -[console_scripts] -easy_install-3.3 = setuptools.command.easy_install:main -easy_install = setuptools.command.easy_install:main - -[setuptools.installation] -eggsecutable = setuptools.command.easy_install:bootstrap - diff --git a/libs/setuptools-2.2/setuptools.egg-info/requires.txt b/libs/setuptools-2.2/setuptools.egg-info/requires.txt deleted file mode 100644 index 4fd464d..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/requires.txt +++ /dev/null @@ -1,7 +0,0 @@ - - -[ssl:sys_platform=='win32'] -wincertstore==0.1 - -[certs] -certifi==0.0.8 \ No newline at end of file diff --git 
a/libs/setuptools-2.2/setuptools.egg-info/requires.txt.orig b/libs/setuptools-2.2/setuptools.egg-info/requires.txt.orig deleted file mode 100644 index 4fd464d..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/requires.txt.orig +++ /dev/null @@ -1,7 +0,0 @@ - - -[ssl:sys_platform=='win32'] -wincertstore==0.1 - -[certs] -certifi==0.0.8 \ No newline at end of file diff --git a/libs/setuptools-2.2/setuptools.egg-info/top_level.txt b/libs/setuptools-2.2/setuptools.egg-info/top_level.txt deleted file mode 100644 index 29be8bc..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/top_level.txt +++ /dev/null @@ -1,4 +0,0 @@ -pkg_resources -easy_install -_markerlib -setuptools diff --git a/libs/setuptools-2.2/setuptools.egg-info/zip-safe b/libs/setuptools-2.2/setuptools.egg-info/zip-safe deleted file mode 100644 index 8b13789..0000000 --- a/libs/setuptools-2.2/setuptools.egg-info/zip-safe +++ /dev/null @@ -1 +0,0 @@ - diff --git a/libs/setuptools-2.2/setuptools/__init__.py b/libs/setuptools-2.2/setuptools/__init__.py deleted file mode 100644 index fc9b7b9..0000000 --- a/libs/setuptools-2.2/setuptools/__init__.py +++ /dev/null @@ -1,98 +0,0 @@ -"""Extensions to the 'distutils' for large or complex distributions""" - -import os -import sys -import distutils.core -import distutils.filelist -from distutils.core import Command as _Command -from distutils.util import convert_path - -import setuptools.version -from setuptools.extension import Extension -from setuptools.dist import Distribution, Feature, _get_unpatched -from setuptools.depends import Require - -__all__ = [ - 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', - 'find_packages' -] - -__version__ = setuptools.version.__version__ - -bootstrap_install_from = None - -# If we run 2to3 on .py files, should we also convert docstrings? 
-# Default: yes; assume that we can detect doctests reliably -run_2to3_on_doctests = True -# Standard package names for fixer packages -lib2to3_fixer_packages = ['lib2to3.fixes'] - -def find_packages(where='.', exclude=()): - """Return a list all Python packages found within directory 'where' - - 'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it - will be converted to the appropriate local path syntax. 'exclude' is a - sequence of package names to exclude; '*' can be used as a wildcard in the - names, such that 'foo.*' will exclude all subpackages of 'foo' (but not - 'foo' itself). - """ - out = [] - stack=[(convert_path(where), '')] - while stack: - where,prefix = stack.pop(0) - for name in os.listdir(where): - fn = os.path.join(where,name) - looks_like_package = ( - '.' not in name - and os.path.isdir(fn) - and os.path.isfile(os.path.join(fn, '__init__.py')) - ) - if looks_like_package: - out.append(prefix+name) - stack.append((fn, prefix+name+'.')) - for pat in list(exclude)+['ez_setup']: - from fnmatch import fnmatchcase - out = [item for item in out if not fnmatchcase(item,pat)] - return out - -setup = distutils.core.setup - -_Command = _get_unpatched(_Command) - -class Command(_Command): - __doc__ = _Command.__doc__ - - command_consumes_arguments = False - - def __init__(self, dist, **kw): - # Add support for keyword arguments - _Command.__init__(self,dist) - for k,v in kw.items(): - setattr(self,k,v) - - def reinitialize_command(self, command, reinit_subcommands=0, **kw): - cmd = _Command.reinitialize_command(self, command, reinit_subcommands) - for k,v in kw.items(): - setattr(cmd,k,v) # update command with keywords - return cmd - -distutils.core.Command = Command # we can't patch distutils.cmd, alas - -def findall(dir = os.curdir): - """Find all files under 'dir' and return the list of full filenames - (relative to 'dir'). 
- """ - all_files = [] - for base, dirs, files in os.walk(dir): - if base==os.curdir or base.startswith(os.curdir+os.sep): - base = base[2:] - if base: - files = [os.path.join(base, f) for f in files] - all_files.extend(filter(os.path.isfile, files)) - return all_files - -distutils.filelist.findall = findall # fix findall bug in distutils. - -# sys.dont_write_bytecode was introduced in Python 2.6. -_dont_write_bytecode = getattr(sys, 'dont_write_bytecode', - bool(os.environ.get("PYTHONDONTWRITEBYTECODE"))) diff --git a/libs/setuptools-2.2/setuptools/__pycache__/__init__.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/__init__.cpython-33.pyc deleted file mode 100644 index f67244a..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/__init__.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/archive_util.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/archive_util.cpython-33.pyc deleted file mode 100644 index 783eb94..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/archive_util.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/compat.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/compat.cpython-33.pyc deleted file mode 100644 index a977643..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/compat.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/depends.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/depends.cpython-33.pyc deleted file mode 100644 index f81df2c..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/depends.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/dist.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/dist.cpython-33.pyc deleted file mode 100644 index dc8e16c..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/dist.cpython-33.pyc and /dev/null 
differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/extension.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/extension.cpython-33.pyc deleted file mode 100644 index 499671a..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/extension.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/lib2to3_ex.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/lib2to3_ex.cpython-33.pyc deleted file mode 100644 index d602159..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/lib2to3_ex.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/package_index.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/package_index.cpython-33.pyc deleted file mode 100644 index 3b22758..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/package_index.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/py26compat.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/py26compat.cpython-33.pyc deleted file mode 100644 index e676ad1..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/py26compat.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/py27compat.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/py27compat.cpython-33.pyc deleted file mode 100644 index d55373b..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/py27compat.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/py31compat.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/py31compat.cpython-33.pyc deleted file mode 100644 index 572c811..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/py31compat.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/sandbox.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/sandbox.cpython-33.pyc 
deleted file mode 100644 index dd375f0..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/sandbox.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/ssl_support.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/ssl_support.cpython-33.pyc deleted file mode 100644 index 1d6d133..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/ssl_support.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/svn_utils.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/svn_utils.cpython-33.pyc deleted file mode 100644 index d324a82..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/svn_utils.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/__pycache__/version.cpython-33.pyc b/libs/setuptools-2.2/setuptools/__pycache__/version.cpython-33.pyc deleted file mode 100644 index c2f034c..0000000 Binary files a/libs/setuptools-2.2/setuptools/__pycache__/version.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/archive_util.py b/libs/setuptools-2.2/setuptools/archive_util.py deleted file mode 100644 index 1109f34..0000000 --- a/libs/setuptools-2.2/setuptools/archive_util.py +++ /dev/null @@ -1,210 +0,0 @@ -"""Utilities for extracting common archive formats""" - - -__all__ = [ - "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", - "UnrecognizedFormat", "extraction_drivers", "unpack_directory", -] - -import zipfile, tarfile, os, shutil, posixpath -from pkg_resources import ensure_directory -from distutils.errors import DistutilsError - -class UnrecognizedFormat(DistutilsError): - """Couldn't recognize the archive type""" - -def default_filter(src,dst): - """The default progress/filter callback; returns True for all files""" - return dst - - - - - - - - - - - - - - - - - - - - - - - -def unpack_archive(filename, extract_dir, progress_filter=default_filter, - drivers=None 
-): - """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` - - `progress_filter` is a function taking two arguments: a source path - internal to the archive ('/'-separated), and a filesystem path where it - will be extracted. The callback must return the desired extract path - (which may be the same as the one passed in), or else ``None`` to skip - that file or directory. The callback can thus be used to report on the - progress of the extraction, as well as to filter the items extracted or - alter their extraction paths. - - `drivers`, if supplied, must be a non-empty sequence of functions with the - same signature as this function (minus the `drivers` argument), that raise - ``UnrecognizedFormat`` if they do not support extracting the designated - archive type. The `drivers` are tried in sequence until one is found that - does not raise an error, or until all are exhausted (in which case - ``UnrecognizedFormat`` is raised). If you do not supply a sequence of - drivers, the module's ``extraction_drivers`` constant will be used, which - means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that - order. 
- """ - for driver in drivers or extraction_drivers: - try: - driver(filename, extract_dir, progress_filter) - except UnrecognizedFormat: - continue - else: - return - else: - raise UnrecognizedFormat( - "Not a recognized archive type: %s" % filename - ) - - - - - - - -def unpack_directory(filename, extract_dir, progress_filter=default_filter): - """"Unpack" a directory, using the same interface as for archives - - Raises ``UnrecognizedFormat`` if `filename` is not a directory - """ - if not os.path.isdir(filename): - raise UnrecognizedFormat("%s is not a directory" % (filename,)) - - paths = {filename:('',extract_dir)} - for base, dirs, files in os.walk(filename): - src,dst = paths[base] - for d in dirs: - paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d) - for f in files: - name = src+f - target = os.path.join(dst,f) - target = progress_filter(src+f, target) - if not target: - continue # skip non-files - ensure_directory(target) - f = os.path.join(base,f) - shutil.copyfile(f, target) - shutil.copystat(f, target) - - - - - - - - - - - - - - - - - - -def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): - """Unpack zip `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined - by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - - if not zipfile.is_zipfile(filename): - raise UnrecognizedFormat("%s is not a zip file" % (filename,)) - - z = zipfile.ZipFile(filename) - try: - for info in z.infolist(): - name = info.filename - - # don't extract absolute paths or ones with .. in them - if name.startswith('/') or '..' 
in name.split('/'): - continue - - target = os.path.join(extract_dir, *name.split('/')) - target = progress_filter(name, target) - if not target: - continue - if name.endswith('/'): - # directory - ensure_directory(target) - else: - # file - ensure_directory(target) - data = z.read(info.filename) - f = open(target,'wb') - try: - f.write(data) - finally: - f.close() - del data - unix_attributes = info.external_attr >> 16 - if unix_attributes: - os.chmod(target, unix_attributes) - finally: - z.close() - - -def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): - """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` - - Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined - by ``tarfile.open()``). See ``unpack_archive()`` for an explanation - of the `progress_filter` argument. - """ - try: - tarobj = tarfile.open(filename) - except tarfile.TarError: - raise UnrecognizedFormat( - "%s is not a compressed or uncompressed tar file" % (filename,) - ) - try: - tarobj.chown = lambda *args: None # don't do any chowning! - for member in tarobj: - name = member.name - # don't extract absolute paths or ones with .. in them - if not name.startswith('/') and '..' 
not in name.split('/'): - prelim_dst = os.path.join(extract_dir, *name.split('/')) - - # resolve any links and to extract the link targets as normal files - while member is not None and (member.islnk() or member.issym()): - linkpath = member.linkname - if member.issym(): - linkpath = posixpath.join(posixpath.dirname(member.name), linkpath) - linkpath = posixpath.normpath(linkpath) - member = tarobj._getmember(linkpath) - - if member is not None and (member.isfile() or member.isdir()): - final_dst = progress_filter(name, prelim_dst) - if final_dst: - if final_dst.endswith(os.sep): - final_dst = final_dst[:-1] - try: - tarobj._extract_member(member, final_dst) # XXX Ugh - except tarfile.ExtractError: - pass # chown/chmod/mkfifo/mknode/makedev failed - return True - finally: - tarobj.close() - -extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/libs/setuptools-2.2/setuptools/cli-32.exe b/libs/setuptools-2.2/setuptools/cli-32.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/libs/setuptools-2.2/setuptools/cli-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/cli-64.exe b/libs/setuptools-2.2/setuptools/cli-64.exe deleted file mode 100644 index 675e6bf..0000000 Binary files a/libs/setuptools-2.2/setuptools/cli-64.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/cli-arm-32.exe b/libs/setuptools-2.2/setuptools/cli-arm-32.exe deleted file mode 100644 index 2f40402..0000000 Binary files a/libs/setuptools-2.2/setuptools/cli-arm-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/cli.exe b/libs/setuptools-2.2/setuptools/cli.exe deleted file mode 100644 index b1487b7..0000000 Binary files a/libs/setuptools-2.2/setuptools/cli.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__init__.py b/libs/setuptools-2.2/setuptools/command/__init__.py deleted file mode 100644 index 29c9d75..0000000 --- a/libs/setuptools-2.2/setuptools/command/__init__.py 
+++ /dev/null @@ -1,17 +0,0 @@ -__all__ = [ - 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', - 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', - 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', - 'register', 'bdist_wininst', 'upload_docs', -] - -from setuptools.command import install_scripts -import sys - -from distutils.command.bdist import bdist - -if 'egg' not in bdist.format_commands: - bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") - bdist.format_commands.append('egg') - -del bdist, sys diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/__init__.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/__init__.cpython-33.pyc deleted file mode 100644 index 86eec82..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/__init__.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/bdist_egg.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/bdist_egg.cpython-33.pyc deleted file mode 100644 index a9c44d2..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/bdist_egg.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/build_py.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/build_py.cpython-33.pyc deleted file mode 100644 index 2fca250..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/build_py.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/easy_install.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/easy_install.cpython-33.pyc deleted file mode 100644 index 22238f8..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/easy_install.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/egg_info.cpython-33.pyc 
b/libs/setuptools-2.2/setuptools/command/__pycache__/egg_info.cpython-33.pyc deleted file mode 100644 index af3a23b..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/egg_info.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/install.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/install.cpython-33.pyc deleted file mode 100644 index c481ee0..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/install.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/install_lib.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/install_lib.cpython-33.pyc deleted file mode 100644 index 0b6c81c..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/install_lib.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/install_scripts.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/install_scripts.cpython-33.pyc deleted file mode 100644 index 117987e..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/install_scripts.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/sdist.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/sdist.cpython-33.pyc deleted file mode 100644 index e270087..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/sdist.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/setopt.cpython-33.pyc b/libs/setuptools-2.2/setuptools/command/__pycache__/setopt.cpython-33.pyc deleted file mode 100644 index b17ff83..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/setopt.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/__pycache__/test.cpython-33.pyc 
b/libs/setuptools-2.2/setuptools/command/__pycache__/test.cpython-33.pyc deleted file mode 100644 index 6bacb73..0000000 Binary files a/libs/setuptools-2.2/setuptools/command/__pycache__/test.cpython-33.pyc and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/command/alias.py b/libs/setuptools-2.2/setuptools/command/alias.py deleted file mode 100644 index 52384e1..0000000 --- a/libs/setuptools-2.2/setuptools/command/alias.py +++ /dev/null @@ -1,82 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * -from setuptools.command.setopt import edit_config, option_base, config_file - -def shquote(arg): - """Quote an argument for later parsing by shlex.split()""" - for c in '"', "'", "\\", "#": - if c in arg: return repr(arg) - if arg.split() != [arg]: - return repr(arg) - return arg - - -class alias(option_base): - """Define a shortcut that invokes one or more commands""" - - description = "define a shortcut to invoke one or more commands" - command_consumes_arguments = True - - user_options = [ - ('remove', 'r', 'remove (unset) the alias'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.args = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.remove and len(self.args) != 1: - raise DistutilsOptionError( - "Must specify exactly one argument (the alias name) when " - "using --remove" - ) - - def run(self): - aliases = self.distribution.get_option_dict('aliases') - - if not self.args: - print("Command Aliases") - print("---------------") - for alias in aliases: - print("setup.py alias", format_alias(alias, aliases)) - return - - elif len(self.args)==1: - alias, = self.args - if self.remove: - command = None - elif alias in aliases: - print("setup.py alias", format_alias(alias, 
aliases)) - return - else: - print("No alias definition found for %r" % alias) - return - else: - alias = self.args[0] - command = ' '.join(map(shquote,self.args[1:])) - - edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run) - - -def format_alias(name, aliases): - source, command = aliases[name] - if source == config_file('global'): - source = '--global-config ' - elif source == config_file('user'): - source = '--user-config ' - elif source == config_file('local'): - source = '' - else: - source = '--filename=%r' % source - return source+name+' '+command - - - diff --git a/libs/setuptools-2.2/setuptools/command/bdist_egg.py b/libs/setuptools-2.2/setuptools/command/bdist_egg.py deleted file mode 100644 index c577615..0000000 --- a/libs/setuptools-2.2/setuptools/command/bdist_egg.py +++ /dev/null @@ -1,553 +0,0 @@ -"""setuptools.command.bdist_egg - -Build .egg distributions""" - -# This module should be kept compatible with Python 2.3 -import sys, os, marshal -from setuptools import Command -from distutils.dir_util import remove_tree, mkpath -try: - # Python 2.7 or >=3.2 - from sysconfig import get_path, get_python_version - def _get_purelib(): - return get_path("purelib") -except ImportError: - from distutils.sysconfig import get_python_lib, get_python_version - def _get_purelib(): - return get_python_lib(False) - -from distutils import log -from distutils.errors import DistutilsSetupError -from pkg_resources import get_build_platform, Distribution, ensure_directory -from pkg_resources import EntryPoint -from types import CodeType -from setuptools.compat import basestring, next -from setuptools.extension import Library - -def strip_module(filename): - if '.' 
in filename: - filename = os.path.splitext(filename)[0] - if filename.endswith('module'): - filename = filename[:-6] - return filename - -def write_stub(resource, pyfile): - f = open(pyfile,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __loader__, __file__", - " import sys, pkg_resources, imp", - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % resource, - " __loader__ = None; del __bootstrap__, __loader__", - " imp.load_dynamic(__name__,__file__)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - -# stub __init__.py for packages distributed without one -NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)' - -class bdist_egg(Command): - - description = "create an \"egg\" distribution" - - user_options = [ - ('bdist-dir=', 'b', - "temporary directory for creating the distribution"), - ('plat-name=', 'p', - "platform name to embed in generated filenames " - "(default: %s)" % get_build_platform()), - ('exclude-source-files', None, - "remove all .py files from the generated egg"), - ('keep-temp', 'k', - "keep the pseudo-installation tree around after " + - "creating the distribution archive"), - ('dist-dir=', 'd', - "directory to put final built distributions in"), - ('skip-build', None, - "skip rebuilding everything (for testing/debugging)"), - ] - - boolean_options = [ - 'keep-temp', 'skip-build', 'exclude-source-files' - ] - - - - - - - - - - - - - - - - - - def initialize_options (self): - self.bdist_dir = None - self.plat_name = None - self.keep_temp = 0 - self.dist_dir = None - self.skip_build = 0 - self.egg_output = None - self.exclude_source_files = None - - - def finalize_options(self): - ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") - self.egg_info = ei_cmd.egg_info - - if self.bdist_dir is None: - bdist_base = self.get_finalized_command('bdist').bdist_base - self.bdist_dir = os.path.join(bdist_base, 'egg') - - if self.plat_name is None: - self.plat_name = 
get_build_platform() - - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - if self.egg_output is None: - - # Compute filename of the output egg - basename = Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version, - get_python_version(), - self.distribution.has_ext_modules() and self.plat_name - ).egg_name() - - self.egg_output = os.path.join(self.dist_dir, basename+'.egg') - - - - - - - - - def do_install_data(self): - # Hack for packages that install data to install's --install-lib - self.get_finalized_command('install').install_lib = self.bdist_dir - - site_packages = os.path.normcase(os.path.realpath(_get_purelib())) - old, self.distribution.data_files = self.distribution.data_files,[] - - for item in old: - if isinstance(item,tuple) and len(item)==2: - if os.path.isabs(item[0]): - realpath = os.path.realpath(item[0]) - normalized = os.path.normcase(realpath) - if normalized==site_packages or normalized.startswith( - site_packages+os.sep - ): - item = realpath[len(site_packages)+1:], item[1] - # XXX else: raise ??? - self.distribution.data_files.append(item) - - try: - log.info("installing package data to %s" % self.bdist_dir) - self.call_command('install_data', force=0, root=None) - finally: - self.distribution.data_files = old - - - def get_outputs(self): - return [self.egg_output] - - - def call_command(self,cmdname,**kw): - """Invoke reinitialized command `cmdname` with keyword args""" - for dirname in INSTALL_DIRECTORY_ATTRS: - kw.setdefault(dirname,self.bdist_dir) - kw.setdefault('skip_build',self.skip_build) - kw.setdefault('dry_run', self.dry_run) - cmd = self.reinitialize_command(cmdname, **kw) - self.run_command(cmdname) - return cmd - - - def run(self): - # Generate metadata first - self.run_command("egg_info") - # We run install_lib before install_data, because some data hacks - # pull their data path from the install_lib command. 
- log.info("installing library code to %s" % self.bdist_dir) - instcmd = self.get_finalized_command('install') - old_root = instcmd.root; instcmd.root = None - if self.distribution.has_c_libraries() and not self.skip_build: - self.run_command('build_clib') - cmd = self.call_command('install_lib', warn_dir=0) - instcmd.root = old_root - - all_outputs, ext_outputs = self.get_ext_outputs() - self.stubs = [] - to_compile = [] - for (p,ext_name) in enumerate(ext_outputs): - filename,ext = os.path.splitext(ext_name) - pyfile = os.path.join(self.bdist_dir, strip_module(filename)+'.py') - self.stubs.append(pyfile) - log.info("creating stub loader for %s" % ext_name) - if not self.dry_run: - write_stub(os.path.basename(ext_name), pyfile) - to_compile.append(pyfile) - ext_outputs[p] = ext_name.replace(os.sep,'/') - - to_compile.extend(self.make_init_files()) - if to_compile: - cmd.byte_compile(to_compile) - if self.distribution.data_files: - self.do_install_data() - - # Make the EGG-INFO directory - archive_root = self.bdist_dir - egg_info = os.path.join(archive_root,'EGG-INFO') - self.mkpath(egg_info) - if self.distribution.scripts: - script_dir = os.path.join(egg_info, 'scripts') - log.info("installing scripts to %s" % script_dir) - self.call_command('install_scripts',install_dir=script_dir,no_ep=1) - - self.copy_metadata_to(egg_info) - native_libs = os.path.join(egg_info, "native_libs.txt") - if all_outputs: - log.info("writing %s" % native_libs) - if not self.dry_run: - ensure_directory(native_libs) - libs_file = open(native_libs, 'wt') - libs_file.write('\n'.join(all_outputs)) - libs_file.write('\n') - libs_file.close() - elif os.path.isfile(native_libs): - log.info("removing %s" % native_libs) - if not self.dry_run: - os.unlink(native_libs) - - write_safety_flag( - os.path.join(archive_root,'EGG-INFO'), self.zip_safe() - ) - - if os.path.exists(os.path.join(self.egg_info,'depends.txt')): - log.warn( - "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" - 
"Use the install_requires/extras_require setup() args instead." - ) - - if self.exclude_source_files: - self.zap_pyfiles() - - # Make the archive - make_zipfile(self.egg_output, archive_root, verbose=self.verbose, - dry_run=self.dry_run, mode=self.gen_header()) - if not self.keep_temp: - remove_tree(self.bdist_dir, dry_run=self.dry_run) - - # Add to 'Distribution.dist_files' so that the "upload" command works - getattr(self.distribution,'dist_files',[]).append( - ('bdist_egg',get_python_version(),self.egg_output)) - - - - - def zap_pyfiles(self): - log.info("Removing .py files from temporary directory") - for base,dirs,files in walk_egg(self.bdist_dir): - for name in files: - if name.endswith('.py'): - path = os.path.join(base,name) - log.debug("Deleting %s", path) - os.unlink(path) - - def zip_safe(self): - safe = getattr(self.distribution,'zip_safe',None) - if safe is not None: - return safe - log.warn("zip_safe flag not set; analyzing archive contents...") - return analyze_egg(self.bdist_dir, self.stubs) - - def make_init_files(self): - """Create missing package __init__ files""" - init_files = [] - for base,dirs,files in walk_egg(self.bdist_dir): - if base==self.bdist_dir: - # don't put an __init__ in the root - continue - for name in files: - if name.endswith('.py'): - if '__init__.py' not in files: - pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.') - if self.distribution.has_contents_for(pkg): - log.warn("Creating missing __init__.py for %s",pkg) - filename = os.path.join(base,'__init__.py') - if not self.dry_run: - f = open(filename,'w'); f.write(NS_PKG_STUB) - f.close() - init_files.append(filename) - break - else: - # not a package, don't traverse to subdirectories - dirs[:] = [] - - return init_files - - def gen_header(self): - epm = EntryPoint.parse_map(self.distribution.entry_points or '') - ep = epm.get('setuptools.installation',{}).get('eggsecutable') - if ep is None: - return 'w' # not an eggsecutable, do it the usual way. 
- - if not ep.attrs or ep.extras: - raise DistutilsSetupError( - "eggsecutable entry point (%r) cannot have 'extras' " - "or refer to a module" % (ep,) - ) - - pyver = sys.version[:3] - pkg = ep.module_name - full = '.'.join(ep.attrs) - base = ep.attrs[0] - basename = os.path.basename(self.egg_output) - - header = ( - "#!/bin/sh\n" - 'if [ `basename $0` = "%(basename)s" ]\n' - 'then exec python%(pyver)s -c "' - "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " - "from %(pkg)s import %(base)s; sys.exit(%(full)s())" - '" "$@"\n' - 'else\n' - ' echo $0 is not the correct name for this egg file.\n' - ' echo Please rename it back to %(basename)s and try again.\n' - ' exec false\n' - 'fi\n' - - ) % locals() - - if not self.dry_run: - mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) - f = open(self.egg_output, 'w') - f.write(header) - f.close() - return 'a' - - - def copy_metadata_to(self, target_dir): - "Copy metadata (egg info) to the target_dir" - # normalize the path (so that a forward-slash in egg_info will - # match using startswith below) - norm_egg_info = os.path.normpath(self.egg_info) - prefix = os.path.join(norm_egg_info,'') - for path in self.ei_cmd.filelist.files: - if path.startswith(prefix): - target = os.path.join(target_dir, path[len(prefix):]) - ensure_directory(target) - self.copy_file(path, target) - - def get_ext_outputs(self): - """Get a list of relative paths to C extensions in the output distro""" - - all_outputs = [] - ext_outputs = [] - - paths = {self.bdist_dir:''} - for base, dirs, files in os.walk(self.bdist_dir): - for filename in files: - if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: - all_outputs.append(paths[base]+filename) - for filename in dirs: - paths[os.path.join(base,filename)] = paths[base]+filename+'/' - - if self.distribution.has_ext_modules(): - build_cmd = self.get_finalized_command('build_ext') - for ext in build_cmd.extensions: - if isinstance(ext,Library): - continue - fullname = 
build_cmd.get_ext_fullname(ext.name) - filename = build_cmd.get_ext_filename(fullname) - if not os.path.basename(filename).startswith('dl-'): - if os.path.exists(os.path.join(self.bdist_dir,filename)): - ext_outputs.append(filename) - - return all_outputs, ext_outputs - - -NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) - - - - -def walk_egg(egg_dir): - """Walk an unpacked egg's contents, skipping the metadata directory""" - walker = os.walk(egg_dir) - base,dirs,files = next(walker) - if 'EGG-INFO' in dirs: - dirs.remove('EGG-INFO') - yield base,dirs,files - for bdf in walker: - yield bdf - -def analyze_egg(egg_dir, stubs): - # check for existing flag in EGG-INFO - for flag,fn in safety_flags.items(): - if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)): - return flag - if not can_scan(): return False - safe = True - for base, dirs, files in walk_egg(egg_dir): - for name in files: - if name.endswith('.py') or name.endswith('.pyw'): - continue - elif name.endswith('.pyc') or name.endswith('.pyo'): - # always scan, even if we already know we're not safe - safe = scan_module(egg_dir, base, name, stubs) and safe - return safe - -def write_safety_flag(egg_dir, safe): - # Write or remove zip safety flag file(s) - for flag,fn in safety_flags.items(): - fn = os.path.join(egg_dir, fn) - if os.path.exists(fn): - if safe is None or bool(safe) != flag: - os.unlink(fn) - elif safe is not None and bool(safe)==flag: - f=open(fn,'wt'); f.write('\n'); f.close() - -safety_flags = { - True: 'zip-safe', - False: 'not-zip-safe', -} - -def scan_module(egg_dir, base, name, stubs): - """Check whether module possibly uses unsafe-for-zipfile stuff""" - - filename = os.path.join(base,name) - if filename[:-1] in stubs: - return True # Extension module - pkg = base[len(egg_dir)+1:].replace(os.sep,'.') - module = pkg+(pkg and '.' 
or '')+os.path.splitext(name)[0] - if sys.version_info < (3, 3): - skip = 8 # skip magic & date - else: - skip = 12 # skip magic & date & file size - f = open(filename,'rb'); f.read(skip) - code = marshal.load(f); f.close() - safe = True - symbols = dict.fromkeys(iter_symbols(code)) - for bad in ['__file__', '__path__']: - if bad in symbols: - log.warn("%s: module references %s", module, bad) - safe = False - if 'inspect' in symbols: - for bad in [ - 'getsource', 'getabsfile', 'getsourcefile', 'getfile' - 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', - 'getinnerframes', 'getouterframes', 'stack', 'trace' - ]: - if bad in symbols: - log.warn("%s: module MAY be using inspect.%s", module, bad) - safe = False - if '__name__' in symbols and '__main__' in symbols and '.' not in module: - if sys.version[:3]=="2.4": # -m works w/zipfiles in 2.5 - log.warn("%s: top-level module may be 'python -m' script", module) - safe = False - return safe - -def iter_symbols(code): - """Yield names and strings used by `code` and its nested code objects""" - for name in code.co_names: yield name - for const in code.co_consts: - if isinstance(const,basestring): - yield const - elif isinstance(const,CodeType): - for name in iter_symbols(const): - yield name - -def can_scan(): - if not sys.platform.startswith('java') and sys.platform != 'cli': - # CPython, PyPy, etc. 
- return True - log.warn("Unable to analyze compiled code on this platform.") - log.warn("Please ask the author to include a 'zip_safe'" - " setting (either True or False) in the package's setup.py") - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -# Attribute names of options for commands that might need to be convinced to -# install to the egg build directory - -INSTALL_DIRECTORY_ATTRS = [ - 'install_lib', 'install_dir', 'install_data', 'install_base' -] - -def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=None, - mode='w' -): - """Create a zip file from all the files under 'base_dir'. The output - zip file will be named 'base_dir' + ".zip". Uses either the "zipfile" - Python module (if available) or the InfoZIP "zip" utility (if installed - and found on the default search path). If neither tool is available, - raises DistutilsExecError. Returns the name of the output zip file. - """ - import zipfile - mkpath(os.path.dirname(zip_filename), dry_run=dry_run) - log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) - - def visit(z, dirname, names): - for name in names: - path = os.path.normpath(os.path.join(dirname, name)) - if os.path.isfile(path): - p = path[len(base_dir)+1:] - if not dry_run: - z.write(path, p) - log.debug("adding '%s'" % p) - - if compress is None: - compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits - - compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)] - if not dry_run: - z = zipfile.ZipFile(zip_filename, mode, compression=compression) - for dirname, dirs, files in os.walk(base_dir): - visit(z, dirname, files) - z.close() - else: - for dirname, dirs, files in os.walk(base_dir): - visit(None, dirname, files) - return zip_filename -# diff --git a/libs/setuptools-2.2/setuptools/command/bdist_rpm.py b/libs/setuptools-2.2/setuptools/command/bdist_rpm.py deleted file mode 100644 index 8c48da3..0000000 --- 
a/libs/setuptools-2.2/setuptools/command/bdist_rpm.py +++ /dev/null @@ -1,82 +0,0 @@ -# This is just a kludge so that bdist_rpm doesn't guess wrong about the -# distribution name and version, if the egg_info command is going to alter -# them, another kludge to allow you to build old-style non-egg RPMs, and -# finally, a kludge to track .rpm files for uploading when run on Python <2.5. - -from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm -import sys, os - -class bdist_rpm(_bdist_rpm): - - def initialize_options(self): - _bdist_rpm.initialize_options(self) - self.no_egg = None - - if sys.version<"2.5": - # Track for uploading any .rpm file(s) moved to self.dist_dir - def move_file(self, src, dst, level=1): - _bdist_rpm.move_file(self, src, dst, level) - if dst==self.dist_dir and src.endswith('.rpm'): - getattr(self.distribution,'dist_files',[]).append( - ('bdist_rpm', - src.endswith('.src.rpm') and 'any' or sys.version[:3], - os.path.join(dst, os.path.basename(src))) - ) - - def run(self): - self.run_command('egg_info') # ensure distro name is up-to-date - _bdist_rpm.run(self) - - - - - - - - - - - - - - def _make_spec_file(self): - version = self.distribution.get_version() - rpmversion = version.replace('-','_') - spec = _bdist_rpm._make_spec_file(self) - line23 = '%define version '+version - line24 = '%define version '+rpmversion - spec = [ - line.replace( - "Source0: %{name}-%{version}.tar", - "Source0: %{name}-%{unmangled_version}.tar" - ).replace( - "setup.py install ", - "setup.py install --single-version-externally-managed " - ).replace( - "%setup", - "%setup -n %{name}-%{unmangled_version}" - ).replace(line23,line24) - for line in spec - ] - spec.insert(spec.index(line24)+1, "%define unmangled_version "+version) - return spec - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/bdist_wininst.py b/libs/setuptools-2.2/setuptools/command/bdist_wininst.py deleted file mode 100644 index e8521f8..0000000 --- 
a/libs/setuptools-2.2/setuptools/command/bdist_wininst.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.bdist_wininst import bdist_wininst as _bdist_wininst -import os, sys - -class bdist_wininst(_bdist_wininst): - _good_upload = _bad_upload = None - - def create_exe(self, arcname, fullname, bitmap=None): - _bdist_wininst.create_exe(self, arcname, fullname, bitmap) - installer_name = self.get_installer_filename(fullname) - if self.target_version: - pyversion = self.target_version - # fix 2.5+ bdist_wininst ignoring --target-version spec - self._bad_upload = ('bdist_wininst', 'any', installer_name) - else: - pyversion = 'any' - self._good_upload = ('bdist_wininst', pyversion, installer_name) - - def _fix_upload_names(self): - good, bad = self._good_upload, self._bad_upload - dist_files = getattr(self.distribution, 'dist_files', []) - if bad in dist_files: - dist_files.remove(bad) - if good not in dist_files: - dist_files.append(good) - - def reinitialize_command (self, command, reinit_subcommands=0): - cmd = self.distribution.reinitialize_command( - command, reinit_subcommands) - if command in ('install', 'install_lib'): - cmd.install_lib = None # work around distutils bug - return cmd - - def run(self): - self._is_running = True - try: - _bdist_wininst.run(self) - self._fix_upload_names() - finally: - self._is_running = False - - - if not hasattr(_bdist_wininst, 'get_installer_filename'): - def get_installer_filename(self, fullname): - # Factored out to allow overriding in subclasses - if self.target_version: - # if we create an installer for a specific python version, - # it's better to include this in the name - installer_name = os.path.join(self.dist_dir, - "%s.win32-py%s.exe" % - (fullname, self.target_version)) - else: - installer_name = os.path.join(self.dist_dir, - "%s.win32.exe" % fullname) - return installer_name - # get_installer_filename() - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git 
a/libs/setuptools-2.2/setuptools/command/build_ext.py b/libs/setuptools-2.2/setuptools/command/build_ext.py deleted file mode 100644 index 50a039c..0000000 --- a/libs/setuptools-2.2/setuptools/command/build_ext.py +++ /dev/null @@ -1,298 +0,0 @@ -from distutils.command.build_ext import build_ext as _du_build_ext -try: - # Attempt to use Pyrex for building extensions, if available - from Pyrex.Distutils.build_ext import build_ext as _build_ext -except ImportError: - _build_ext = _du_build_ext - -import os, sys -from distutils.file_util import copy_file -from setuptools.extension import Library -from distutils.ccompiler import new_compiler -from distutils.sysconfig import customize_compiler -try: - # Python 2.7 or >=3.2 - from sysconfig import _CONFIG_VARS -except ImportError: - from distutils.sysconfig import get_config_var - get_config_var("LDSHARED") # make sure _config_vars is initialized - del get_config_var - from distutils.sysconfig import _config_vars as _CONFIG_VARS -from distutils import log -from distutils.errors import * - -have_rtld = False -use_stubs = False -libtype = 'shared' - -if sys.platform == "darwin": - use_stubs = True -elif os.name != 'nt': - try: - from dl import RTLD_NOW - have_rtld = True - use_stubs = True - except ImportError: - pass - -def if_dl(s): - if have_rtld: - return s - return '' - - - - - - -class build_ext(_build_ext): - def run(self): - """Build extensions in build directory, then copy if --inplace""" - old_inplace, self.inplace = self.inplace, 0 - _build_ext.run(self) - self.inplace = old_inplace - if old_inplace: - self.copy_extensions_to_source() - - def copy_extensions_to_source(self): - build_py = self.get_finalized_command('build_py') - for ext in self.extensions: - fullname = self.get_ext_fullname(ext.name) - filename = self.get_ext_filename(fullname) - modpath = fullname.split('.') - package = '.'.join(modpath[:-1]) - package_dir = build_py.get_package_dir(package) - dest_filename = 
os.path.join(package_dir,os.path.basename(filename)) - src_filename = os.path.join(self.build_lib,filename) - - # Always copy, even if source is older than destination, to ensure - # that the right extensions for the current Python/platform are - # used. - copy_file( - src_filename, dest_filename, verbose=self.verbose, - dry_run=self.dry_run - ) - if ext._needs_stub: - self.write_stub(package_dir or os.curdir, ext, True) - - - if _build_ext is not _du_build_ext and not hasattr(_build_ext,'pyrex_sources'): - # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4 - def swig_sources(self, sources, *otherargs): - # first do any Pyrex processing - sources = _build_ext.swig_sources(self, sources) or sources - # Then do any actual SWIG stuff on the remainder - return _du_build_ext.swig_sources(self, sources, *otherargs) - - - - def get_ext_filename(self, fullname): - filename = _build_ext.get_ext_filename(self,fullname) - if fullname in self.ext_map: - ext = self.ext_map[fullname] - if isinstance(ext,Library): - fn, ext = os.path.splitext(filename) - return self.shlib_compiler.library_filename(fn,libtype) - elif use_stubs and ext._links_to_dynamic: - d,fn = os.path.split(filename) - return os.path.join(d,'dl-'+fn) - return filename - - def initialize_options(self): - _build_ext.initialize_options(self) - self.shlib_compiler = None - self.shlibs = [] - self.ext_map = {} - - def finalize_options(self): - _build_ext.finalize_options(self) - self.extensions = self.extensions or [] - self.check_extensions_list(self.extensions) - self.shlibs = [ext for ext in self.extensions - if isinstance(ext,Library)] - if self.shlibs: - self.setup_shlib_compiler() - for ext in self.extensions: - ext._full_name = self.get_ext_fullname(ext.name) - for ext in self.extensions: - fullname = ext._full_name - self.ext_map[fullname] = ext - - # distutils 3.1 will also ask for module names - # XXX what to do with conflicts? 
- self.ext_map[fullname.split('.')[-1]] = ext - - ltd = ext._links_to_dynamic = \ - self.shlibs and self.links_to_dynamic(ext) or False - ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library) - filename = ext._file_name = self.get_ext_filename(fullname) - libdir = os.path.dirname(os.path.join(self.build_lib,filename)) - if ltd and libdir not in ext.library_dirs: - ext.library_dirs.append(libdir) - if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: - ext.runtime_library_dirs.append(os.curdir) - - def setup_shlib_compiler(self): - compiler = self.shlib_compiler = new_compiler( - compiler=self.compiler, dry_run=self.dry_run, force=self.force - ) - if sys.platform == "darwin": - tmp = _CONFIG_VARS.copy() - try: - # XXX Help! I don't have any idea whether these are right... - _CONFIG_VARS['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup" - _CONFIG_VARS['CCSHARED'] = " -dynamiclib" - _CONFIG_VARS['SO'] = ".dylib" - customize_compiler(compiler) - finally: - _CONFIG_VARS.clear() - _CONFIG_VARS.update(tmp) - else: - customize_compiler(compiler) - - if self.include_dirs is not None: - compiler.set_include_dirs(self.include_dirs) - if self.define is not None: - # 'define' option is a list of (name,value) tuples - for (name,value) in self.define: - compiler.define_macro(name, value) - if self.undef is not None: - for macro in self.undef: - compiler.undefine_macro(macro) - if self.libraries is not None: - compiler.set_libraries(self.libraries) - if self.library_dirs is not None: - compiler.set_library_dirs(self.library_dirs) - if self.rpath is not None: - compiler.set_runtime_library_dirs(self.rpath) - if self.link_objects is not None: - compiler.set_link_objects(self.link_objects) - - # hack so distutils' build_extension() builds a library instead - compiler.link_shared_object = link_shared_object.__get__(compiler) - - - - def get_export_symbols(self, ext): - if isinstance(ext,Library): - return ext.export_symbols - return 
_build_ext.get_export_symbols(self,ext) - - def build_extension(self, ext): - _compiler = self.compiler - try: - if isinstance(ext,Library): - self.compiler = self.shlib_compiler - _build_ext.build_extension(self,ext) - if ext._needs_stub: - self.write_stub( - self.get_finalized_command('build_py').build_lib, ext - ) - finally: - self.compiler = _compiler - - def links_to_dynamic(self, ext): - """Return true if 'ext' links to a dynamic lib in the same package""" - # XXX this should check to ensure the lib is actually being built - # XXX as dynamic, and not just using a locally-found version or a - # XXX static-compiled version - libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) - pkg = '.'.join(ext._full_name.split('.')[:-1]+['']) - for libname in ext.libraries: - if pkg+libname in libnames: return True - return False - - def get_outputs(self): - outputs = _build_ext.get_outputs(self) - optimize = self.get_finalized_command('build_py').optimize - for ext in self.extensions: - if ext._needs_stub: - base = os.path.join(self.build_lib, *ext._full_name.split('.')) - outputs.append(base+'.py') - outputs.append(base+'.pyc') - if optimize: - outputs.append(base+'.pyo') - return outputs - - def write_stub(self, output_dir, ext, compile=False): - log.info("writing stub loader for %s to %s",ext._full_name, output_dir) - stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py' - if compile and os.path.exists(stub_file): - raise DistutilsError(stub_file+" already exists! 
Please delete.") - if not self.dry_run: - f = open(stub_file,'w') - f.write('\n'.join([ - "def __bootstrap__():", - " global __bootstrap__, __file__, __loader__", - " import sys, os, pkg_resources, imp"+if_dl(", dl"), - " __file__ = pkg_resources.resource_filename(__name__,%r)" - % os.path.basename(ext._file_name), - " del __bootstrap__", - " if '__loader__' in globals():", - " del __loader__", - if_dl(" old_flags = sys.getdlopenflags()"), - " old_dir = os.getcwd()", - " try:", - " os.chdir(os.path.dirname(__file__))", - if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), - " imp.load_dynamic(__name__,__file__)", - " finally:", - if_dl(" sys.setdlopenflags(old_flags)"), - " os.chdir(old_dir)", - "__bootstrap__()", - "" # terminal \n - ])) - f.close() - if compile: - from distutils.util import byte_compile - byte_compile([stub_file], optimize=0, - force=True, dry_run=self.dry_run) - optimize = self.get_finalized_command('install_lib').optimize - if optimize > 0: - byte_compile([stub_file], optimize=optimize, - force=True, dry_run=self.dry_run) - if os.path.exists(stub_file) and not self.dry_run: - os.unlink(stub_file) - - -if use_stubs or os.name=='nt': - # Build shared libraries - # - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): self.link( - self.SHARED_LIBRARY, objects, output_libname, - output_dir, libraries, library_dirs, runtime_library_dirs, - export_symbols, debug, extra_preargs, extra_postargs, - build_temp, target_lang - ) -else: - # Build static libraries everywhere else - libtype = 'static' - - def link_shared_object(self, objects, output_libname, output_dir=None, - libraries=None, library_dirs=None, runtime_library_dirs=None, - export_symbols=None, debug=0, extra_preargs=None, - extra_postargs=None, build_temp=None, target_lang=None - ): - # XXX we need to either 
disallow these attrs on Library instances, - # or warn/abort here if set, or something... - #libraries=None, library_dirs=None, runtime_library_dirs=None, - #export_symbols=None, extra_preargs=None, extra_postargs=None, - #build_temp=None - - assert output_dir is None # distutils build_ext doesn't pass this - output_dir,filename = os.path.split(output_libname) - basename, ext = os.path.splitext(filename) - if self.library_filename("x").startswith('lib'): - # strip 'lib' prefix; this is kludgy if some platform uses - # a different prefix - basename = basename[3:] - - self.create_static_lib( - objects, basename, output_dir, debug, target_lang - ) - - diff --git a/libs/setuptools-2.2/setuptools/command/build_py.py b/libs/setuptools-2.2/setuptools/command/build_py.py deleted file mode 100644 index 090b44d..0000000 --- a/libs/setuptools-2.2/setuptools/command/build_py.py +++ /dev/null @@ -1,221 +0,0 @@ -import os -import sys -import fnmatch -import textwrap -from distutils.command.build_py import build_py as _build_py -from distutils.util import convert_path -from glob import glob - -try: - from setuptools.lib2to3_ex import Mixin2to3 -except ImportError: - class Mixin2to3: - def run_2to3(self, files, doctests=True): - "do nothing" - -class build_py(_build_py, Mixin2to3): - """Enhanced 'build_py' command that includes data files with packages - - The data files are specified via a 'package_data' argument to 'setup()'. - See 'setuptools.dist.Distribution' for more details. - - Also, this version of the 'build_py' command allows you to specify both - 'py_modules' and 'packages' in the same setup operation. 
- """ - def finalize_options(self): - _build_py.finalize_options(self) - self.package_data = self.distribution.package_data - self.exclude_package_data = self.distribution.exclude_package_data or {} - if 'data_files' in self.__dict__: del self.__dict__['data_files'] - self.__updated_files = [] - self.__doctests_2to3 = [] - - def run(self): - """Build modules, packages, and copy data files to build directory""" - if not self.py_modules and not self.packages: - return - - if self.py_modules: - self.build_modules() - - if self.packages: - self.build_packages() - self.build_package_data() - - self.run_2to3(self.__updated_files, False) - self.run_2to3(self.__updated_files, True) - self.run_2to3(self.__doctests_2to3, True) - - # Only compile actual .py files, using our base class' idea of what our - # output files are. - self.byte_compile(_build_py.get_outputs(self, include_bytecode=0)) - - def __getattr__(self, attr): - if attr=='data_files': # lazily compute data files - self.data_files = files = self._get_data_files() - return files - return _build_py.__getattr__(self,attr) - - def build_module(self, module, module_file, package): - outfile, copied = _build_py.build_module(self, module, module_file, package) - if copied: - self.__updated_files.append(outfile) - return outfile, copied - - def _get_data_files(self): - """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" - self.analyze_manifest() - data = [] - for package in self.packages or (): - # Locate package source directory - src_dir = self.get_package_dir(package) - - # Compute package build directory - build_dir = os.path.join(*([self.build_lib] + package.split('.'))) - - # Length of path to strip from found files - plen = len(src_dir)+1 - - # Strip directory from globbed filenames - filenames = [ - file[plen:] for file in self.find_data_files(package, src_dir) - ] - data.append((package, src_dir, build_dir, filenames)) - return data - - def find_data_files(self, package, src_dir): - """Return 
filenames for package's data files in 'src_dir'""" - globs = (self.package_data.get('', []) - + self.package_data.get(package, [])) - files = self.manifest_files.get(package, [])[:] - for pattern in globs: - # Each pattern has to be converted to a platform-specific path - files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) - return self.exclude_data_files(package, src_dir, files) - - def build_package_data(self): - """Copy data files into build directory""" - for package, src_dir, build_dir, filenames in self.data_files: - for filename in filenames: - target = os.path.join(build_dir, filename) - self.mkpath(os.path.dirname(target)) - srcfile = os.path.join(src_dir, filename) - outf, copied = self.copy_file(srcfile, target) - srcfile = os.path.abspath(srcfile) - if copied and srcfile in self.distribution.convert_2to3_doctests: - self.__doctests_2to3.append(outf) - - def analyze_manifest(self): - self.manifest_files = mf = {} - if not self.distribution.include_package_data: - return - src_dirs = {} - for package in self.packages or (): - # Locate package source directory - src_dirs[assert_relative(self.get_package_dir(package))] = package - - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - for path in ei_cmd.filelist.files: - d,f = os.path.split(assert_relative(path)) - prev = None - oldf = f - while d and d!=prev and d not in src_dirs: - prev = d - d, df = os.path.split(d) - f = os.path.join(df, f) - if d in src_dirs: - if path.endswith('.py') and f==oldf: - continue # it's a module, not data - mf.setdefault(src_dirs[d],[]).append(path) - - def get_data_files(self): pass # kludge 2.4 for lazy computation - - if sys.version<"2.4": # Python 2.4 already has this code - def get_outputs(self, include_bytecode=1): - """Return complete list of files copied to the build directory - - This includes both '.py' files and data files, as well as '.pyc' - and '.pyo' files if 'include_bytecode' is true. 
(This method is - needed for the 'install_lib' command to do its job properly, and to - generate a correct installation manifest.) - """ - return _build_py.get_outputs(self, include_bytecode) + [ - os.path.join(build_dir, filename) - for package, src_dir, build_dir,filenames in self.data_files - for filename in filenames - ] - - def check_package(self, package, package_dir): - """Check namespace packages' __init__ for declare_namespace""" - try: - return self.packages_checked[package] - except KeyError: - pass - - init_py = _build_py.check_package(self, package, package_dir) - self.packages_checked[package] = init_py - - if not init_py or not self.distribution.namespace_packages: - return init_py - - for pkg in self.distribution.namespace_packages: - if pkg==package or pkg.startswith(package+'.'): - break - else: - return init_py - - f = open(init_py,'rbU') - if 'declare_namespace'.encode() not in f.read(): - from distutils import log - log.warn( - "WARNING: %s is a namespace package, but its __init__.py does\n" - "not declare_namespace(); setuptools 0.7 will REQUIRE this!\n" - '(See the setuptools manual under "Namespace Packages" for ' - "details.)\n", package - ) - f.close() - return init_py - - def initialize_options(self): - self.packages_checked={} - _build_py.initialize_options(self) - - def get_package_dir(self, package): - res = _build_py.get_package_dir(self, package) - if self.distribution.src_root is not None: - return os.path.join(self.distribution.src_root, res) - return res - - def exclude_data_files(self, package, src_dir, files): - """Filter filenames for package's data files in 'src_dir'""" - globs = (self.exclude_package_data.get('', []) - + self.exclude_package_data.get(package, [])) - bad = [] - for pattern in globs: - bad.extend( - fnmatch.filter( - files, os.path.join(src_dir, convert_path(pattern)) - ) - ) - bad = dict.fromkeys(bad) - seen = {} - return [ - f for f in files if f not in bad - and f not in seen and seen.setdefault(f,1) # ditch 
dupes - ] - - -def assert_relative(path): - if not os.path.isabs(path): - return path - from distutils.errors import DistutilsSetupError - msg = textwrap.dedent(""" - Error: setup script specifies an absolute path: - - %s - - setup() arguments must *always* be /-separated paths relative to the - setup.py directory, *never* absolute paths. - """).lstrip() % path - raise DistutilsSetupError(msg) diff --git a/libs/setuptools-2.2/setuptools/command/develop.py b/libs/setuptools-2.2/setuptools/command/develop.py deleted file mode 100644 index 1d50004..0000000 --- a/libs/setuptools-2.2/setuptools/command/develop.py +++ /dev/null @@ -1,167 +0,0 @@ -from setuptools.command.easy_install import easy_install -from distutils.util import convert_path, subst_vars -from pkg_resources import Distribution, PathMetadata, normalize_path -from distutils import log -from distutils.errors import DistutilsError, DistutilsOptionError -import os, sys, setuptools, glob - -class develop(easy_install): - """Set up package for development""" - - description = "install package in 'development mode'" - - user_options = easy_install.user_options + [ - ("uninstall", "u", "Uninstall this source package"), - ("egg-path=", None, "Set the path to be used in the .egg-link file"), - ] - - boolean_options = easy_install.boolean_options + ['uninstall'] - - command_consumes_arguments = False # override base - - def run(self): - if self.uninstall: - self.multi_version = True - self.uninstall_link() - else: - self.install_for_development() - self.warn_deprecated_options() - - def initialize_options(self): - self.uninstall = None - self.egg_path = None - easy_install.initialize_options(self) - self.setup_path = None - self.always_copy_from = '.' 
# always copy eggs installed in curdir - - - - def finalize_options(self): - ei = self.get_finalized_command("egg_info") - if ei.broken_egg_info: - raise DistutilsError( - "Please rename %r to %r before using 'develop'" - % (ei.egg_info, ei.broken_egg_info) - ) - self.args = [ei.egg_name] - - - - - easy_install.finalize_options(self) - self.expand_basedirs() - self.expand_dirs() - # pick up setup-dir .egg files only: no .egg-info - self.package_index.scan(glob.glob('*.egg')) - - self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link') - self.egg_base = ei.egg_base - if self.egg_path is None: - self.egg_path = os.path.abspath(ei.egg_base) - - target = normalize_path(self.egg_base) - if normalize_path(os.path.join(self.install_dir, self.egg_path)) != target: - raise DistutilsOptionError( - "--egg-path must be a relative path from the install" - " directory to "+target - ) - - # Make a distribution for the package's source - self.dist = Distribution( - target, - PathMetadata(target, os.path.abspath(ei.egg_info)), - project_name = ei.egg_name - ) - - p = self.egg_base.replace(os.sep,'/') - if p!= os.curdir: - p = '../' * (p.count('/')+1) - self.setup_path = p - p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) - if p != normalize_path(os.curdir): - raise DistutilsOptionError( - "Can't get a consistent path to setup script from" - " installation directory", p, normalize_path(os.curdir)) - - def install_for_development(self): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - 
self.run_command('build_ext') - - # Fixup egg-link and easy-install.pth - ei_cmd = self.get_finalized_command("egg_info") - self.egg_path = build_path - self.dist.location = build_path - self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) # XXX - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - self.install_site_py() # ensure that target dir is site-safe - if setuptools.bootstrap_install_from: - self.easy_install(setuptools.bootstrap_install_from) - setuptools.bootstrap_install_from = None - - # create an .egg-link in the installation dir, pointing to our egg - log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) - if not self.dry_run: - f = open(self.egg_link,"w") - f.write(self.egg_path + "\n" + self.setup_path) - f.close() - # postprocess the installed distro, fixing up .pth, installing scripts, - # and handling requirements - self.process_distribution(None, self.dist, not self.no_deps) - - - def uninstall_link(self): - if os.path.exists(self.egg_link): - log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) - egg_link_file = open(self.egg_link) - contents = [line.rstrip() for line in egg_link_file] - egg_link_file.close() - if contents not in ([self.egg_path], [self.egg_path, self.setup_path]): - log.warn("Link points to %s: uninstall aborted", contents) - return - if not self.dry_run: - os.unlink(self.egg_link) - if not self.dry_run: - self.update_pth(self.dist) # remove any .pth link to us - if self.distribution.scripts: - # XXX should also check for entry point scripts! 
- log.warn("Note: you must uninstall or replace scripts manually!") - - def install_egg_scripts(self, dist): - if dist is not self.dist: - # Installing a dependency, so fall back to normal behavior - return easy_install.install_egg_scripts(self,dist) - - # create wrapper scripts in the script dir, pointing to dist.scripts - - # new-style... - self.install_wrapper_scripts(dist) - - # ...and old-style - for script_name in self.distribution.scripts or []: - script_path = os.path.abspath(convert_path(script_name)) - script_name = os.path.basename(script_path) - f = open(script_path,'rU') - script_text = f.read() - f.close() - self.install_script(dist, script_name, script_text, script_path) - diff --git a/libs/setuptools-2.2/setuptools/command/easy_install.py b/libs/setuptools-2.2/setuptools/command/easy_install.py deleted file mode 100644 index 08ebf3e..0000000 --- a/libs/setuptools-2.2/setuptools/command/easy_install.py +++ /dev/null @@ -1,1915 +0,0 @@ -#!/usr/bin/env python - -""" -Easy Install ------------- - -A tool for doing automatic download/extract/build of distutils-based Python -packages. For detailed documentation, see the accompanying EasyInstall.txt -file, or visit the `EasyInstall home page`__. 
- -__ https://pythonhosted.org/setuptools/easy_install.html - -""" - -import sys -import os -import zipimport -import shutil -import tempfile -import zipfile -import re -import stat -import random -import platform -import textwrap -import warnings -import site -import struct -from glob import glob -from distutils import log, dir_util - -import pkg_resources -from setuptools import Command, _dont_write_bytecode -from setuptools.sandbox import run_setup -from setuptools.py31compat import get_path, get_config_vars - -from distutils.util import get_platform -from distutils.util import convert_path, subst_vars -from distutils.errors import DistutilsArgError, DistutilsOptionError, \ - DistutilsError, DistutilsPlatformError -from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS -from setuptools.command import setopt -from setuptools.archive_util import unpack_archive -from setuptools.package_index import PackageIndex -from setuptools.package_index import URL_SCHEME -from setuptools.command import bdist_egg, egg_info -from setuptools.compat import (iteritems, maxsize, basestring, unicode, - reraise) -from pkg_resources import ( - yield_lines, normalize_path, resource_string, ensure_directory, - get_distribution, find_distributions, Environment, Requirement, - Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, - VersionConflict, DEVELOP_DIST, -) - -sys_executable = os.environ.get('__VENV_LAUNCHER__', - os.path.normpath(sys.executable)) - -__all__ = [ - 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', - 'main', 'get_exe_prefixes', -] - -def is_64bit(): - return struct.calcsize("P") == 8 - -def samefile(p1, p2): - both_exist = os.path.exists(p1) and os.path.exists(p2) - use_samefile = hasattr(os.path, 'samefile') and both_exist - if use_samefile: - return os.path.samefile(p1, p2) - norm_p1 = os.path.normpath(os.path.normcase(p1)) - norm_p2 = os.path.normpath(os.path.normcase(p2)) - return norm_p1 == norm_p2 - -if 
sys.version_info <= (3,): - def _to_ascii(s): - return s - def isascii(s): - try: - unicode(s, 'ascii') - return True - except UnicodeError: - return False -else: - def _to_ascii(s): - return s.encode('ascii') - def isascii(s): - try: - s.encode('ascii') - return True - except UnicodeError: - return False - -class easy_install(Command): - """Manage a download/build/install process""" - description = "Find/get/install Python packages" - command_consumes_arguments = True - - user_options = [ - ('prefix=', None, "installation prefix"), - ("zip-ok", "z", "install package as a zipfile"), - ("multi-version", "m", "make apps have to require() a version"), - ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), - ("install-dir=", "d", "install package to DIR"), - ("script-dir=", "s", "install scripts to DIR"), - ("exclude-scripts", "x", "Don't install scripts"), - ("always-copy", "a", "Copy all needed packages to install dir"), - ("index-url=", "i", "base URL of Python Package Index"), - ("find-links=", "f", "additional URL(s) to search for packages"), - ("build-directory=", "b", - "download/extract/build in DIR; keep the results"), - ('optimize=', 'O', - "also compile with optimization: -O1 for \"python -O\", " - "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), - ('record=', None, - "filename in which to record list of installed files"), - ('always-unzip', 'Z', "don't install as a zipfile, no matter what"), - ('site-dirs=','S',"list of directories where .pth files work"), - ('editable', 'e', "Install specified packages in editable form"), - ('no-deps', 'N', "don't install dependencies"), - ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), - ('local-snapshots-ok', 'l', - "allow building eggs from local checkouts"), - ('version', None, "print version information and exit"), - ('no-find-links', None, - "Don't load find-links defined in packages being installed") - ] - boolean_options = [ - 'zip-ok', 'multi-version', 
'exclude-scripts', 'upgrade', 'always-copy', - 'editable', - 'no-deps', 'local-snapshots-ok', 'version' - ] - - if site.ENABLE_USER_SITE: - help_msg = "install in user site-package '%s'" % site.USER_SITE - user_options.append(('user', None, help_msg)) - boolean_options.append('user') - - negative_opt = {'always-unzip': 'zip-ok'} - create_index = PackageIndex - - def initialize_options(self): - if site.ENABLE_USER_SITE: - whereami = os.path.abspath(__file__) - self.user = whereami.startswith(site.USER_SITE) - else: - self.user = 0 - - self.zip_ok = self.local_snapshots_ok = None - self.install_dir = self.script_dir = self.exclude_scripts = None - self.index_url = None - self.find_links = None - self.build_directory = None - self.args = None - self.optimize = self.record = None - self.upgrade = self.always_copy = self.multi_version = None - self.editable = self.no_deps = self.allow_hosts = None - self.root = self.prefix = self.no_report = None - self.version = None - self.install_purelib = None # for pure module distributions - self.install_platlib = None # non-pure (dists w/ extensions) - self.install_headers = None # for C/C++ headers - self.install_lib = None # set to either purelib or platlib - self.install_scripts = None - self.install_data = None - self.install_base = None - self.install_platbase = None - if site.ENABLE_USER_SITE: - self.install_userbase = site.USER_BASE - self.install_usersite = site.USER_SITE - else: - self.install_userbase = None - self.install_usersite = None - self.no_find_links = None - - # Options not specifiable via command line - self.package_index = None - self.pth_file = self.always_copy_from = None - self.site_dirs = None - self.installed_projects = {} - self.sitepy_installed = False - # Always read easy_install options, even if we are subclassed, or have - # an independent instance created. 
This ensures that defaults will - # always come from the standard configuration file(s)' "easy_install" - # section, even if this is a "develop" or "install" command, or some - # other embedding. - self._dry_run = None - self.verbose = self.distribution.verbose - self.distribution._set_command_options( - self, self.distribution.get_option_dict('easy_install') - ) - - def delete_blockers(self, blockers): - for filename in blockers: - if os.path.exists(filename) or os.path.islink(filename): - log.info("Deleting %s", filename) - if not self.dry_run: - if os.path.isdir(filename) and not os.path.islink(filename): - rmtree(filename) - else: - os.unlink(filename) - - def finalize_options(self): - if self.version: - print('setuptools %s' % get_distribution('setuptools').version) - sys.exit() - - py_version = sys.version.split()[0] - prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') - - self.config_vars = { - 'dist_name': self.distribution.get_name(), - 'dist_version': self.distribution.get_version(), - 'dist_fullname': self.distribution.get_fullname(), - 'py_version': py_version, - 'py_version_short': py_version[0:3], - 'py_version_nodot': py_version[0] + py_version[2], - 'sys_prefix': prefix, - 'prefix': prefix, - 'sys_exec_prefix': exec_prefix, - 'exec_prefix': exec_prefix, - # Only python 3.2+ has abiflags - 'abiflags': getattr(sys, 'abiflags', ''), - } - - if site.ENABLE_USER_SITE: - self.config_vars['userbase'] = self.install_userbase - self.config_vars['usersite'] = self.install_usersite - - # fix the install_dir if "--user" was used - #XXX: duplicate of the code in the setup command - if self.user and site.ENABLE_USER_SITE: - self.create_home_path() - if self.install_userbase is None: - raise DistutilsPlatformError( - "User base directory is not specified") - self.install_base = self.install_platbase = self.install_userbase - if os.name == 'posix': - self.select_scheme("unix_user") - else: - self.select_scheme(os.name + "_user") - - 
self.expand_basedirs() - self.expand_dirs() - - self._expand('install_dir','script_dir','build_directory','site_dirs') - # If a non-default installation directory was specified, default the - # script directory to match it. - if self.script_dir is None: - self.script_dir = self.install_dir - - if self.no_find_links is None: - self.no_find_links = False - - # Let install_dir get set by install_lib command, which in turn - # gets its info from the install command, and takes into account - # --prefix and --home and all that other crud. - self.set_undefined_options('install_lib', - ('install_dir','install_dir') - ) - # Likewise, set default script_dir from 'install_scripts.install_dir' - self.set_undefined_options('install_scripts', - ('install_dir', 'script_dir') - ) - - if self.user and self.install_purelib: - self.install_dir = self.install_purelib - self.script_dir = self.install_scripts - # default --record from the install command - self.set_undefined_options('install', ('record', 'record')) - # Should this be moved to the if statement below? 
It's not used - # elsewhere - normpath = map(normalize_path, sys.path) - self.all_site_dirs = get_site_dirs() - if self.site_dirs is not None: - site_dirs = [ - os.path.expanduser(s.strip()) for s in self.site_dirs.split(',') - ] - for d in site_dirs: - if not os.path.isdir(d): - log.warn("%s (in --site-dirs) does not exist", d) - elif normalize_path(d) not in normpath: - raise DistutilsOptionError( - d+" (in --site-dirs) is not on sys.path" - ) - else: - self.all_site_dirs.append(normalize_path(d)) - if not self.editable: self.check_site_dir() - self.index_url = self.index_url or "https://pypi.python.org/simple" - self.shadow_path = self.all_site_dirs[:] - for path_item in self.install_dir, normalize_path(self.script_dir): - if path_item not in self.shadow_path: - self.shadow_path.insert(0, path_item) - - if self.allow_hosts is not None: - hosts = [s.strip() for s in self.allow_hosts.split(',')] - else: - hosts = ['*'] - if self.package_index is None: - self.package_index = self.create_index( - self.index_url, search_path = self.shadow_path, hosts=hosts, - ) - self.local_index = Environment(self.shadow_path+sys.path) - - if self.find_links is not None: - if isinstance(self.find_links, basestring): - self.find_links = self.find_links.split() - else: - self.find_links = [] - if self.local_snapshots_ok: - self.package_index.scan_egg_links(self.shadow_path+sys.path) - if not self.no_find_links: - self.package_index.add_find_links(self.find_links) - self.set_undefined_options('install_lib', ('optimize','optimize')) - if not isinstance(self.optimize,int): - try: - self.optimize = int(self.optimize) - if not (0 <= self.optimize <= 2): raise ValueError - except ValueError: - raise DistutilsOptionError("--optimize must be 0, 1, or 2") - - if self.editable and not self.build_directory: - raise DistutilsArgError( - "Must specify a build directory (-b) when using --editable" - ) - if not self.args: - raise DistutilsArgError( - "No urls, filenames, or requirements specified 
(see --help)") - - self.outputs = [] - - def _expand_attrs(self, attrs): - for attr in attrs: - val = getattr(self, attr) - if val is not None: - if os.name == 'posix' or os.name == 'nt': - val = os.path.expanduser(val) - val = subst_vars(val, self.config_vars) - setattr(self, attr, val) - - def expand_basedirs(self): - """Calls `os.path.expanduser` on install_base, install_platbase and - root.""" - self._expand_attrs(['install_base', 'install_platbase', 'root']) - - def expand_dirs(self): - """Calls `os.path.expanduser` on install dirs.""" - self._expand_attrs(['install_purelib', 'install_platlib', - 'install_lib', 'install_headers', - 'install_scripts', 'install_data',]) - - def run(self): - if self.verbose != self.distribution.verbose: - log.set_verbosity(self.verbose) - try: - for spec in self.args: - self.easy_install(spec, not self.no_deps) - if self.record: - outputs = self.outputs - if self.root: # strip any package prefix - root_len = len(self.root) - for counter in range(len(outputs)): - outputs[counter] = outputs[counter][root_len:] - from distutils import file_util - self.execute( - file_util.write_file, (self.record, outputs), - "writing list of installed files to '%s'" % - self.record - ) - self.warn_deprecated_options() - finally: - log.set_verbosity(self.distribution.verbose) - - def pseudo_tempname(self): - """Return a pseudo-tempname base in the install directory. - This code is intentionally naive; if a malicious party can write to - the target directory you're already in deep doodoo. - """ - try: - pid = os.getpid() - except: - pid = random.randint(0, maxsize) - return os.path.join(self.install_dir, "test-easy-install-%s" % pid) - - def warn_deprecated_options(self): - pass - - def check_site_dir(self): - """Verify that self.install_dir is .pth-capable dir, if needed""" - - instdir = normalize_path(self.install_dir) - pth_file = os.path.join(instdir,'easy-install.pth') - - # Is it a configured, PYTHONPATH, implicit, or explicit site dir? 
- is_site_dir = instdir in self.all_site_dirs - - if not is_site_dir and not self.multi_version: - # No? Then directly test whether it does .pth file processing - is_site_dir = self.check_pth_processing() - else: - # make sure we can write to target dir - testfile = self.pseudo_tempname()+'.write-test' - test_exists = os.path.exists(testfile) - try: - if test_exists: os.unlink(testfile) - open(testfile,'w').close() - os.unlink(testfile) - except (OSError,IOError): - self.cant_write_to_target() - - if not is_site_dir and not self.multi_version: - # Can't install non-multi to non-site dir - raise DistutilsError(self.no_default_version_msg()) - - if is_site_dir: - if self.pth_file is None: - self.pth_file = PthDistributions(pth_file, self.all_site_dirs) - else: - self.pth_file = None - - PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep) - if instdir not in map(normalize_path, [_f for _f in PYTHONPATH if _f]): - # only PYTHONPATH dirs need a site.py, so pretend it's there - self.sitepy_installed = True - elif self.multi_version and not os.path.exists(pth_file): - self.sitepy_installed = True # don't need site.py in this case - self.pth_file = None # and don't create a .pth file - self.install_dir = instdir - - def cant_write_to_target(self): - template = """can't create or remove files in install directory - -The following error occurred while trying to add or remove files in the -installation directory: - - %s - -The installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s -""" - msg = template % (sys.exc_info()[1], self.install_dir,) - - if not os.path.exists(self.install_dir): - msg += """ -This directory does not currently exist. Please create it and try again, or -choose a different installation directory (using the -d or --install-dir -option). -""" - else: - msg += """ -Perhaps your account does not have write access to this directory? 
If the -installation directory is a system-owned directory, you may need to sign in -as the administrator or "root" account. If you do not have administrative -access to this machine, you may wish to choose a different installation -directory, preferably one that is listed in your PYTHONPATH environment -variable. - -For information on other options, you may wish to consult the -documentation at: - - https://pythonhosted.org/setuptools/easy_install.html - -Please make the appropriate changes for your system and try again. -""" - raise DistutilsError(msg) - - def check_pth_processing(self): - """Empirically verify whether .pth files are supported in inst. dir""" - instdir = self.install_dir - log.info("Checking .pth file support in %s", instdir) - pth_file = self.pseudo_tempname()+".pth" - ok_file = pth_file+'.ok' - ok_exists = os.path.exists(ok_file) - try: - if ok_exists: os.unlink(ok_file) - dirname = os.path.dirname(ok_file) - if not os.path.exists(dirname): - os.makedirs(dirname) - f = open(pth_file,'w') - except (OSError,IOError): - self.cant_write_to_target() - else: - try: - f.write("import os; f = open(%r, 'w'); f.write('OK'); f.close()\n" % (ok_file,)) - f.close() - f=None - executable = sys.executable - if os.name=='nt': - dirname,basename = os.path.split(executable) - alt = os.path.join(dirname,'pythonw.exe') - if basename.lower()=='python.exe' and os.path.exists(alt): - # use pythonw.exe to avoid opening a console window - executable = alt - - from distutils.spawn import spawn - spawn([executable,'-E','-c','pass'],0) - - if os.path.exists(ok_file): - log.info( - "TEST PASSED: %s appears to support .pth files", - instdir - ) - return True - finally: - if f: - f.close() - if os.path.exists(ok_file): - os.unlink(ok_file) - if os.path.exists(pth_file): - os.unlink(pth_file) - if not self.multi_version: - log.warn("TEST FAILED: %s does NOT support .pth files", instdir) - return False - - def install_egg_scripts(self, dist): - """Write all the scripts for 
`dist`, unless scripts are excluded""" - if not self.exclude_scripts and dist.metadata_isdir('scripts'): - for script_name in dist.metadata_listdir('scripts'): - if dist.metadata_isdir('scripts/' + script_name): - # The "script" is a directory, likely a Python 3 - # __pycache__ directory, so skip it. - continue - self.install_script( - dist, script_name, - dist.get_metadata('scripts/'+script_name) - ) - self.install_wrapper_scripts(dist) - - def add_output(self, path): - if os.path.isdir(path): - for base, dirs, files in os.walk(path): - for filename in files: - self.outputs.append(os.path.join(base,filename)) - else: - self.outputs.append(path) - - def not_editable(self, spec): - if self.editable: - raise DistutilsArgError( - "Invalid argument %r: you can't use filenames or URLs " - "with --editable (except via the --find-links option)." - % (spec,) - ) - - def check_editable(self,spec): - if not self.editable: - return - - if os.path.exists(os.path.join(self.build_directory, spec.key)): - raise DistutilsArgError( - "%r already exists in %s; can't do a checkout there" % - (spec.key, self.build_directory) - ) - - def easy_install(self, spec, deps=False): - tmpdir = tempfile.mkdtemp(prefix="easy_install-") - download = None - if not self.editable: self.install_site_py() - - try: - if not isinstance(spec,Requirement): - if URL_SCHEME(spec): - # It's a url, download it to tmpdir and process - self.not_editable(spec) - download = self.package_index.download(spec, tmpdir) - return self.install_item(None, download, tmpdir, deps, True) - - elif os.path.exists(spec): - # Existing file or directory, just process it directly - self.not_editable(spec) - return self.install_item(None, spec, tmpdir, deps, True) - else: - spec = parse_requirement_arg(spec) - - self.check_editable(spec) - dist = self.package_index.fetch_distribution( - spec, tmpdir, self.upgrade, self.editable, not self.always_copy, - self.local_index - ) - if dist is None: - msg = "Could not find suitable 
distribution for %r" % spec - if self.always_copy: - msg+=" (--always-copy skips system and development eggs)" - raise DistutilsError(msg) - elif dist.precedence==DEVELOP_DIST: - # .egg-info dists don't need installing, just process deps - self.process_distribution(spec, dist, deps, "Using") - return dist - else: - return self.install_item(spec, dist.location, tmpdir, deps) - - finally: - if os.path.exists(tmpdir): - rmtree(tmpdir) - - def install_item(self, spec, download, tmpdir, deps, install_needed=False): - - # Installation is also needed if file in tmpdir or is not an egg - install_needed = install_needed or self.always_copy - install_needed = install_needed or os.path.dirname(download) == tmpdir - install_needed = install_needed or not download.endswith('.egg') - install_needed = install_needed or ( - self.always_copy_from is not None and - os.path.dirname(normalize_path(download)) == - normalize_path(self.always_copy_from) - ) - - if spec and not install_needed: - # at this point, we know it's a local .egg, we just don't know if - # it's already installed. - for dist in self.local_index[spec.project_name]: - if dist.location==download: - break - else: - install_needed = True # it's not in the local index - - log.info("Processing %s", os.path.basename(download)) - - if install_needed: - dists = self.install_eggs(spec, download, tmpdir) - for dist in dists: - self.process_distribution(spec, dist, deps) - else: - dists = [self.egg_distribution(download)] - self.process_distribution(spec, dists[0], deps, "Using") - - if spec is not None: - for dist in dists: - if dist in spec: - return dist - - def select_scheme(self, name): - """Sets the install directories by applying the install schemes.""" - # it's the caller's problem if they supply a bad name! 
- scheme = INSTALL_SCHEMES[name] - for key in SCHEME_KEYS: - attrname = 'install_' + key - if getattr(self, attrname) is None: - setattr(self, attrname, scheme[key]) - - def process_distribution(self, requirement, dist, deps=True, *info): - self.update_pth(dist) - self.package_index.add(dist) - self.local_index.add(dist) - self.install_egg_scripts(dist) - self.installed_projects[dist.key] = dist - log.info(self.installation_report(requirement, dist, *info)) - if (dist.has_metadata('dependency_links.txt') and - not self.no_find_links): - self.package_index.add_find_links( - dist.get_metadata_lines('dependency_links.txt') - ) - if not deps and not self.always_copy: - return - elif requirement is not None and dist.key != requirement.key: - log.warn("Skipping dependencies for %s", dist) - return # XXX this is not the distribution we were looking for - elif requirement is None or dist not in requirement: - # if we wound up with a different version, resolve what we've got - distreq = dist.as_requirement() - requirement = requirement or distreq - requirement = Requirement( - distreq.project_name, distreq.specs, requirement.extras - ) - log.info("Processing dependencies for %s", requirement) - try: - distros = WorkingSet([]).resolve( - [requirement], self.local_index, self.easy_install - ) - except DistributionNotFound: - e = sys.exc_info()[1] - raise DistutilsError( - "Could not find required distribution %s" % e.args - ) - except VersionConflict: - e = sys.exc_info()[1] - raise DistutilsError( - "Installed distribution %s conflicts with requirement %s" - % e.args - ) - if self.always_copy or self.always_copy_from: - # Force all the relevant distros to be copied or activated - for dist in distros: - if dist.key not in self.installed_projects: - self.easy_install(dist.as_requirement()) - log.info("Finished processing dependencies for %s", requirement) - - def should_unzip(self, dist): - if self.zip_ok is not None: - return not self.zip_ok - if 
dist.has_metadata('not-zip-safe'): - return True - if not dist.has_metadata('zip-safe'): - return True - return False - - def maybe_move(self, spec, dist_filename, setup_base): - dst = os.path.join(self.build_directory, spec.key) - if os.path.exists(dst): - msg = "%r already exists in %s; build directory %s will not be kept" - log.warn(msg, spec.key, self.build_directory, setup_base) - return setup_base - if os.path.isdir(dist_filename): - setup_base = dist_filename - else: - if os.path.dirname(dist_filename)==setup_base: - os.unlink(dist_filename) # get it out of the tmp dir - contents = os.listdir(setup_base) - if len(contents)==1: - dist_filename = os.path.join(setup_base,contents[0]) - if os.path.isdir(dist_filename): - # if the only thing there is a directory, move it instead - setup_base = dist_filename - ensure_directory(dst) - shutil.move(setup_base, dst) - return dst - - def install_wrapper_scripts(self, dist): - if not self.exclude_scripts: - for args in get_script_args(dist): - self.write_script(*args) - - def install_script(self, dist, script_name, script_text, dev_path=None): - """Generate a legacy script wrapper and install it""" - spec = str(dist.as_requirement()) - is_script = is_python_script(script_text, script_name) - - def get_template(filename): - """ - There are a couple of template scripts in the package. This - function loads one of them and prepares it for use. - - These templates use triple-quotes to escape variable - substitutions so the scripts get the 2to3 treatment when build - on Python 3. The templates cannot use triple-quotes naturally. 
- """ - raw_bytes = resource_string('setuptools', template_name) - template_str = raw_bytes.decode('utf-8') - clean_template = template_str.replace('"""', '') - return clean_template - - if is_script: - template_name = 'script template.py' - if dev_path: - template_name = template_name.replace('.py', ' (dev).py') - script_text = (get_script_header(script_text) + - get_template(template_name) % locals()) - self.write_script(script_name, _to_ascii(script_text), 'b') - - def write_script(self, script_name, contents, mode="t", blockers=()): - """Write an executable file to the scripts directory""" - self.delete_blockers( # clean up old .py/.pyw w/o a script - [os.path.join(self.script_dir,x) for x in blockers]) - log.info("Installing %s script to %s", script_name, self.script_dir) - target = os.path.join(self.script_dir, script_name) - self.add_output(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - if os.path.exists(target): - os.unlink(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target, 0x1FF-mask) # 0777 - - def install_eggs(self, spec, dist_filename, tmpdir): - # .egg dirs or files are already built, so just return them - if dist_filename.lower().endswith('.egg'): - return [self.install_egg(dist_filename, tmpdir)] - elif dist_filename.lower().endswith('.exe'): - return [self.install_exe(dist_filename, tmpdir)] - - # Anything else, try to extract and build - setup_base = tmpdir - if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): - unpack_archive(dist_filename, tmpdir, self.unpack_progress) - elif os.path.isdir(dist_filename): - setup_base = os.path.abspath(dist_filename) - - if (setup_base.startswith(tmpdir) # something we downloaded - and self.build_directory and spec is not None): - setup_base = self.maybe_move(spec, dist_filename, setup_base) - - # Find the setup.py file - setup_script = os.path.join(setup_base, 'setup.py') - - if not os.path.exists(setup_script): - setups 
= glob(os.path.join(setup_base, '*', 'setup.py')) - if not setups: - raise DistutilsError( - "Couldn't find a setup script in %s" % os.path.abspath(dist_filename) - ) - if len(setups)>1: - raise DistutilsError( - "Multiple setup scripts in %s" % os.path.abspath(dist_filename) - ) - setup_script = setups[0] - - # Now run it, and return the result - if self.editable: - log.info(self.report_editable(spec, setup_script)) - return [] - else: - return self.build_and_install(setup_script, setup_base) - - def egg_distribution(self, egg_path): - if os.path.isdir(egg_path): - metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO')) - else: - metadata = EggMetadata(zipimport.zipimporter(egg_path)) - return Distribution.from_filename(egg_path,metadata=metadata) - - def install_egg(self, egg_path, tmpdir): - destination = os.path.join(self.install_dir,os.path.basename(egg_path)) - destination = os.path.abspath(destination) - if not self.dry_run: - ensure_directory(destination) - - dist = self.egg_distribution(egg_path) - if not samefile(egg_path, destination): - if os.path.isdir(destination) and not os.path.islink(destination): - dir_util.remove_tree(destination, dry_run=self.dry_run) - elif os.path.exists(destination): - self.execute(os.unlink,(destination,),"Removing "+destination) - uncache_zipdir(destination) - if os.path.isdir(egg_path): - if egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copytree, "Copying" - elif self.should_unzip(dist): - self.mkpath(destination) - f,m = self.unpack_and_compile, "Extracting" - elif egg_path.startswith(tmpdir): - f,m = shutil.move, "Moving" - else: - f,m = shutil.copy2, "Copying" - - self.execute(f, (egg_path, destination), - (m+" %s to %s") % - (os.path.basename(egg_path),os.path.dirname(destination))) - - self.add_output(destination) - return self.egg_distribution(destination) - - def install_exe(self, dist_filename, tmpdir): - # See if it's valid, get data - cfg = 
extract_wininst_cfg(dist_filename) - if cfg is None: - raise DistutilsError( - "%s is not a valid distutils Windows .exe" % dist_filename - ) - # Create a dummy distribution object until we build the real distro - dist = Distribution( - None, - project_name=cfg.get('metadata','name'), - version=cfg.get('metadata','version'), platform=get_platform(), - ) - - # Convert the .exe to an unpacked egg - egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg') - egg_tmp = egg_path + '.tmp' - _egg_info = os.path.join(egg_tmp, 'EGG-INFO') - pkg_inf = os.path.join(_egg_info, 'PKG-INFO') - ensure_directory(pkg_inf) # make sure EGG-INFO dir exists - dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX - self.exe_to_egg(dist_filename, egg_tmp) - - # Write EGG-INFO/PKG-INFO - if not os.path.exists(pkg_inf): - f = open(pkg_inf,'w') - f.write('Metadata-Version: 1.0\n') - for k,v in cfg.items('metadata'): - if k != 'target_version': - f.write('%s: %s\n' % (k.replace('_','-').title(), v)) - f.close() - script_dir = os.path.join(_egg_info,'scripts') - self.delete_blockers( # delete entry-point scripts to avoid duping - [os.path.join(script_dir,args[0]) for args in get_script_args(dist)] - ) - # Build .egg file from tmpdir - bdist_egg.make_zipfile( - egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run - ) - # install the .egg - return self.install_egg(egg_path, tmpdir) - - def exe_to_egg(self, dist_filename, egg_tmp): - """Extract a bdist_wininst to the directories an egg would use""" - # Check for .pth file and set up prefix translations - prefixes = get_exe_prefixes(dist_filename) - to_compile = [] - native_libs = [] - top_level = {} - def process(src,dst): - s = src.lower() - for old,new in prefixes: - if s.startswith(old): - src = new+src[len(old):] - parts = src.split('/') - dst = os.path.join(egg_tmp, *parts) - dl = dst.lower() - if dl.endswith('.pyd') or dl.endswith('.dll'): - parts[-1] = bdist_egg.strip_module(parts[-1]) - 
top_level[os.path.splitext(parts[0])[0]] = 1 - native_libs.append(src) - elif dl.endswith('.py') and old!='SCRIPTS/': - top_level[os.path.splitext(parts[0])[0]] = 1 - to_compile.append(dst) - return dst - if not src.endswith('.pth'): - log.warn("WARNING: can't process %s", src) - return None - # extract, tracking .pyd/.dll->native_libs and .py -> to_compile - unpack_archive(dist_filename, egg_tmp, process) - stubs = [] - for res in native_libs: - if res.lower().endswith('.pyd'): # create stubs for .pyd's - parts = res.split('/') - resource = parts[-1] - parts[-1] = bdist_egg.strip_module(parts[-1])+'.py' - pyfile = os.path.join(egg_tmp, *parts) - to_compile.append(pyfile) - stubs.append(pyfile) - bdist_egg.write_stub(resource, pyfile) - self.byte_compile(to_compile) # compile .py's - bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'), - bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag - - for name in 'top_level','native_libs': - if locals()[name]: - txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt') - if not os.path.exists(txt): - f = open(txt,'w') - f.write('\n'.join(locals()[name])+'\n') - f.close() - - def installation_report(self, req, dist, what="Installed"): - """Helpful installation message for display to package users""" - msg = "\n%(what)s %(eggloc)s%(extras)s" - if self.multi_version and not self.no_report: - msg += """ - -Because this distribution was installed --multi-version, before you can -import modules from this package in an application, you will need to -'import pkg_resources' and then use a 'require()' call similar to one of -these examples, in order to select the desired version: - - pkg_resources.require("%(name)s") # latest installed version - pkg_resources.require("%(name)s==%(version)s") # this exact version - pkg_resources.require("%(name)s>=%(version)s") # this version or higher -""" - if self.install_dir not in map(normalize_path,sys.path): - msg += """ - -Note also that the installation directory must be on 
sys.path at runtime for -this to work. (e.g. by being the application's script directory, by being on -PYTHONPATH, or by being added to sys.path by your code.) -""" - eggloc = dist.location - name = dist.project_name - version = dist.version - extras = '' # TODO: self.report_extras(req, dist) - return msg % locals() - - def report_editable(self, spec, setup_script): - dirname = os.path.dirname(setup_script) - python = sys.executable - return """\nExtracted editable version of %(spec)s to %(dirname)s - -If it uses setuptools in its setup script, you can activate it in -"development" mode by going to that directory and running:: - - %(python)s setup.py develop - -See the setuptools documentation for the "develop" command for more info. -""" % locals() - - def run_setup(self, setup_script, setup_base, args): - sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) - sys.modules.setdefault('distutils.command.egg_info', egg_info) - - args = list(args) - if self.verbose>2: - v = 'v' * (self.verbose - 1) - args.insert(0,'-'+v) - elif self.verbose<2: - args.insert(0,'-q') - if self.dry_run: - args.insert(0,'-n') - log.info( - "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args) - ) - try: - run_setup(setup_script, args) - except SystemExit: - v = sys.exc_info()[1] - raise DistutilsError("Setup script exited with %s" % (v.args[0],)) - - def build_and_install(self, setup_script, setup_base): - args = ['bdist_egg', '--dist-dir'] - - dist_dir = tempfile.mkdtemp( - prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) - ) - try: - self._set_fetcher_options(os.path.dirname(setup_script)) - args.append(dist_dir) - - self.run_setup(setup_script, setup_base, args) - all_eggs = Environment([dist_dir]) - eggs = [] - for key in all_eggs: - for dist in all_eggs[key]: - eggs.append(self.install_egg(dist.location, setup_base)) - if not eggs and not self.dry_run: - log.warn("No eggs found in %s (setup script problem?)", - dist_dir) - return eggs - finally: - 
rmtree(dist_dir) - log.set_verbosity(self.verbose) # restore our log verbosity - - def _set_fetcher_options(self, base): - """ - When easy_install is about to run bdist_egg on a source dist, that - source dist might have 'setup_requires' directives, requiring - additional fetching. Ensure the fetcher options given to easy_install - are available to that command as well. - """ - # find the fetch options from easy_install and write them out - # to the setup.cfg file. - ei_opts = self.distribution.get_option_dict('easy_install').copy() - fetch_directives = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts', - ) - fetch_options = {} - for key, val in ei_opts.items(): - if key not in fetch_directives: continue - fetch_options[key.replace('_', '-')] = val[1] - # create a settings dictionary suitable for `edit_config` - settings = dict(easy_install=fetch_options) - cfg_filename = os.path.join(base, 'setup.cfg') - setopt.edit_config(cfg_filename, settings) - - def update_pth(self, dist): - if self.pth_file is None: - return - - for d in self.pth_file[dist.key]: # drop old entries - if self.multi_version or d.location != dist.location: - log.info("Removing %s from easy-install.pth file", d) - self.pth_file.remove(d) - if d.location in self.shadow_path: - self.shadow_path.remove(d.location) - - if not self.multi_version: - if dist.location in self.pth_file.paths: - log.info( - "%s is already the active version in easy-install.pth", - dist - ) - else: - log.info("Adding %s to easy-install.pth file", dist) - self.pth_file.add(dist) # add new entry - if dist.location not in self.shadow_path: - self.shadow_path.append(dist.location) - - if not self.dry_run: - - self.pth_file.save() - - if dist.key=='setuptools': - # Ensure that setuptools itself never becomes unavailable! - # XXX should this check for latest version? 
- filename = os.path.join(self.install_dir,'setuptools.pth') - if os.path.islink(filename): os.unlink(filename) - f = open(filename, 'wt') - f.write(self.pth_file.make_relative(dist.location)+'\n') - f.close() - - def unpack_progress(self, src, dst): - # Progress filter for unpacking - log.debug("Unpacking %s to %s", src, dst) - return dst # only unpack-and-compile skips files for dry run - - def unpack_and_compile(self, egg_path, destination): - to_compile = [] - to_chmod = [] - - def pf(src, dst): - if dst.endswith('.py') and not src.startswith('EGG-INFO/'): - to_compile.append(dst) - elif dst.endswith('.dll') or dst.endswith('.so'): - to_chmod.append(dst) - self.unpack_progress(src,dst) - return not self.dry_run and dst or None - - unpack_archive(egg_path, destination, pf) - self.byte_compile(to_compile) - if not self.dry_run: - for f in to_chmod: - mode = ((os.stat(f)[stat.ST_MODE]) | 0x16D) & 0xFED # 0555, 07755 - chmod(f, mode) - - def byte_compile(self, to_compile): - if _dont_write_bytecode: - self.warn('byte-compiling is disabled, skipping.') - return - - from distutils.util import byte_compile - try: - # try to make the byte compile messages quieter - log.set_verbosity(self.verbose - 1) - - byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) - if self.optimize: - byte_compile( - to_compile, optimize=self.optimize, force=1, - dry_run=self.dry_run - ) - finally: - log.set_verbosity(self.verbose) # restore original verbosity - - def no_default_version_msg(self): - template = """bad install directory or PYTHONPATH - -You are attempting to install a package to a directory that is not -on PYTHONPATH and which Python does not read ".pth" files from. 
The -installation directory you specified (via --install-dir, --prefix, or -the distutils default setting) was: - - %s - -and your PYTHONPATH environment variable currently contains: - - %r - -Here are some of your options for correcting the problem: - -* You can choose a different installation directory, i.e., one that is - on PYTHONPATH or supports .pth files - -* You can add the installation directory to the PYTHONPATH environment - variable. (It must then also be on PYTHONPATH whenever you run - Python and want to use the package(s) you are installing.) - -* You can set up the installation directory to support ".pth" files by - using one of the approaches described here: - - https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations - -Please make the appropriate changes for your system and try again.""" - return template % (self.install_dir, os.environ.get('PYTHONPATH','')) - - def install_site_py(self): - """Make sure there's a site.py in the target dir, if needed""" - - if self.sitepy_installed: - return # already did it, or don't need to - - sitepy = os.path.join(self.install_dir, "site.py") - source = resource_string("setuptools", "site-patch.py") - current = "" - - if os.path.exists(sitepy): - log.debug("Checking existing site.py in %s", self.install_dir) - f = open(sitepy,'rb') - current = f.read() - # we want str, not bytes - if sys.version_info >= (3,): - current = current.decode() - - f.close() - if not current.startswith('def __boot():'): - raise DistutilsError( - "%s is not a setuptools-generated site.py; please" - " remove it." 
% sitepy - ) - - if current != source: - log.info("Creating %s", sitepy) - if not self.dry_run: - ensure_directory(sitepy) - f = open(sitepy,'wb') - f.write(source) - f.close() - self.byte_compile([sitepy]) - - self.sitepy_installed = True - - def create_home_path(self): - """Create directories under ~.""" - if not self.user: - return - home = convert_path(os.path.expanduser("~")) - for name, path in iteritems(self.config_vars): - if path.startswith(home) and not os.path.isdir(path): - self.debug_print("os.makedirs('%s', 0700)" % path) - os.makedirs(path, 0x1C0) # 0700 - - INSTALL_SCHEMES = dict( - posix = dict( - install_dir = '$base/lib/python$py_version_short/site-packages', - script_dir = '$base/bin', - ), - ) - - DEFAULT_SCHEME = dict( - install_dir = '$base/Lib/site-packages', - script_dir = '$base/Scripts', - ) - - def _expand(self, *attrs): - config_vars = self.get_finalized_command('install').config_vars - - if self.prefix: - # Set default install_dir/scripts from --prefix - config_vars = config_vars.copy() - config_vars['base'] = self.prefix - scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME) - for attr,val in scheme.items(): - if getattr(self,attr,None) is None: - setattr(self,attr,val) - - from distutils.util import subst_vars - for attr in attrs: - val = getattr(self, attr) - if val is not None: - val = subst_vars(val, config_vars) - if os.name == 'posix': - val = os.path.expanduser(val) - setattr(self, attr, val) - -def get_site_dirs(): - # return a list of 'site' dirs - sitedirs = [_f for _f in os.environ.get('PYTHONPATH', - '').split(os.pathsep) if _f] - prefixes = [sys.prefix] - if sys.exec_prefix != sys.prefix: - prefixes.append(sys.exec_prefix) - for prefix in prefixes: - if prefix: - if sys.platform in ('os2emx', 'riscos'): - sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) - elif os.sep == '/': - sitedirs.extend([os.path.join(prefix, - "lib", - "python" + sys.version[:3], - "site-packages"), - os.path.join(prefix, 
"lib", "site-python")]) - else: - sitedirs.extend( - [prefix, os.path.join(prefix, "lib", "site-packages")] - ) - if sys.platform == 'darwin': - # for framework builds *only* we add the standard Apple - # locations. Currently only per-user, but /Library and - # /Network/Library could be added too - if 'Python.framework' in prefix: - home = os.environ.get('HOME') - if home: - sitedirs.append( - os.path.join(home, - 'Library', - 'Python', - sys.version[:3], - 'site-packages')) - lib_paths = get_path('purelib'), get_path('platlib') - for site_lib in lib_paths: - if site_lib not in sitedirs: sitedirs.append(site_lib) - - if site.ENABLE_USER_SITE: - sitedirs.append(site.USER_SITE) - - sitedirs = list(map(normalize_path, sitedirs)) - - return sitedirs - - -def expand_paths(inputs): - """Yield sys.path directories that might contain "old-style" packages""" - - seen = {} - - for dirname in inputs: - dirname = normalize_path(dirname) - if dirname in seen: - continue - - seen[dirname] = 1 - if not os.path.isdir(dirname): - continue - - files = os.listdir(dirname) - yield dirname, files - - for name in files: - if not name.endswith('.pth'): - # We only care about the .pth files - continue - if name in ('easy-install.pth','setuptools.pth'): - # Ignore .pth files that we control - continue - - # Read the .pth file - f = open(os.path.join(dirname,name)) - lines = list(yield_lines(f)) - f.close() - - # Yield existing non-dupe, non-import directory lines from it - for line in lines: - if not line.startswith("import"): - line = normalize_path(line.rstrip()) - if line not in seen: - seen[line] = 1 - if not os.path.isdir(line): - continue - yield line, os.listdir(line) - - -def extract_wininst_cfg(dist_filename): - """Extract configuration data from a bdist_wininst .exe - - Returns a ConfigParser.RawConfigParser, or None - """ - f = open(dist_filename,'rb') - try: - endrec = zipfile._EndRecData(f) - if endrec is None: - return None - - prepended = (endrec[9] - endrec[5]) - endrec[6] 
- if prepended < 12: # no wininst data here - return None - f.seek(prepended-12) - - from setuptools.compat import StringIO, ConfigParser - import struct - tag, cfglen, bmlen = struct.unpack("= (2,6): - null_byte = bytes([0]) - else: - null_byte = chr(0) - config = part.split(null_byte, 1)[0] - # Now the config is in bytes, but for RawConfigParser, it should - # be text, so decode it. - config = config.decode(sys.getfilesystemencoding()) - cfg.readfp(StringIO(config)) - except ConfigParser.Error: - return None - if not cfg.has_section('metadata') or not cfg.has_section('Setup'): - return None - return cfg - - finally: - f.close() - - -def get_exe_prefixes(exe_filename): - """Get exe->egg path translations for a given .exe file""" - - prefixes = [ - ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), - ('PLATLIB/', ''), - ('SCRIPTS/', 'EGG-INFO/scripts/'), - ('DATA/lib/site-packages', ''), - ] - z = zipfile.ZipFile(exe_filename) - try: - for info in z.infolist(): - name = info.filename - parts = name.split('/') - if len(parts)==3 and parts[2]=='PKG-INFO': - if parts[1].endswith('.egg-info'): - prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/')) - break - if len(parts) != 2 or not name.endswith('.pth'): - continue - if name.endswith('-nspkg.pth'): - continue - if parts[0].upper() in ('PURELIB','PLATLIB'): - contents = z.read(name) - if sys.version_info >= (3,): - contents = contents.decode() - for pth in yield_lines(contents): - pth = pth.strip().replace('\\','/') - if not pth.startswith('import'): - prefixes.append((('%s/%s/' % (parts[0],pth)), '')) - finally: - z.close() - prefixes = [(x.lower(),y) for x, y in prefixes] - prefixes.sort() - prefixes.reverse() - return prefixes - - -def parse_requirement_arg(spec): - try: - return Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % (spec,) - ) - -class PthDistributions(Environment): - """A .pth file with Distribution paths in it""" - - dirty = 
False - - def __init__(self, filename, sitedirs=()): - self.filename = filename - self.sitedirs = list(map(normalize_path, sitedirs)) - self.basedir = normalize_path(os.path.dirname(self.filename)) - self._load() - Environment.__init__(self, [], None, None) - for path in yield_lines(self.paths): - list(map(self.add, find_distributions(path, True))) - - def _load(self): - self.paths = [] - saw_import = False - seen = dict.fromkeys(self.sitedirs) - if os.path.isfile(self.filename): - f = open(self.filename,'rt') - for line in f: - if line.startswith('import'): - saw_import = True - continue - path = line.rstrip() - self.paths.append(path) - if not path.strip() or path.strip().startswith('#'): - continue - # skip non-existent paths, in case somebody deleted a package - # manually, and duplicate paths as well - path = self.paths[-1] = normalize_path( - os.path.join(self.basedir,path) - ) - if not os.path.exists(path) or path in seen: - self.paths.pop() # skip it - self.dirty = True # we cleaned up, so we're dirty now :) - continue - seen[path] = 1 - f.close() - - if self.paths and not saw_import: - self.dirty = True # ensure anything we touch has import wrappers - while self.paths and not self.paths[-1].strip(): - self.paths.pop() - - def save(self): - """Write changed .pth file back to disk""" - if not self.dirty: - return - - data = '\n'.join(map(self.make_relative,self.paths)) - if data: - log.debug("Saving %s", self.filename) - data = ( - "import sys; sys.__plen = len(sys.path)\n" - "%s\n" - "import sys; new=sys.path[sys.__plen:];" - " del sys.path[sys.__plen:];" - " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;" - " sys.__egginsert = p+len(new)\n" - ) % data - - if os.path.islink(self.filename): - os.unlink(self.filename) - f = open(self.filename,'wt') - f.write(data) - f.close() - - elif os.path.exists(self.filename): - log.debug("Deleting empty %s", self.filename) - os.unlink(self.filename) - - self.dirty = False - - def add(self, dist): - """Add `dist` to 
the distribution map""" - if (dist.location not in self.paths and ( - dist.location not in self.sitedirs or - dist.location == os.getcwd() # account for '.' being in PYTHONPATH - )): - self.paths.append(dist.location) - self.dirty = True - Environment.add(self, dist) - - def remove(self, dist): - """Remove `dist` from the distribution map""" - while dist.location in self.paths: - self.paths.remove(dist.location) - self.dirty = True - Environment.remove(self, dist) - - def make_relative(self,path): - npath, last = os.path.split(normalize_path(path)) - baselen = len(self.basedir) - parts = [last] - sep = os.altsep=='/' and '/' or os.sep - while len(npath)>=baselen: - if npath==self.basedir: - parts.append(os.curdir) - parts.reverse() - return sep.join(parts) - npath, last = os.path.split(npath) - parts.append(last) - else: - return path - -def get_script_header(script_text, executable=sys_executable, wininst=False): - """Create a #! line, getting options (if any) from script_text""" - from distutils.command.build_scripts import first_line_re - - # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. 
- if not isinstance(first_line_re.pattern, str): - first_line_re = re.compile(first_line_re.pattern.decode()) - - first = (script_text+'\n').splitlines()[0] - match = first_line_re.match(first) - options = '' - if match: - options = match.group(1) or '' - if options: options = ' '+options - if wininst: - executable = "python.exe" - else: - executable = nt_quote_arg(executable) - hdr = "#!%(executable)s%(options)s\n" % locals() - if not isascii(hdr): - # Non-ascii path to sys.executable, use -x to prevent warnings - if options: - if options.strip().startswith('-'): - options = ' -x'+options.strip()[1:] - # else: punt, we can't do it, let the warning happen anyway - else: - options = ' -x' - executable = fix_jython_executable(executable, options) - hdr = "#!%(executable)s%(options)s\n" % locals() - return hdr - -def auto_chmod(func, arg, exc): - if func is os.remove and os.name=='nt': - chmod(arg, stat.S_IWRITE) - return func(arg) - et, ev, _ = sys.exc_info() - reraise(et, (ev[0], ev[1] + (" %s %s" % (func,arg)))) - -def uncache_zipdir(path): - """Ensure that the importer caches dont have stale info for `path`""" - from zipimport import _zip_directory_cache as zdc - _uncache(path, zdc) - _uncache(path, sys.path_importer_cache) - -def _uncache(path, cache): - if path in cache: - del cache[path] - else: - path = normalize_path(path) - for p in cache: - if normalize_path(p)==path: - del cache[p] - return - -def is_python(text, filename=''): - "Is this string a valid Python script?" - try: - compile(text, filename, 'exec') - except (SyntaxError, TypeError): - return False - else: - return True - -def is_sh(executable): - """Determine if the specified executable is a .sh (contains a #! line)""" - try: - fp = open(executable) - magic = fp.read(2) - fp.close() - except (OSError,IOError): return executable - return magic == '#!' 
- -def nt_quote_arg(arg): - """Quote a command line argument according to Windows parsing rules""" - - result = [] - needquote = False - nb = 0 - - needquote = (" " in arg) or ("\t" in arg) - if needquote: - result.append('"') - - for c in arg: - if c == '\\': - nb += 1 - elif c == '"': - # double preceding backslashes, then add a \" - result.append('\\' * (nb*2) + '\\"') - nb = 0 - else: - if nb: - result.append('\\' * nb) - nb = 0 - result.append(c) - - if nb: - result.append('\\' * nb) - - if needquote: - result.append('\\' * nb) # double the trailing backslashes - result.append('"') - - return ''.join(result) - -def is_python_script(script_text, filename): - """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. - """ - if filename.endswith('.py') or filename.endswith('.pyw'): - return True # extension says it's Python - if is_python(script_text, filename): - return True # it's syntactically valid Python - if script_text.startswith('#!'): - # It begins with a '#!' line, so check if 'python' is in it somewhere - return 'python' in script_text.splitlines()[0].lower() - - return False # Not any Python I can recognize - -try: - from os import chmod as _chmod -except ImportError: - # Jython compatibility - def _chmod(*args): pass - -def chmod(path, mode): - log.debug("changing mode of %s to %o", path, mode) - try: - _chmod(path, mode) - except os.error: - e = sys.exc_info()[1] - log.debug("chmod failed: %s", e) - -def fix_jython_executable(executable, options): - if sys.platform.startswith('java') and is_sh(executable): - # Workaround for Jython is not needed on Linux systems. 
- import java - if java.lang.System.getProperty("os.name") == "Linux": - return executable - - # Workaround Jython's sys.executable being a .sh (an invalid - # shebang line interpreter) - if options: - # Can't apply the workaround, leave it broken - log.warn( - "WARNING: Unable to adapt shebang line for Jython," - " the following script is NOT executable\n" - " see http://bugs.jython.org/issue1112 for" - " more information.") - else: - return '/usr/bin/env %s' % executable - return executable - - -class ScriptWriter(object): - """ - Encapsulates behavior around writing entry point scripts for console and - gui apps. - """ - - template = textwrap.dedent(""" - # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r - __requires__ = %(spec)r - import sys - from pkg_resources import load_entry_point - - if __name__ == '__main__': - sys.exit( - load_entry_point(%(spec)r, %(group)r, %(name)r)() - ) - """).lstrip() - - @classmethod - def get_script_args(cls, dist, executable=sys_executable, wininst=False): - """ - Yield write_script() argument tuples for a distribution's entrypoints - """ - gen_class = cls.get_writer(wininst) - spec = str(dist.as_requirement()) - header = get_script_header("", executable, wininst) - for type_ in 'console', 'gui': - group = type_ + '_scripts' - for name, ep in dist.get_entry_map(group).items(): - script_text = gen_class.template % locals() - for res in gen_class._get_script_args(type_, name, header, - script_text): - yield res - - @classmethod - def get_writer(cls, force_windows): - if force_windows or sys.platform=='win32': - return WindowsScriptWriter.get_writer() - return cls - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - # Simply write the stub with no extension. 
- yield (name, header+script_text) - - -class WindowsScriptWriter(ScriptWriter): - @classmethod - def get_writer(cls): - """ - Get a script writer suitable for Windows - """ - writer_lookup = dict( - executable=WindowsExecutableLauncherWriter, - natural=cls, - ) - # for compatibility, use the executable launcher by default - launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable') - return writer_lookup[launcher] - - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - "For Windows, add a .py extension" - ext = dict(console='.pya', gui='.pyw')[type_] - if ext not in os.environ['PATHEXT'].lower().split(';'): - warnings.warn("%s not listed in PATHEXT; scripts will not be " - "recognized as executables." % ext, UserWarning) - old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe'] - old.remove(ext) - header = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield name+ext, header+script_text, 't', blockers - - @staticmethod - def _adjust_header(type_, orig_header): - """ - Make sure 'pythonw' is used for gui and and 'python' is used for - console (regardless of what sys.executable is). 
- """ - pattern = 'pythonw.exe' - repl = 'python.exe' - if type_ == 'gui': - pattern, repl = repl, pattern - pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE) - new_header = pattern_ob.sub(string=orig_header, repl=repl) - clean_header = new_header[2:-1].strip('"') - if sys.platform == 'win32' and not os.path.exists(clean_header): - # the adjusted version doesn't exist, so return the original - return orig_header - return new_header - - -class WindowsExecutableLauncherWriter(WindowsScriptWriter): - @classmethod - def _get_script_args(cls, type_, name, header, script_text): - """ - For Windows, add a .py extension and an .exe launcher - """ - if type_=='gui': - launcher_type = 'gui' - ext = '-script.pyw' - old = ['.pyw'] - else: - launcher_type = 'cli' - ext = '-script.py' - old = ['.py','.pyc','.pyo'] - hdr = cls._adjust_header(type_, header) - blockers = [name+x for x in old] - yield (name+ext, hdr+script_text, 't', blockers) - yield ( - name+'.exe', get_win_launcher(launcher_type), - 'b' # write in binary mode - ) - if not is_64bit(): - # install a manifest for the launcher to prevent Windows - # from detecting it as an installer (which it will for - # launchers like easy_install.exe). Consider only - # adding a manifest for launchers detected as installers. - # See Distribute #143 for details. - m_name = name + '.exe.manifest' - yield (m_name, load_launcher_manifest(name), 't') - -# for backward-compatibility -get_script_args = ScriptWriter.get_script_args - -def get_win_launcher(type): - """ - Load the Windows launcher (executable) suitable for launching a script. - - `type` should be either 'cli' or 'gui' - - Returns the executable as a byte string. 
- """ - launcher_fn = '%s.exe' % type - if platform.machine().lower()=='arm': - launcher_fn = launcher_fn.replace(".", "-arm.") - if is_64bit(): - launcher_fn = launcher_fn.replace(".", "-64.") - else: - launcher_fn = launcher_fn.replace(".", "-32.") - return resource_string('setuptools', launcher_fn) - -def load_launcher_manifest(name): - manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml') - if sys.version_info[0] < 3: - return manifest % vars() - else: - return manifest.decode('utf-8') % vars() - -def rmtree(path, ignore_errors=False, onerror=auto_chmod): - """Recursively delete a directory tree. - - This code is taken from the Python 2.4 version of 'shutil', because - the 2.3 version doesn't really work right. - """ - if ignore_errors: - def onerror(*args): - pass - elif onerror is None: - def onerror(*args): - raise - names = [] - try: - names = os.listdir(path) - except os.error: - onerror(os.listdir, path, sys.exc_info()) - for name in names: - fullname = os.path.join(path, name) - try: - mode = os.lstat(fullname).st_mode - except os.error: - mode = 0 - if stat.S_ISDIR(mode): - rmtree(fullname, ignore_errors, onerror) - else: - try: - os.remove(fullname) - except os.error: - onerror(os.remove, fullname, sys.exc_info()) - try: - os.rmdir(path) - except os.error: - onerror(os.rmdir, path, sys.exc_info()) - -def current_umask(): - tmp = os.umask(0x12) # 022 - os.umask(tmp) - return tmp - -def bootstrap(): - # This function is called when setuptools*.egg is run using /bin/sh - import setuptools - argv0 = os.path.dirname(setuptools.__path__[0]) - sys.argv[0] = argv0 - sys.argv.append(argv0) - main() - -def main(argv=None, **kw): - from setuptools import setup - from setuptools.dist import Distribution - import distutils.core - - USAGE = """\ -usage: %(script)s [options] requirement_or_url ... 
- or: %(script)s --help -""" - - def gen_usage(script_name): - return USAGE % dict( - script=os.path.basename(script_name), - ) - - def with_ei_usage(f): - old_gen_usage = distutils.core.gen_usage - try: - distutils.core.gen_usage = gen_usage - return f() - finally: - distutils.core.gen_usage = old_gen_usage - - class DistributionWithoutHelpCommands(Distribution): - common_usage = "" - - def _show_help(self,*args,**kw): - with_ei_usage(lambda: Distribution._show_help(self,*args,**kw)) - - if argv is None: - argv = sys.argv[1:] - - with_ei_usage(lambda: - setup( - script_args = ['-q','easy_install', '-v']+argv, - script_name = sys.argv[0] or 'easy_install', - distclass=DistributionWithoutHelpCommands, **kw - ) - ) diff --git a/libs/setuptools-2.2/setuptools/command/egg_info.py b/libs/setuptools-2.2/setuptools/command/egg_info.py deleted file mode 100644 index 5953aad..0000000 --- a/libs/setuptools-2.2/setuptools/command/egg_info.py +++ /dev/null @@ -1,392 +0,0 @@ -"""setuptools.command.egg_info - -Create a distribution's .egg-info directory and contents""" - -import os -import re -import sys - -from setuptools import Command -import distutils.errors -from distutils import log -from setuptools.command.sdist import sdist -from setuptools.compat import basestring -from setuptools import svn_utils -from distutils.util import convert_path -from distutils.filelist import FileList as _FileList -from pkg_resources import (parse_requirements, safe_name, parse_version, - safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename) -from setuptools.command.sdist import walk_revctrl - - -class egg_info(Command): - description = "create a distribution's .egg-info directory" - - user_options = [ - ('egg-base=', 'e', "directory containing .egg-info directories" - " (default: top of the source tree)"), - ('tag-svn-revision', 'r', - "Add subversion revision ID to version number"), - ('tag-date', 'd', "Add date stamp (e.g. 
20050528) to version number"), - ('tag-build=', 'b', "Specify explicit tag to add to version number"), - ('no-svn-revision', 'R', - "Don't add subversion revision ID [default]"), - ('no-date', 'D', "Don't include date stamp [default]"), - ] - - boolean_options = ['tag-date', 'tag-svn-revision'] - negative_opt = {'no-svn-revision': 'tag-svn-revision', - 'no-date': 'tag-date'} - - def initialize_options(self): - self.egg_name = None - self.egg_version = None - self.egg_base = None - self.egg_info = None - self.tag_build = None - self.tag_svn_revision = 0 - self.tag_date = 0 - self.broken_egg_info = False - self.vtags = None - - def save_version_info(self, filename): - from setuptools.command.setopt import edit_config - values = dict( - egg_info=dict( - tag_svn_revision=0, - tag_date=0, - tag_build=self.tags(), - ) - ) - edit_config(filename, values) - - def finalize_options(self): - self.egg_name = safe_name(self.distribution.get_name()) - self.vtags = self.tags() - self.egg_version = self.tagged_version() - - try: - list( - parse_requirements('%s==%s' % (self.egg_name,self.egg_version)) - ) - except ValueError: - raise distutils.errors.DistutilsOptionError( - "Invalid distribution name or version syntax: %s-%s" % - (self.egg_name,self.egg_version) - ) - - if self.egg_base is None: - dirs = self.distribution.package_dir - self.egg_base = (dirs or {}).get('',os.curdir) - - self.ensure_dirname('egg_base') - self.egg_info = to_filename(self.egg_name)+'.egg-info' - if self.egg_base != os.curdir: - self.egg_info = os.path.join(self.egg_base, self.egg_info) - if '-' in self.egg_name: self.check_broken_egg_info() - - # Set package version for the benefit of dumber commands - # (e.g. sdist, bdist_wininst, etc.) 
- # - self.distribution.metadata.version = self.egg_version - - # If we bootstrapped around the lack of a PKG-INFO, as might be the - # case in a fresh checkout, make sure that any special tags get added - # to the version info - # - pd = self.distribution._patched_dist - if pd is not None and pd.key==self.egg_name.lower(): - pd._version = self.egg_version - pd._parsed_version = parse_version(self.egg_version) - self.distribution._patched_dist = None - - def write_or_delete_file(self, what, filename, data, force=False): - """Write `data` to `filename` or delete if empty - - If `data` is non-empty, this routine is the same as ``write_file()``. - If `data` is empty but not ``None``, this is the same as calling - ``delete_file(filename)`. If `data` is ``None``, then this is a no-op - unless `filename` exists, in which case a warning is issued about the - orphaned file (if `force` is false), or deleted (if `force` is true). - """ - if data: - self.write_file(what, filename, data) - elif os.path.exists(filename): - if data is None and not force: - log.warn( - "%s not set in setup(), but %s exists", what, filename - ) - return - else: - self.delete_file(filename) - - def write_file(self, what, filename, data): - """Write `data` to `filename` (if not a dry run) after announcing it - - `what` is used in a log message to identify what is being written - to the file. - """ - log.info("writing %s to %s", what, filename) - if sys.version_info >= (3,): - data = data.encode("utf-8") - if not self.dry_run: - f = open(filename, 'wb') - f.write(data) - f.close() - - def delete_file(self, filename): - """Delete `filename` (if not a dry run) after announcing it""" - log.info("deleting %s", filename) - if not self.dry_run: - os.unlink(filename) - - def tagged_version(self): - version = self.distribution.get_version() - # egg_info may be called more than once for a distribution, - # in which case the version string already contains all tags. 
- if self.vtags and version.endswith(self.vtags): - return safe_version(version) - return safe_version(version + self.vtags) - - def run(self): - self.mkpath(self.egg_info) - installer = self.distribution.fetch_build_egg - for ep in iter_entry_points('egg_info.writers'): - writer = ep.load(installer=installer) - writer(self, ep.name, os.path.join(self.egg_info,ep.name)) - - # Get rid of native_libs.txt if it was put there by older bdist_egg - nl = os.path.join(self.egg_info, "native_libs.txt") - if os.path.exists(nl): - self.delete_file(nl) - - self.find_sources() - - def tags(self): - version = '' - if self.tag_build: - version+=self.tag_build - if self.tag_svn_revision and ( - os.path.exists('.svn') or os.path.exists('PKG-INFO') - ): version += '-r%s' % self.get_svn_revision() - if self.tag_date: - import time - version += time.strftime("-%Y%m%d") - return version - - @staticmethod - def get_svn_revision(): - return str(svn_utils.SvnInfo.load(os.curdir).get_revision()) - - def find_sources(self): - """Generate SOURCES.txt manifest file""" - manifest_filename = os.path.join(self.egg_info,"SOURCES.txt") - mm = manifest_maker(self.distribution) - mm.manifest = manifest_filename - mm.run() - self.filelist = mm.filelist - - def check_broken_egg_info(self): - bei = self.egg_name+'.egg-info' - if self.egg_base != os.curdir: - bei = os.path.join(self.egg_base, bei) - if os.path.exists(bei): - log.warn( - "-"*78+'\n' - "Note: Your current .egg-info directory has a '-' in its name;" - '\nthis will not work correctly with "setup.py develop".\n\n' - 'Please rename %s to %s to correct this problem.\n'+'-'*78, - bei, self.egg_info - ) - self.broken_egg_info = self.egg_info - self.egg_info = bei # make it work for now - -class FileList(_FileList): - """File list that accepts only existing, platform-independent paths""" - - def append(self, item): - if item.endswith('\r'): # Fix older sdists built on Windows - item = item[:-1] - path = convert_path(item) - - if sys.version_info 
>= (3,): - try: - if os.path.exists(path) or os.path.exists(path.encode('utf-8')): - self.files.append(path) - except UnicodeEncodeError: - # Accept UTF-8 filenames even if LANG=C - if os.path.exists(path.encode('utf-8')): - self.files.append(path) - else: - log.warn("'%s' not %s encodable -- skipping", path, - sys.getfilesystemencoding()) - else: - if os.path.exists(path): - self.files.append(path) - - -class manifest_maker(sdist): - - template = "MANIFEST.in" - - def initialize_options(self): - self.use_defaults = 1 - self.prune = 1 - self.manifest_only = 1 - self.force_manifest = 1 - - def finalize_options(self): - pass - - def run(self): - self.filelist = FileList() - if not os.path.exists(self.manifest): - self.write_manifest() # it must exist so it'll get in the list - self.filelist.findall() - self.add_defaults() - if os.path.exists(self.template): - self.read_template() - self.prune_file_list() - self.filelist.sort() - self.filelist.remove_duplicates() - self.write_manifest() - - def write_manifest(self): - """Write the file list in 'self.filelist' (presumably as filled in - by 'add_defaults()' and 'read_template()') to the manifest file - named by 'self.manifest'. - """ - # The manifest must be UTF-8 encodable. See #303. 
- if sys.version_info >= (3,): - files = [] - for file in self.filelist.files: - try: - file.encode("utf-8") - except UnicodeEncodeError: - log.warn("'%s' not UTF-8 encodable -- skipping" % file) - else: - files.append(file) - self.filelist.files = files - - files = self.filelist.files - if os.sep!='/': - files = [f.replace(os.sep,'/') for f in files] - self.execute(write_file, (self.manifest, files), - "writing manifest file '%s'" % self.manifest) - - def warn(self, msg): # suppress missing-file warnings from sdist - if not msg.startswith("standard file not found:"): - sdist.warn(self, msg) - - def add_defaults(self): - sdist.add_defaults(self) - self.filelist.append(self.template) - self.filelist.append(self.manifest) - rcfiles = list(walk_revctrl()) - if rcfiles: - self.filelist.extend(rcfiles) - elif os.path.exists(self.manifest): - self.read_manifest() - ei_cmd = self.get_finalized_command('egg_info') - self.filelist.include_pattern("*", prefix=ei_cmd.egg_info) - - def prune_file_list(self): - build = self.get_finalized_command('build') - base_dir = self.distribution.get_fullname() - self.filelist.exclude_pattern(None, prefix=build.build_base) - self.filelist.exclude_pattern(None, prefix=base_dir) - sep = re.escape(os.sep) - self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1) - - -def write_file(filename, contents): - """Create a file with the specified name and write 'contents' (a - sequence of strings without line terminators) to it. 
- """ - contents = "\n".join(contents) - if sys.version_info >= (3,): - contents = contents.encode("utf-8") - f = open(filename, "wb") # always write POSIX-style manifest - f.write(contents) - f.close() - -def write_pkg_info(cmd, basename, filename): - log.info("writing %s", filename) - if not cmd.dry_run: - metadata = cmd.distribution.metadata - metadata.version, oldver = cmd.egg_version, metadata.version - metadata.name, oldname = cmd.egg_name, metadata.name - try: - # write unescaped data to PKG-INFO, so older pkg_resources - # can still parse it - metadata.write_pkg_info(cmd.egg_info) - finally: - metadata.name, metadata.version = oldname, oldver - - safe = getattr(cmd.distribution,'zip_safe',None) - from setuptools.command import bdist_egg - bdist_egg.write_safety_flag(cmd.egg_info, safe) - -def warn_depends_obsolete(cmd, basename, filename): - if os.path.exists(filename): - log.warn( - "WARNING: 'depends.txt' is not used by setuptools 0.6!\n" - "Use the install_requires/extras_require setup() args instead." 
- ) - - -def write_requirements(cmd, basename, filename): - dist = cmd.distribution - data = ['\n'.join(yield_lines(dist.install_requires or ()))] - for extra,reqs in (dist.extras_require or {}).items(): - data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs)))) - cmd.write_or_delete_file("requirements", filename, ''.join(data)) - -def write_toplevel_names(cmd, basename, filename): - pkgs = dict.fromkeys( - [ - k.split('.',1)[0] - for k in cmd.distribution.iter_distribution_names() - ] - ) - cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n') - - -def overwrite_arg(cmd, basename, filename): - write_arg(cmd, basename, filename, True) - -def write_arg(cmd, basename, filename, force=False): - argname = os.path.splitext(basename)[0] - value = getattr(cmd.distribution, argname, None) - if value is not None: - value = '\n'.join(value)+'\n' - cmd.write_or_delete_file(argname, filename, value, force) - -def write_entries(cmd, basename, filename): - ep = cmd.distribution.entry_points - - if isinstance(ep,basestring) or ep is None: - data = ep - elif ep is not None: - data = [] - for section, contents in ep.items(): - if not isinstance(contents,basestring): - contents = EntryPoint.parse_group(section, contents) - contents = '\n'.join(map(str,contents.values())) - data.append('[%s]\n%s\n\n' % (section,contents)) - data = ''.join(data) - - cmd.write_or_delete_file('entry points', filename, data, True) - -def get_pkg_info_revision(): - # See if we can get a -r### off of PKG-INFO, in case this is an sdist of - # a subversion revision - # - if os.path.exists('PKG-INFO'): - f = open('PKG-INFO','rU') - for line in f: - match = re.match(r"Version:.*-r(\d+)\s*$", line) - if match: - return int(match.group(1)) - f.close() - return 0 diff --git a/libs/setuptools-2.2/setuptools/command/install.py b/libs/setuptools-2.2/setuptools/command/install.py deleted file mode 100644 index 459cd3c..0000000 --- a/libs/setuptools-2.2/setuptools/command/install.py +++ 
/dev/null @@ -1,103 +0,0 @@ -import setuptools -import sys -import glob -from distutils.command.install import install as _install -from distutils.errors import DistutilsArgError - -class install(_install): - """Use easy_install to install the package, w/dependencies""" - - user_options = _install.user_options + [ - ('old-and-unmanageable', None, "Try not to use this!"), - ('single-version-externally-managed', None, - "used by system package builders to create 'flat' eggs"), - ] - boolean_options = _install.boolean_options + [ - 'old-and-unmanageable', 'single-version-externally-managed', - ] - new_commands = [ - ('install_egg_info', lambda self: True), - ('install_scripts', lambda self: True), - ] - _nc = dict(new_commands) - - def initialize_options(self): - _install.initialize_options(self) - self.old_and_unmanageable = None - self.single_version_externally_managed = None - self.no_compile = None # make DISTUTILS_DEBUG work right! - - def finalize_options(self): - _install.finalize_options(self) - if self.root: - self.single_version_externally_managed = True - elif self.single_version_externally_managed: - if not self.root and not self.record: - raise DistutilsArgError( - "You must specify --record or --root when building system" - " packages" - ) - - def handle_extra_path(self): - if self.root or self.single_version_externally_managed: - # explicit backward-compatibility mode, allow extra_path to work - return _install.handle_extra_path(self) - - # Ignore extra_path when installing an egg (or being run by another - # command without --root or --single-version-externally-managed - self.path_file = None - self.extra_dirs = '' - - def run(self): - # Explicit request for old-style install? Just do it - if self.old_and_unmanageable or self.single_version_externally_managed: - return _install.run(self) - - # Attempt to detect whether we were called from setup() or by another - # command. 
If we were called by setup(), our caller will be the - # 'run_command' method in 'distutils.dist', and *its* caller will be - # the 'run_commands' method. If we were called any other way, our - # immediate caller *might* be 'run_command', but it won't have been - # called by 'run_commands'. This is slightly kludgy, but seems to - # work. - # - caller = sys._getframe(2) - caller_module = caller.f_globals.get('__name__','') - caller_name = caller.f_code.co_name - - if caller_module != 'distutils.dist' or caller_name!='run_commands': - # We weren't called from the command line or setup(), so we - # should run in backward-compatibility mode to support bdist_* - # commands. - _install.run(self) - else: - self.do_egg_install() - - def do_egg_install(self): - - easy_install = self.distribution.get_command_class('easy_install') - - cmd = easy_install( - self.distribution, args="x", root=self.root, record=self.record, - ) - cmd.ensure_finalized() # finalize before bdist_egg munges install cmd - cmd.always_copy_from = '.' 
# make sure local-dir eggs get installed - - # pick up setup-dir .egg files only: no .egg-info - cmd.package_index.scan(glob.glob('*.egg')) - - self.run_command('bdist_egg') - args = [self.distribution.get_command_obj('bdist_egg').egg_output] - - if setuptools.bootstrap_install_from: - # Bootstrap self-installation of setuptools - args.insert(0, setuptools.bootstrap_install_from) - - cmd.args = args - cmd.run() - setuptools.bootstrap_install_from = None - -# XXX Python 3.1 doesn't see _nc if this is inside the class -install.sub_commands = [ - cmd for cmd in _install.sub_commands if cmd[0] not in install._nc - ] + install.new_commands diff --git a/libs/setuptools-2.2/setuptools/command/install_egg_info.py b/libs/setuptools-2.2/setuptools/command/install_egg_info.py deleted file mode 100644 index f44b34b..0000000 --- a/libs/setuptools-2.2/setuptools/command/install_egg_info.py +++ /dev/null @@ -1,125 +0,0 @@ -from setuptools import Command -from setuptools.archive_util import unpack_archive -from distutils import log, dir_util -import os, shutil, pkg_resources - -class install_egg_info(Command): - """Install an .egg-info directory for the package""" - - description = "Install an .egg-info directory for the package" - - user_options = [ - ('install-dir=', 'd', "directory to install to"), - ] - - def initialize_options(self): - self.install_dir = None - - def finalize_options(self): - self.set_undefined_options('install_lib',('install_dir','install_dir')) - ei_cmd = self.get_finalized_command("egg_info") - basename = pkg_resources.Distribution( - None, None, ei_cmd.egg_name, ei_cmd.egg_version - ).egg_name()+'.egg-info' - self.source = ei_cmd.egg_info - self.target = os.path.join(self.install_dir, basename) - self.outputs = [self.target] - - def run(self): - self.run_command('egg_info') - target = self.target - if os.path.isdir(self.target) and not os.path.islink(self.target): - dir_util.remove_tree(self.target, dry_run=self.dry_run) - elif 
os.path.exists(self.target): - self.execute(os.unlink,(self.target,),"Removing "+self.target) - if not self.dry_run: - pkg_resources.ensure_directory(self.target) - self.execute(self.copytree, (), - "Copying %s to %s" % (self.source, self.target) - ) - self.install_namespaces() - - def get_outputs(self): - return self.outputs - - def copytree(self): - # Copy the .egg-info tree to site-packages - def skimmer(src,dst): - # filter out source-control directories; note that 'src' is always - # a '/'-separated path, regardless of platform. 'dst' is a - # platform-specific path. - for skip in '.svn/','CVS/': - if src.startswith(skip) or '/'+skip in src: - return None - self.outputs.append(dst) - log.debug("Copying %s to %s", src, dst) - return dst - unpack_archive(self.source, self.target, skimmer) - - - - - - - - - - - - - - - - - - - - - - - - - - def install_namespaces(self): - nsp = self._get_all_ns_packages() - if not nsp: return - filename,ext = os.path.splitext(self.target) - filename += '-nspkg.pth'; self.outputs.append(filename) - log.info("Installing %s",filename) - if not self.dry_run: - f = open(filename,'wt') - for pkg in nsp: - # ensure pkg is not a unicode string under Python 2.7 - pkg = str(pkg) - pth = tuple(pkg.split('.')) - trailer = '\n' - if '.' 
in pkg: - trailer = ( - "; m and setattr(sys.modules[%r], %r, m)\n" - % ('.'.join(pth[:-1]), pth[-1]) - ) - f.write( - "import sys,types,os; " - "p = os.path.join(sys._getframe(1).f_locals['sitedir'], " - "*%(pth)r); " - "ie = os.path.exists(os.path.join(p,'__init__.py')); " - "m = not ie and " - "sys.modules.setdefault(%(pkg)r,types.ModuleType(%(pkg)r)); " - "mp = (m or []) and m.__dict__.setdefault('__path__',[]); " - "(p not in mp) and mp.append(p)%(trailer)s" - % locals() - ) - f.close() - - def _get_all_ns_packages(self): - nsp = {} - for pkg in self.distribution.namespace_packages or []: - pkg = pkg.split('.') - while pkg: - nsp['.'.join(pkg)] = 1 - pkg.pop() - nsp=list(nsp) - nsp.sort() # set up shorter names first - return nsp - - diff --git a/libs/setuptools-2.2/setuptools/command/install_lib.py b/libs/setuptools-2.2/setuptools/command/install_lib.py deleted file mode 100644 index 82afa14..0000000 --- a/libs/setuptools-2.2/setuptools/command/install_lib.py +++ /dev/null @@ -1,82 +0,0 @@ -from distutils.command.install_lib import install_lib as _install_lib -import os - -class install_lib(_install_lib): - """Don't add compiled flags to filenames of non-Python files""" - - def _bytecode_filenames (self, py_filenames): - bytecode_files = [] - for py_file in py_filenames: - if not py_file.endswith('.py'): - continue - if self.compile: - bytecode_files.append(py_file + "c") - if self.optimize > 0: - bytecode_files.append(py_file + "o") - - return bytecode_files - - def run(self): - self.build() - outfiles = self.install() - if outfiles is not None: - # always compile, in case we have any extension stubs to deal with - self.byte_compile(outfiles) - - def get_exclusions(self): - exclude = {} - nsp = self.distribution.namespace_packages - - if (nsp and self.get_finalized_command('install') - .single_version_externally_managed - ): - for pkg in nsp: - parts = pkg.split('.') - while parts: - pkgdir = os.path.join(self.install_dir, *parts) - for f in '__init__.py', 
'__init__.pyc', '__init__.pyo': - exclude[os.path.join(pkgdir,f)] = 1 - parts.pop() - return exclude - - def copy_tree( - self, infile, outfile, - preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1 - ): - assert preserve_mode and preserve_times and not preserve_symlinks - exclude = self.get_exclusions() - - if not exclude: - return _install_lib.copy_tree(self, infile, outfile) - - # Exclude namespace package __init__.py* files from the output - - from setuptools.archive_util import unpack_directory - from distutils import log - - outfiles = [] - - def pf(src, dst): - if dst in exclude: - log.warn("Skipping installation of %s (namespace package)",dst) - return False - - log.info("copying %s -> %s", src, os.path.dirname(dst)) - outfiles.append(dst) - return dst - - unpack_directory(infile, outfile, pf) - return outfiles - - def get_outputs(self): - outputs = _install_lib.get_outputs(self) - exclude = self.get_exclusions() - if exclude: - return [f for f in outputs if f not in exclude] - return outputs - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/install_scripts.py b/libs/setuptools-2.2/setuptools/command/install_scripts.py deleted file mode 100644 index 105dabc..0000000 --- a/libs/setuptools-2.2/setuptools/command/install_scripts.py +++ /dev/null @@ -1,54 +0,0 @@ -from distutils.command.install_scripts import install_scripts \ - as _install_scripts -from pkg_resources import Distribution, PathMetadata, ensure_directory -import os -from distutils import log - -class install_scripts(_install_scripts): - """Do normal script install, plus any egg_info wrapper scripts""" - - def initialize_options(self): - _install_scripts.initialize_options(self) - self.no_ep = False - - def run(self): - from setuptools.command.easy_install import get_script_args - from setuptools.command.easy_install import sys_executable - - self.run_command("egg_info") - if self.distribution.scripts: - _install_scripts.run(self) # run first to set up self.outfiles - 
else: - self.outfiles = [] - if self.no_ep: - # don't install entry point scripts into .egg file! - return - - ei_cmd = self.get_finalized_command("egg_info") - dist = Distribution( - ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info), - ei_cmd.egg_name, ei_cmd.egg_version, - ) - bs_cmd = self.get_finalized_command('build_scripts') - executable = getattr(bs_cmd,'executable',sys_executable) - is_wininst = getattr( - self.get_finalized_command("bdist_wininst"), '_is_running', False - ) - for args in get_script_args(dist, executable, is_wininst): - self.write_script(*args) - - def write_script(self, script_name, contents, mode="t", *ignored): - """Write an executable file to the scripts directory""" - from setuptools.command.easy_install import chmod, current_umask - log.info("Installing %s script to %s", script_name, self.install_dir) - target = os.path.join(self.install_dir, script_name) - self.outfiles.append(target) - - mask = current_umask() - if not self.dry_run: - ensure_directory(target) - f = open(target,"w"+mode) - f.write(contents) - f.close() - chmod(target, 0x1FF-mask) # 0777 - diff --git a/libs/setuptools-2.2/setuptools/command/launcher manifest.xml b/libs/setuptools-2.2/setuptools/command/launcher manifest.xml deleted file mode 100644 index 844d226..0000000 --- a/libs/setuptools-2.2/setuptools/command/launcher manifest.xml +++ /dev/null @@ -1,15 +0,0 @@ - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/register.py b/libs/setuptools-2.2/setuptools/command/register.py deleted file mode 100644 index 3b2e085..0000000 --- a/libs/setuptools-2.2/setuptools/command/register.py +++ /dev/null @@ -1,10 +0,0 @@ -from distutils.command.register import register as _register - -class register(_register): - __doc__ = _register.__doc__ - - def run(self): - # Make sure that we are using valid current name/version info - self.run_command('egg_info') - _register.run(self) - diff --git a/libs/setuptools-2.2/setuptools/command/rotate.py 
b/libs/setuptools-2.2/setuptools/command/rotate.py deleted file mode 100644 index b10acfb..0000000 --- a/libs/setuptools-2.2/setuptools/command/rotate.py +++ /dev/null @@ -1,83 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.compat import basestring -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -class rotate(Command): - """Delete older distributions""" - - description = "delete older distributions, keeping N newest files" - user_options = [ - ('match=', 'm', "patterns to match (required)"), - ('dist-dir=', 'd', "directory where the distributions are"), - ('keep=', 'k', "number of matching distributions to keep"), - ] - - boolean_options = [] - - def initialize_options(self): - self.match = None - self.dist_dir = None - self.keep = None - - def finalize_options(self): - if self.match is None: - raise DistutilsOptionError( - "Must specify one or more (comma-separated) match patterns " - "(e.g. '.zip' or '.egg')" - ) - if self.keep is None: - raise DistutilsOptionError("Must specify number of files to keep") - try: - self.keep = int(self.keep) - except ValueError: - raise DistutilsOptionError("--keep must be an integer") - if isinstance(self.match, basestring): - self.match = [ - convert_path(p.strip()) for p in self.match.split(',') - ] - self.set_undefined_options('bdist',('dist_dir', 'dist_dir')) - - def run(self): - self.run_command("egg_info") - from glob import glob - for pattern in self.match: - pattern = self.distribution.get_name()+'*'+pattern - files = glob(os.path.join(self.dist_dir,pattern)) - files = [(os.path.getmtime(f),f) for f in files] - files.sort() - files.reverse() - - log.info("%d file(s) matching %s", len(files), pattern) - files = files[self.keep:] - for (t,f) in files: - log.info("Deleting %s", f) - if not self.dry_run: - os.unlink(f) - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/saveopts.py 
b/libs/setuptools-2.2/setuptools/command/saveopts.py deleted file mode 100644 index 7209be4..0000000 --- a/libs/setuptools-2.2/setuptools/command/saveopts.py +++ /dev/null @@ -1,24 +0,0 @@ -import distutils, os -from setuptools import Command -from setuptools.command.setopt import edit_config, option_base - -class saveopts(option_base): - """Save command-line options to a file""" - - description = "save supplied options to setup.cfg or other config file" - - def run(self): - dist = self.distribution - settings = {} - - for cmd in dist.command_options: - - if cmd=='saveopts': - continue # don't save our own options! - - for opt,(src,val) in dist.get_option_dict(cmd).items(): - if src=="command line": - settings.setdefault(cmd,{})[opt] = val - - edit_config(self.filename, settings, self.dry_run) - diff --git a/libs/setuptools-2.2/setuptools/command/sdist.py b/libs/setuptools-2.2/setuptools/command/sdist.py deleted file mode 100644 index 76e1c5f..0000000 --- a/libs/setuptools-2.2/setuptools/command/sdist.py +++ /dev/null @@ -1,244 +0,0 @@ -import os -import re -import sys -from glob import glob - -import pkg_resources -from distutils.command.sdist import sdist as _sdist -from distutils.util import convert_path -from distutils import log -from setuptools import svn_utils - -READMES = ('README', 'README.rst', 'README.txt') - - -def walk_revctrl(dirname=''): - """Find all files under revision control""" - for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): - for item in ep.load()(dirname): - yield item - - -#TODO will need test case -class re_finder(object): - """ - Finder that locates files based on entries in a file matched by a - regular expression. 
- """ - - def __init__(self, path, pattern, postproc=lambda x: x): - self.pattern = pattern - self.postproc = postproc - self.entries_path = convert_path(path) - - def _finder(self, dirname, filename): - f = open(filename,'rU') - try: - data = f.read() - finally: - f.close() - for match in self.pattern.finditer(data): - path = match.group(1) - # postproc was formerly used when the svn finder - # was an re_finder for calling unescape - path = self.postproc(path) - yield svn_utils.joinpath(dirname, path) - - def find(self, dirname=''): - path = svn_utils.joinpath(dirname, self.entries_path) - - if not os.path.isfile(path): - # entries file doesn't exist - return - for path in self._finder(dirname,path): - if os.path.isfile(path): - yield path - elif os.path.isdir(path): - for item in self.find(path): - yield item - __call__ = find - - -def _default_revctrl(dirname=''): - 'Primary svn_cvs entry point' - for finder in finders: - for item in finder(dirname): - yield item - - -finders = [ - re_finder('CVS/Entries', re.compile(r"^\w?/([^/]+)/", re.M)), - svn_utils.svn_finder, -] - - -class sdist(_sdist): - """Smart sdist that finds anything supported by revision control""" - - user_options = [ - ('formats=', None, - "formats for source distribution (comma-separated list)"), - ('keep-temp', 'k', - "keep the distribution tree around after creating " + - "archive file(s)"), - ('dist-dir=', 'd', - "directory to put the source distribution archive(s) in " - "[default: dist]"), - ] - - negative_opt = {} - - def run(self): - self.run_command('egg_info') - ei_cmd = self.get_finalized_command('egg_info') - self.filelist = ei_cmd.filelist - self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt')) - self.check_readme() - - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - # Call check_metadata only if no 'check' command - # (distutils <= 2.6) - import distutils.command - if 'check' not in distutils.command.__all__: - 
self.check_metadata() - - self.make_distribution() - - dist_files = getattr(self.distribution,'dist_files',[]) - for file in self.archive_files: - data = ('sdist', '', file) - if data not in dist_files: - dist_files.append(data) - - def __read_template_hack(self): - # This grody hack closes the template file (MANIFEST.in) if an - # exception occurs during read_template. - # Doing so prevents an error when easy_install attempts to delete the - # file. - try: - _sdist.read_template(self) - except: - sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close() - raise - # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle - # has been fixed, so only override the method if we're using an earlier - # Python. - has_leaky_handle = ( - sys.version_info < (2,7,2) - or (3,0) <= sys.version_info < (3,1,4) - or (3,2) <= sys.version_info < (3,2,1) - ) - if has_leaky_handle: - read_template = __read_template_hack - - def add_defaults(self): - standards = [READMES, - self.distribution.script_name] - for fn in standards: - if isinstance(fn, tuple): - alts = fn - got_it = 0 - for fn in alts: - if os.path.exists(fn): - got_it = 1 - self.filelist.append(fn) - break - - if not got_it: - self.warn("standard file not found: should have one of " + - ', '.join(alts)) - else: - if os.path.exists(fn): - self.filelist.append(fn) - else: - self.warn("standard file '%s' not found" % fn) - - optional = ['test/test*.py', 'setup.cfg'] - for pattern in optional: - files = list(filter(os.path.isfile, glob(pattern))) - if files: - self.filelist.extend(files) - - # getting python files - if self.distribution.has_pure_modules(): - build_py = self.get_finalized_command('build_py') - self.filelist.extend(build_py.get_source_files()) - # This functionality is incompatible with include_package_data, and - # will in fact create an infinite recursion if include_package_data - # is True. 
Use of include_package_data will imply that - # distutils-style automatic handling of package_data is disabled - if not self.distribution.include_package_data: - for _, src_dir, _, filenames in build_py.data_files: - self.filelist.extend([os.path.join(src_dir, filename) - for filename in filenames]) - - if self.distribution.has_ext_modules(): - build_ext = self.get_finalized_command('build_ext') - self.filelist.extend(build_ext.get_source_files()) - - if self.distribution.has_c_libraries(): - build_clib = self.get_finalized_command('build_clib') - self.filelist.extend(build_clib.get_source_files()) - - if self.distribution.has_scripts(): - build_scripts = self.get_finalized_command('build_scripts') - self.filelist.extend(build_scripts.get_source_files()) - - def check_readme(self): - for f in READMES: - if os.path.exists(f): - return - else: - self.warn( - "standard file not found: should have one of " +', '.join(READMES) - ) - - def make_release_tree(self, base_dir, files): - _sdist.make_release_tree(self, base_dir, files) - - # Save any egg_info command line options used to create this sdist - dest = os.path.join(base_dir, 'setup.cfg') - if hasattr(os,'link') and os.path.exists(dest): - # unlink and re-copy, since it might be hard-linked, and - # we don't want to change the source version - os.unlink(dest) - self.copy_file('setup.cfg', dest) - - self.get_finalized_command('egg_info').save_version_info(dest) - - def _manifest_is_not_generated(self): - # check for special comment used in 2.7.1 and higher - if not os.path.isfile(self.manifest): - return False - - fp = open(self.manifest, 'rbU') - try: - first_line = fp.readline() - finally: - fp.close() - return first_line != '# file GENERATED by distutils, do NOT edit\n'.encode() - - def read_manifest(self): - """Read the manifest file (named by 'self.manifest') and use it to - fill in 'self.filelist', the list of files to include in the source - distribution. 
- """ - log.info("reading manifest file '%s'", self.manifest) - manifest = open(self.manifest, 'rbU') - for line in manifest: - # The manifest must contain UTF-8. See #303. - if sys.version_info >= (3,): - try: - line = line.decode('UTF-8') - except UnicodeDecodeError: - log.warn("%r not UTF-8 decodable -- skipping" % line) - continue - # ignore comments and blank lines - line = line.strip() - if line.startswith('#') or not line: - continue - self.filelist.append(line) - manifest.close() diff --git a/libs/setuptools-2.2/setuptools/command/setopt.py b/libs/setuptools-2.2/setuptools/command/setopt.py deleted file mode 100644 index aa468c8..0000000 --- a/libs/setuptools-2.2/setuptools/command/setopt.py +++ /dev/null @@ -1,164 +0,0 @@ -import distutils, os -from setuptools import Command -from distutils.util import convert_path -from distutils import log -from distutils.errors import * - -__all__ = ['config_file', 'edit_config', 'option_base', 'setopt'] - - -def config_file(kind="local"): - """Get the filename of the distutils, local, global, or per-user config - - `kind` must be one of "local", "global", or "user" - """ - if kind=='local': - return 'setup.cfg' - if kind=='global': - return os.path.join( - os.path.dirname(distutils.__file__),'distutils.cfg' - ) - if kind=='user': - dot = os.name=='posix' and '.' or '' - return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot)) - raise ValueError( - "config_file() type must be 'local', 'global', or 'user'", kind - ) - - - - - - - - - - - - - - - -def edit_config(filename, settings, dry_run=False): - """Edit a configuration file to include `settings` - - `settings` is a dictionary of dictionaries or ``None`` values, keyed by - command/section name. A ``None`` value means to delete the entire section, - while a dictionary lists settings to be changed or deleted in that section. - A setting of ``None`` means to delete that setting. 
- """ - from setuptools.compat import ConfigParser - log.debug("Reading configuration from %s", filename) - opts = ConfigParser.RawConfigParser() - opts.read([filename]) - for section, options in settings.items(): - if options is None: - log.info("Deleting section [%s] from %s", section, filename) - opts.remove_section(section) - else: - if not opts.has_section(section): - log.debug("Adding new section [%s] to %s", section, filename) - opts.add_section(section) - for option,value in options.items(): - if value is None: - log.debug("Deleting %s.%s from %s", - section, option, filename - ) - opts.remove_option(section,option) - if not opts.options(section): - log.info("Deleting empty [%s] section from %s", - section, filename) - opts.remove_section(section) - else: - log.debug( - "Setting %s.%s to %r in %s", - section, option, value, filename - ) - opts.set(section,option,value) - - log.info("Writing %s", filename) - if not dry_run: - f = open(filename,'w'); opts.write(f); f.close() - -class option_base(Command): - """Abstract base class for commands that mess with config files""" - - user_options = [ - ('global-config', 'g', - "save options to the site-wide distutils.cfg file"), - ('user-config', 'u', - "save options to the current user's pydistutils.cfg file"), - ('filename=', 'f', - "configuration file to use (default=setup.cfg)"), - ] - - boolean_options = [ - 'global-config', 'user-config', - ] - - def initialize_options(self): - self.global_config = None - self.user_config = None - self.filename = None - - def finalize_options(self): - filenames = [] - if self.global_config: - filenames.append(config_file('global')) - if self.user_config: - filenames.append(config_file('user')) - if self.filename is not None: - filenames.append(self.filename) - if not filenames: - filenames.append(config_file('local')) - if len(filenames)>1: - raise DistutilsOptionError( - "Must specify only one configuration file option", - filenames - ) - self.filename, = filenames - - - - 
-class setopt(option_base): - """Save command-line options to a file""" - - description = "set an option in setup.cfg or another config file" - - user_options = [ - ('command=', 'c', 'command to set an option for'), - ('option=', 'o', 'option to set'), - ('set-value=', 's', 'value of the option'), - ('remove', 'r', 'remove (unset) the value'), - ] + option_base.user_options - - boolean_options = option_base.boolean_options + ['remove'] - - def initialize_options(self): - option_base.initialize_options(self) - self.command = None - self.option = None - self.set_value = None - self.remove = None - - def finalize_options(self): - option_base.finalize_options(self) - if self.command is None or self.option is None: - raise DistutilsOptionError("Must specify --command *and* --option") - if self.set_value is None and not self.remove: - raise DistutilsOptionError("Must specify --set-value or --remove") - - def run(self): - edit_config( - self.filename, { - self.command: {self.option.replace('-','_'):self.set_value} - }, - self.dry_run - ) - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/test.py b/libs/setuptools-2.2/setuptools/command/test.py deleted file mode 100644 index db2fc7b..0000000 --- a/libs/setuptools-2.2/setuptools/command/test.py +++ /dev/null @@ -1,198 +0,0 @@ -from setuptools import Command -from distutils.errors import DistutilsOptionError -import sys -from pkg_resources import * -from pkg_resources import _namespace_packages -from unittest import TestLoader, main - -class ScanningLoader(TestLoader): - - def loadTestsFromModule(self, module): - """Return a suite of all tests cases contained in the given module - - If the module is a package, load tests from all the modules in it. - If the module has an ``additional_tests`` function, call it and add - the return value to the tests. 
- """ - tests = [] - if module.__name__!='setuptools.tests.doctest': # ugh - tests.append(TestLoader.loadTestsFromModule(self,module)) - - if hasattr(module, "additional_tests"): - tests.append(module.additional_tests()) - - if hasattr(module, '__path__'): - for file in resource_listdir(module.__name__, ''): - if file.endswith('.py') and file!='__init__.py': - submodule = module.__name__+'.'+file[:-3] - else: - if resource_exists( - module.__name__, file+'/__init__.py' - ): - submodule = module.__name__+'.'+file - else: - continue - tests.append(self.loadTestsFromName(submodule)) - - if len(tests)!=1: - return self.suiteClass(tests) - else: - return tests[0] # don't create a nested suite for only one return - - -class test(Command): - - """Command to run unit tests after in-place build""" - - description = "run unit tests after in-place build" - - user_options = [ - ('test-module=','m', "Run 'test_suite' in specified module"), - ('test-suite=','s', - "Test suite to run (e.g. 'some_module.test_suite')"), - ] - - def initialize_options(self): - self.test_suite = None - self.test_module = None - self.test_loader = None - - - def finalize_options(self): - - if self.test_suite is None: - if self.test_module is None: - self.test_suite = self.distribution.test_suite - else: - self.test_suite = self.test_module+".test_suite" - elif self.test_module: - raise DistutilsOptionError( - "You may specify a module or a suite, but not both" - ) - - self.test_args = [self.test_suite] - - if self.verbose: - self.test_args.insert(0,'--verbose') - if self.test_loader is None: - self.test_loader = getattr(self.distribution,'test_loader',None) - if self.test_loader is None: - self.test_loader = "setuptools.command.test:ScanningLoader" - - - - def with_project_on_sys_path(self, func): - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - # If we run 2to3 we can not do this inplace: - - # Ensure metadata is up-to-date - self.reinitialize_command('build_py', 
inplace=0) - self.run_command('build_py') - bpy_cmd = self.get_finalized_command("build_py") - build_path = normalize_path(bpy_cmd.build_lib) - - # Build extensions - self.reinitialize_command('egg_info', egg_base=build_path) - self.run_command('egg_info') - - self.reinitialize_command('build_ext', inplace=0) - self.run_command('build_ext') - else: - # Without 2to3 inplace works fine: - self.run_command('egg_info') - - # Build extensions in-place - self.reinitialize_command('build_ext', inplace=1) - self.run_command('build_ext') - - ei_cmd = self.get_finalized_command("egg_info") - - old_path = sys.path[:] - old_modules = sys.modules.copy() - - try: - sys.path.insert(0, normalize_path(ei_cmd.egg_base)) - working_set.__init__() - add_activation_listener(lambda dist: dist.activate()) - require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version)) - func() - finally: - sys.path[:] = old_path - sys.modules.clear() - sys.modules.update(old_modules) - working_set.__init__() - - - def run(self): - if self.distribution.install_requires: - self.distribution.fetch_build_eggs(self.distribution.install_requires) - if self.distribution.tests_require: - self.distribution.fetch_build_eggs(self.distribution.tests_require) - - if self.test_suite: - cmd = ' '.join(self.test_args) - if self.dry_run: - self.announce('skipping "unittest %s" (dry run)' % cmd) - else: - self.announce('running "unittest %s"' % cmd) - self.with_project_on_sys_path(self.run_tests) - - - def run_tests(self): - import unittest - - # Purge modules under test from sys.modules. The test loader will - # re-import them from the build location. Required when 2to3 is used - # with namespace packages. - if sys.version_info >= (3,) and getattr(self.distribution, 'use_2to3', False): - module = self.test_args[-1].split('.')[0] - if module in _namespace_packages: - del_modules = [] - if module in sys.modules: - del_modules.append(module) - module += '.' 
- for name in sys.modules: - if name.startswith(module): - del_modules.append(name) - list(map(sys.modules.__delitem__, del_modules)) - - loader_ep = EntryPoint.parse("x="+self.test_loader) - loader_class = loader_ep.load(require=False) - cks = loader_class() - unittest.main( - None, None, [unittest.__file__]+self.test_args, - testLoader = cks - ) - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/libs/setuptools-2.2/setuptools/command/upload_docs.py b/libs/setuptools-2.2/setuptools/command/upload_docs.py deleted file mode 100644 index cad7a52..0000000 --- a/libs/setuptools-2.2/setuptools/command/upload_docs.py +++ /dev/null @@ -1,193 +0,0 @@ -# -*- coding: utf-8 -*- -"""upload_docs - -Implements a Distutils 'upload_docs' subcommand (upload documentation to -PyPI's pythonhosted.org). -""" - -import os -import socket -import zipfile -import tempfile -import sys -import shutil - -from base64 import standard_b64encode -from pkg_resources import iter_entry_points - -from distutils import log -from distutils.errors import DistutilsOptionError -from distutils.command.upload import upload - -from setuptools.compat import httplib, urlparse, unicode, iteritems, PY3 - -errors = 'surrogateescape' if PY3 else 'strict' - - -# This is not just a replacement for byte literals -# but works as a general purpose encoder -def b(s, encoding='utf-8'): - if isinstance(s, unicode): - return s.encode(encoding, errors) - return s - - -class upload_docs(upload): - - description = 'Upload documentation to PyPI' - - user_options = [ - ('repository=', 'r', - "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), - ('show-response', None, - 'display full response text from server'), - ('upload-dir=', None, 'directory to upload'), - ] - boolean_options = upload.boolean_options - - def has_sphinx(self): - if self.upload_dir is None: - for ep in iter_entry_points('distutils.commands', 'build_sphinx'): - return True - - sub_commands = [('build_sphinx', 
has_sphinx)] - - def initialize_options(self): - upload.initialize_options(self) - self.upload_dir = None - self.target_dir = None - - def finalize_options(self): - upload.finalize_options(self) - if self.upload_dir is None: - if self.has_sphinx(): - build_sphinx = self.get_finalized_command('build_sphinx') - self.target_dir = build_sphinx.builder_target_dir - else: - build = self.get_finalized_command('build') - self.target_dir = os.path.join(build.build_base, 'docs') - else: - self.ensure_dirname('upload_dir') - self.target_dir = self.upload_dir - self.announce('Using upload directory %s' % self.target_dir) - - def create_zipfile(self, filename): - zip_file = zipfile.ZipFile(filename, "w") - try: - self.mkpath(self.target_dir) # just in case - for root, dirs, files in os.walk(self.target_dir): - if root == self.target_dir and not files: - raise DistutilsOptionError( - "no files found in upload directory '%s'" - % self.target_dir) - for name in files: - full = os.path.join(root, name) - relative = root[len(self.target_dir):].lstrip(os.path.sep) - dest = os.path.join(relative, name) - zip_file.write(full, dest) - finally: - zip_file.close() - - def run(self): - # Run sub commands - for cmd_name in self.get_sub_commands(): - self.run_command(cmd_name) - - tmp_dir = tempfile.mkdtemp() - name = self.distribution.metadata.get_name() - zip_file = os.path.join(tmp_dir, "%s.zip" % name) - try: - self.create_zipfile(zip_file) - self.upload_file(zip_file) - finally: - shutil.rmtree(tmp_dir) - - def upload_file(self, filename): - f = open(filename, 'rb') - content = f.read() - f.close() - meta = self.distribution.metadata - data = { - ':action': 'doc_upload', - 'name': meta.get_name(), - 'content': (os.path.basename(filename), content), - } - # set up the authentication - credentials = b(self.username + ':' + self.password) - credentials = standard_b64encode(credentials) - if PY3: - credentials = credentials.decode('ascii') - auth = "Basic " + credentials - - # Build up the 
MIME payload for the POST data - boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' - sep_boundary = b('\n--') + b(boundary) - end_boundary = sep_boundary + b('--') - body = [] - for key, values in iteritems(data): - title = '\nContent-Disposition: form-data; name="%s"' % key - # handle multiple entries for the same name - if not isinstance(values, list): - values = [values] - for value in values: - if type(value) is tuple: - title += '; filename="%s"' % value[0] - value = value[1] - else: - value = b(value) - body.append(sep_boundary) - body.append(b(title)) - body.append(b("\n\n")) - body.append(value) - if value and value[-1:] == b('\r'): - body.append(b('\n')) # write an extra newline (lurve Macs) - body.append(end_boundary) - body.append(b("\n")) - body = b('').join(body) - - self.announce("Submitting documentation to %s" % (self.repository), - log.INFO) - - # build the Request - # We can't use urllib2 since we need to send the Basic - # auth right with the first request - schema, netloc, url, params, query, fragments = \ - urlparse(self.repository) - assert not params and not query and not fragments - if schema == 'http': - conn = httplib.HTTPConnection(netloc) - elif schema == 'https': - conn = httplib.HTTPSConnection(netloc) - else: - raise AssertionError("unsupported schema "+schema) - - data = '' - try: - conn.connect() - conn.putrequest("POST", url) - content_type = 'multipart/form-data; boundary=%s' % boundary - conn.putheader('Content-type', content_type) - conn.putheader('Content-length', str(len(body))) - conn.putheader('Authorization', auth) - conn.endheaders() - conn.send(body) - except socket.error: - e = sys.exc_info()[1] - self.announce(str(e), log.ERROR) - return - - r = conn.getresponse() - if r.status == 200: - self.announce('Server response (%s): %s' % (r.status, r.reason), - log.INFO) - elif r.status == 301: - location = r.getheader('Location') - if location is None: - location = 'https://pythonhosted.org/%s/' % 
meta.get_name() - self.announce('Upload successful. Visit %s' % location, - log.INFO) - else: - self.announce('Upload failed (%s): %s' % (r.status, r.reason), - log.ERROR) - if self.show_response: - print('-'*75, r.read(), '-'*75) diff --git a/libs/setuptools-2.2/setuptools/compat.py b/libs/setuptools-2.2/setuptools/compat.py deleted file mode 100644 index 7b824ba..0000000 --- a/libs/setuptools-2.2/setuptools/compat.py +++ /dev/null @@ -1,83 +0,0 @@ -import sys -import itertools - -if sys.version_info[0] < 3: - PY3 = False - - basestring = basestring - import __builtin__ as builtins - import ConfigParser - from StringIO import StringIO - BytesIO = StringIO - execfile = execfile - func_code = lambda o: o.func_code - func_globals = lambda o: o.func_globals - im_func = lambda o: o.im_func - from htmlentitydefs import name2codepoint - import httplib - from BaseHTTPServer import HTTPServer - from SimpleHTTPServer import SimpleHTTPRequestHandler - from BaseHTTPServer import BaseHTTPRequestHandler - iteritems = lambda o: o.iteritems() - long_type = long - maxsize = sys.maxint - next = lambda o: o.next() - numeric_types = (int, long, float) - unichr = unichr - unicode = unicode - bytes = str - from urllib import url2pathname, splittag, pathname2url - import urllib2 - from urllib2 import urlopen, HTTPError, URLError, unquote, splituser - from urlparse import urlparse, urlunparse, urljoin, urlsplit, urlunsplit - filterfalse = itertools.ifilterfalse - - exec("""def reraise(tp, value, tb=None): - raise tp, value, tb""") -else: - PY3 = True - - basestring = str - import builtins - import configparser as ConfigParser - from io import StringIO, BytesIO - func_code = lambda o: o.__code__ - func_globals = lambda o: o.__globals__ - im_func = lambda o: o.__func__ - from html.entities import name2codepoint - import http.client as httplib - from http.server import HTTPServer, SimpleHTTPRequestHandler - from http.server import BaseHTTPRequestHandler - iteritems = lambda o: o.items() - 
long_type = int - maxsize = sys.maxsize - next = next - numeric_types = (int, float) - unichr = chr - unicode = str - bytes = bytes - from urllib.error import HTTPError, URLError - import urllib.request as urllib2 - from urllib.request import urlopen, url2pathname, pathname2url - from urllib.parse import ( - urlparse, urlunparse, unquote, splituser, urljoin, urlsplit, - urlunsplit, splittag, - ) - filterfalse = itertools.filterfalse - - def execfile(fn, globs=None, locs=None): - if globs is None: - globs = globals() - if locs is None: - locs = globs - f = open(fn, 'rb') - try: - source = f.read() - finally: - f.close() - exec(compile(source, fn, 'exec'), globs, locs) - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value diff --git a/libs/setuptools-2.2/setuptools/depends.py b/libs/setuptools-2.2/setuptools/depends.py deleted file mode 100644 index 8b9d121..0000000 --- a/libs/setuptools-2.2/setuptools/depends.py +++ /dev/null @@ -1,246 +0,0 @@ -from __future__ import generators -import sys, imp, marshal -from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN -from distutils.version import StrictVersion, LooseVersion - -__all__ = [ - 'Require', 'find_module', 'get_module_constant', 'extract_constant' -] - -class Require: - """A prerequisite to building or installing a distribution""" - - def __init__(self,name,requested_version,module,homepage='', - attribute=None,format=None - ): - - if format is None and requested_version is not None: - format = StrictVersion - - if format is not None: - requested_version = format(requested_version) - if attribute is None: - attribute = '__version__' - - self.__dict__.update(locals()) - del self.self - - - def full_name(self): - """Return full package/distribution name, w/version""" - if self.requested_version is not None: - return '%s-%s' % (self.name,self.requested_version) - return self.name - - - def version_ok(self,version): - """Is 'version' 
sufficiently up-to-date?""" - return self.attribute is None or self.format is None or \ - str(version) != "unknown" and version >= self.requested_version - - - def get_version(self, paths=None, default="unknown"): - - """Get version number of installed module, 'None', or 'default' - - Search 'paths' for module. If not found, return 'None'. If found, - return the extracted version attribute, or 'default' if no version - attribute was specified, or the value cannot be determined without - importing the module. The version is formatted according to the - requirement's version format (if any), unless it is 'None' or the - supplied 'default'. - """ - - if self.attribute is None: - try: - f,p,i = find_module(self.module,paths) - if f: f.close() - return default - except ImportError: - return None - - v = get_module_constant(self.module,self.attribute,default,paths) - - if v is not None and v is not default and self.format is not None: - return self.format(v) - - return v - - - def is_present(self,paths=None): - """Return true if dependency is present on 'paths'""" - return self.get_version(paths) is not None - - - def is_current(self,paths=None): - """Return true if dependency is present and up-to-date on 'paths'""" - version = self.get_version(paths) - if version is None: - return False - return self.version_ok(version) - - -def _iter_code(code): - - """Yield '(op,arg)' pair for each operation in code object 'code'""" - - from array import array - from dis import HAVE_ARGUMENT, EXTENDED_ARG - - bytes = array('b',code.co_code) - eof = len(code.co_code) - - ptr = 0 - extended_arg = 0 - - while ptr=HAVE_ARGUMENT: - - arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg - ptr += 3 - - if op==EXTENDED_ARG: - extended_arg = arg * long_type(65536) - continue - - else: - arg = None - ptr += 1 - - yield op,arg - - - - - - - - - - -def find_module(module, paths=None): - """Just like 'imp.find_module()', but with package support""" - - parts = module.split('.') - - while parts: - 
part = parts.pop(0) - f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) - - if kind==PKG_DIRECTORY: - parts = parts or ['__init__'] - paths = [path] - - elif parts: - raise ImportError("Can't find %r in %s" % (parts,module)) - - return info - - - - - - - - - - - - - - - - - - - - - - - - -def get_module_constant(module, symbol, default=-1, paths=None): - - """Find 'module' by searching 'paths', and extract 'symbol' - - Return 'None' if 'module' does not exist on 'paths', or it does not define - 'symbol'. If the module defines 'symbol' as a constant, return the - constant. Otherwise, return 'default'.""" - - try: - f, path, (suffix,mode,kind) = find_module(module,paths) - except ImportError: - # Module doesn't exist - return None - - try: - if kind==PY_COMPILED: - f.read(8) # skip magic & date - code = marshal.load(f) - elif kind==PY_FROZEN: - code = imp.get_frozen_object(module) - elif kind==PY_SOURCE: - code = compile(f.read(), path, 'exec') - else: - # Not something we can parse; we'll have to import it. :( - if module not in sys.modules: - imp.load_module(module,f,path,(suffix,mode,kind)) - return getattr(sys.modules[module],symbol,None) - - finally: - if f: - f.close() - - return extract_constant(code,symbol,default) - - - - - - - - -def extract_constant(code,symbol,default=-1): - """Extract the constant value of 'symbol' from 'code' - - If the name 'symbol' is bound to a constant value by the Python code - object 'code', return that value. If 'symbol' is bound to an expression, - return 'default'. Otherwise, return 'None'. - - Return value is based on the first assignment to 'symbol'. 'symbol' must - be a global, or at least a non-"fast" local in the code block. That is, - only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' - must be present in 'code.co_names'. 
- """ - - if symbol not in code.co_names: - # name's not there, can't possibly be an assigment - return None - - name_idx = list(code.co_names).index(symbol) - - STORE_NAME = 90 - STORE_GLOBAL = 97 - LOAD_CONST = 100 - - const = default - - for op, arg in _iter_code(code): - - if op==LOAD_CONST: - const = code.co_consts[arg] - elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): - return const - else: - const = default - -if sys.platform.startswith('java') or sys.platform == 'cli': - # XXX it'd be better to test assertions about bytecode instead... - del extract_constant, get_module_constant - __all__.remove('extract_constant') - __all__.remove('get_module_constant') - - diff --git a/libs/setuptools-2.2/setuptools/dist.py b/libs/setuptools-2.2/setuptools/dist.py deleted file mode 100644 index 0801ae7..0000000 --- a/libs/setuptools-2.2/setuptools/dist.py +++ /dev/null @@ -1,797 +0,0 @@ -__all__ = ['Distribution'] - -import re -import os -import sys -import warnings -import distutils.log -import distutils.core -import distutils.cmd -from distutils.core import Distribution as _Distribution -from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, - DistutilsSetupError) - -from setuptools.depends import Require -from setuptools.compat import numeric_types, basestring -import pkg_resources - -def _get_unpatched(cls): - """Protect against re-patching the distutils if reloaded - - Also ensures that no other distutils extension monkeypatched the distutils - first. 
- """ - while cls.__module__.startswith('setuptools'): - cls, = cls.__bases__ - if not cls.__module__.startswith('distutils'): - raise AssertionError( - "distutils has already been patched by %r" % cls - ) - return cls - -_Distribution = _get_unpatched(_Distribution) - -sequence = tuple, list - -def check_importable(dist, attr, value): - try: - ep = pkg_resources.EntryPoint.parse('x='+value) - assert not ep.extras - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be importable 'module:attrs' string (got %r)" - % (attr,value) - ) - - -def assert_string_list(dist, attr, value): - """Verify that value is a string list or None""" - try: - assert ''.join(value)!=value - except (TypeError,ValueError,AttributeError,AssertionError): - raise DistutilsSetupError( - "%r must be a list of strings (got %r)" % (attr,value) - ) -def check_nsp(dist, attr, value): - """Verify that namespace packages are valid""" - assert_string_list(dist,attr,value) - for nsp in value: - if not dist.has_contents_for(nsp): - raise DistutilsSetupError( - "Distribution contains no modules or packages for " + - "namespace package %r" % nsp - ) - if '.' in nsp: - parent = '.'.join(nsp.split('.')[:-1]) - if parent not in value: - distutils.log.warn( - "WARNING: %r is declared as a package namespace, but %r" - " is not: please correct this in setup.py", nsp, parent - ) - -def check_extras(dist, attr, value): - """Verify that extras_require mapping is valid""" - try: - for k,v in value.items(): - if ':' in k: - k,m = k.split(':',1) - if pkg_resources.invalid_marker(m): - raise DistutilsSetupError("Invalid environment marker: "+m) - list(pkg_resources.parse_requirements(v)) - except (TypeError,ValueError,AttributeError): - raise DistutilsSetupError( - "'extras_require' must be a dictionary whose values are " - "strings or lists of strings containing valid project/version " - "requirement specifiers." 
- ) - -def assert_bool(dist, attr, value): - """Verify that value is True, False, 0, or 1""" - if bool(value) != value: - raise DistutilsSetupError( - "%r must be a boolean value (got %r)" % (attr,value) - ) -def check_requirements(dist, attr, value): - """Verify that install_requires is a valid requirements list""" - try: - list(pkg_resources.parse_requirements(value)) - except (TypeError,ValueError): - raise DistutilsSetupError( - "%r must be a string or list of strings " - "containing valid project/version requirement specifiers" % (attr,) - ) -def check_entry_points(dist, attr, value): - """Verify that entry_points map is parseable""" - try: - pkg_resources.EntryPoint.parse_map(value) - except ValueError: - e = sys.exc_info()[1] - raise DistutilsSetupError(e) - -def check_test_suite(dist, attr, value): - if not isinstance(value,basestring): - raise DistutilsSetupError("test_suite must be a string") - -def check_package_data(dist, attr, value): - """Verify that value is a dictionary of package names to glob lists""" - if isinstance(value,dict): - for k,v in value.items(): - if not isinstance(k,str): break - try: iter(v) - except TypeError: - break - else: - return - raise DistutilsSetupError( - attr+" must be a dictionary mapping package names to lists of " - "wildcard patterns" - ) - -def check_packages(dist, attr, value): - for pkgname in value: - if not re.match(r'\w+(\.\w+)*', pkgname): - distutils.log.warn( - "WARNING: %r not a valid package name; please use only" - ".-separated package names in setup.py", pkgname - ) - - -class Distribution(_Distribution): - """Distribution with support for features, tests, and package data - - This is an enhanced version of 'distutils.dist.Distribution' that - effectively adds the following new optional keyword arguments to 'setup()': - - 'install_requires' -- a string or sequence of strings specifying project - versions that the distribution requires when installed, in the format - used by 'pkg_resources.require()'. 
They will be installed - automatically when the package is installed. If you wish to use - packages that are not available in PyPI, or want to give your users an - alternate download location, you can add a 'find_links' option to the - '[easy_install]' section of your project's 'setup.cfg' file, and then - setuptools will scan the listed web pages for links that satisfy the - requirements. - - 'extras_require' -- a dictionary mapping names of optional "extras" to the - additional requirement(s) that using those extras incurs. For example, - this:: - - extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) - - indicates that the distribution can optionally provide an extra - capability called "reST", but it can only be used if docutils and - reSTedit are installed. If the user installs your package using - EasyInstall and requests one of your extras, the corresponding - additional requirements will be installed if needed. - - 'features' **deprecated** -- a dictionary mapping option names to - 'setuptools.Feature' - objects. Features are a portion of the distribution that can be - included or excluded based on user options, inter-feature dependencies, - and availability on the current system. Excluded features are omitted - from all setup commands, including source and binary distributions, so - you can create multiple distributions from the same source tree. - Feature names should be valid Python identifiers, except that they may - contain the '-' (minus) sign. Features can be included or excluded - via the command line options '--with-X' and '--without-X', where 'X' is - the name of the feature. Whether a feature is included by default, and - whether you are allowed to control this from the command line, is - determined by the Feature object. See the 'Feature' class for more - information. - - 'test_suite' -- the name of a test suite to run for the 'test' command. 
- If the user runs 'python setup.py test', the package will be installed, - and the named test suite will be run. The format is the same as - would be used on a 'unittest.py' command line. That is, it is the - dotted name of an object to import and call to generate a test suite. - - 'package_data' -- a dictionary mapping package names to lists of filenames - or globs to use to find data files contained in the named packages. - If the dictionary has filenames or globs listed under '""' (the empty - string), those names will be searched for in every package, in addition - to any names for the specific package. Data files found using these - names/globs will be installed along with the package, in the same - location as the package. Note that globs are allowed to reference - the contents of non-package subdirectories, as long as you use '/' as - a path separator. (Globs are automatically converted to - platform-specific paths at runtime.) - - In addition to these new keywords, this class also has several new methods - for manipulating the distribution's contents. For example, the 'include()' - and 'exclude()' methods can be thought of as in-place add and subtract - commands that add or remove packages, modules, extensions, and so on from - the distribution. They are used by the feature subsystem to configure the - distribution for the included and excluded features. - """ - - _patched_dist = None - - def patch_missing_pkg_info(self, attrs): - # Fake up a replacement for the data that would normally come from - # PKG-INFO, but which might not yet be built if this is a fresh - # checkout. 
- # - if not attrs or 'name' not in attrs or 'version' not in attrs: - return - key = pkg_resources.safe_name(str(attrs['name'])).lower() - dist = pkg_resources.working_set.by_key.get(key) - if dist is not None and not dist.has_metadata('PKG-INFO'): - dist._version = pkg_resources.safe_version(str(attrs['version'])) - self._patched_dist = dist - - def __init__(self, attrs=None): - have_package_data = hasattr(self, "package_data") - if not have_package_data: - self.package_data = {} - _attrs_dict = attrs or {} - if 'features' in _attrs_dict or 'require_features' in _attrs_dict: - Feature.warn_deprecated() - self.require_features = [] - self.features = {} - self.dist_files = [] - self.src_root = attrs and attrs.pop("src_root", None) - self.patch_missing_pkg_info(attrs) - # Make sure we have any eggs needed to interpret 'attrs' - if attrs is not None: - self.dependency_links = attrs.pop('dependency_links', []) - assert_string_list(self,'dependency_links',self.dependency_links) - if attrs and 'setup_requires' in attrs: - self.fetch_build_eggs(attrs.pop('setup_requires')) - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - if not hasattr(self,ep.name): - setattr(self,ep.name,None) - _Distribution.__init__(self,attrs) - if isinstance(self.metadata.version, numeric_types): - # Some people apparently take "version number" too literally :) - self.metadata.version = str(self.metadata.version) - - def parse_command_line(self): - """Process features after parsing command line options""" - result = _Distribution.parse_command_line(self) - if self.features: - self._finalize_features() - return result - - def _feature_attrname(self,name): - """Convert feature name to corresponding option attribute name""" - return 'with_'+name.replace('-','_') - - def fetch_build_eggs(self, requires): - """Resolve pre-setup requirements""" - from pkg_resources import working_set, parse_requirements - for dist in working_set.resolve( - parse_requirements(requires), 
installer=self.fetch_build_egg, - replace_conflicting=True - ): - working_set.add(dist, replace=True) - - def finalize_options(self): - _Distribution.finalize_options(self) - if self.features: - self._set_global_opts_from_features() - - for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): - value = getattr(self,ep.name,None) - if value is not None: - ep.require(installer=self.fetch_build_egg) - ep.load()(self, ep.name, value) - if getattr(self, 'convert_2to3_doctests', None): - # XXX may convert to set here when we can rely on set being builtin - self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] - else: - self.convert_2to3_doctests = [] - - def fetch_build_egg(self, req): - """Fetch an egg needed for building""" - - try: - cmd = self._egg_fetcher - cmd.package_index.to_scan = [] - except AttributeError: - from setuptools.command.easy_install import easy_install - dist = self.__class__({'script_args':['easy_install']}) - dist.parse_config_files() - opts = dist.get_option_dict('easy_install') - keep = ( - 'find_links', 'site_dirs', 'index_url', 'optimize', - 'site_dirs', 'allow_hosts' - ) - for key in list(opts): - if key not in keep: - del opts[key] # don't use any other settings - if self.dependency_links: - links = self.dependency_links[:] - if 'find_links' in opts: - links = opts['find_links'][1].split() + links - opts['find_links'] = ('setup', links) - cmd = easy_install( - dist, args=["x"], install_dir=os.curdir, exclude_scripts=True, - always_copy=False, build_directory=None, editable=False, - upgrade=False, multi_version=True, no_report=True, user=False - ) - cmd.ensure_finalized() - self._egg_fetcher = cmd - return cmd.easy_install(req) - - def _set_global_opts_from_features(self): - """Add --with-X/--without-X options based on optional features""" - - go = [] - no = self.negative_opt.copy() - - for name,feature in self.features.items(): - self._set_feature(name,None) - feature.validate(self) - - if 
feature.optional: - descr = feature.description - incdef = ' (default)' - excdef='' - if not feature.include_by_default(): - excdef, incdef = incdef, excdef - - go.append(('with-'+name, None, 'include '+descr+incdef)) - go.append(('without-'+name, None, 'exclude '+descr+excdef)) - no['without-'+name] = 'with-'+name - - self.global_options = self.feature_options = go + self.global_options - self.negative_opt = self.feature_negopt = no - - def _finalize_features(self): - """Add/remove features and resolve dependencies between them""" - - # First, flag all the enabled items (and thus their dependencies) - for name,feature in self.features.items(): - enabled = self.feature_is_included(name) - if enabled or (enabled is None and feature.include_by_default()): - feature.include_in(self) - self._set_feature(name,1) - - # Then disable the rest, so that off-by-default features don't - # get flagged as errors when they're required by an enabled feature - for name,feature in self.features.items(): - if not self.feature_is_included(name): - feature.exclude_from(self) - self._set_feature(name,0) - - def get_command_class(self, command): - """Pluggable version of get_command_class()""" - if command in self.cmdclass: - return self.cmdclass[command] - - for ep in pkg_resources.iter_entry_points('distutils.commands',command): - ep.require(installer=self.fetch_build_egg) - self.cmdclass[command] = cmdclass = ep.load() - return cmdclass - else: - return _Distribution.get_command_class(self, command) - - def print_commands(self): - for ep in pkg_resources.iter_entry_points('distutils.commands'): - if ep.name not in self.cmdclass: - cmdclass = ep.load(False) # don't require extras, we're not running - self.cmdclass[ep.name] = cmdclass - return _Distribution.print_commands(self) - - def _set_feature(self,name,status): - """Set feature's inclusion status""" - setattr(self,self._feature_attrname(name),status) - - def feature_is_included(self,name): - """Return 1 if feature is included, 0 
if excluded, 'None' if unknown""" - return getattr(self,self._feature_attrname(name)) - - def include_feature(self,name): - """Request inclusion of feature named 'name'""" - - if self.feature_is_included(name)==0: - descr = self.features[name].description - raise DistutilsOptionError( - descr + " is required, but was excluded or is not available" - ) - self.features[name].include_in(self) - self._set_feature(name,1) - - def include(self,**attrs): - """Add items to distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would add 'x' to - the distribution's 'py_modules' attribute, if it was not already - there. - - Currently, this method only supports inclusion for attributes that are - lists or tuples. If you need to add support for adding to other - attributes in this or a subclass, you can add an '_include_X' method, - where 'X' is the name of the attribute. The method will be called with - the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' - will try to call 'dist._include_foo({"bar":"baz"})', which can then - handle whatever special inclusion logic is needed. - """ - for k,v in attrs.items(): - include = getattr(self, '_include_'+k, None) - if include: - include(v) - else: - self._include_misc(k,v) - - def exclude_package(self,package): - """Remove packages, modules, and extensions in named package""" - - pfx = package+'.' - if self.packages: - self.packages = [ - p for p in self.packages - if p != package and not p.startswith(pfx) - ] - - if self.py_modules: - self.py_modules = [ - p for p in self.py_modules - if p != package and not p.startswith(pfx) - ] - - if self.ext_modules: - self.ext_modules = [ - p for p in self.ext_modules - if p.name != package and not p.name.startswith(pfx) - ] - - def has_contents_for(self,package): - """Return true if 'exclude_package(package)' would do something""" - - pfx = package+'.' 
- - for p in self.iter_distribution_names(): - if p==package or p.startswith(pfx): - return True - - def _exclude_misc(self,name,value): - """Handle 'exclude()' for list/tuple attrs without a special handler""" - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list or tuple (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is not None and not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - elif old: - setattr(self,name,[item for item in old if item not in value]) - - def _include_misc(self,name,value): - """Handle 'include()' for list/tuple attrs without a special handler""" - - if not isinstance(value,sequence): - raise DistutilsSetupError( - "%s: setting must be a list (%r)" % (name, value) - ) - try: - old = getattr(self,name) - except AttributeError: - raise DistutilsSetupError( - "%s: No such distribution setting" % name - ) - if old is None: - setattr(self,name,value) - elif not isinstance(old,sequence): - raise DistutilsSetupError( - name+": this setting cannot be changed via include/exclude" - ) - else: - setattr(self,name,old+[item for item in value if item not in old]) - - def exclude(self,**attrs): - """Remove items from distribution that are named in keyword arguments - - For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from - the distribution's 'py_modules' attribute. Excluding packages uses - the 'exclude_package()' method, so all of the package's contained - packages, modules, and extensions are also excluded. - - Currently, this method only supports exclusion from attributes that are - lists or tuples. If you need to add support for excluding from other - attributes in this or a subclass, you can add an '_exclude_X' method, - where 'X' is the name of the attribute. 
The method will be called with - the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' - will try to call 'dist._exclude_foo({"bar":"baz"})', which can then - handle whatever special exclusion logic is needed. - """ - for k,v in attrs.items(): - exclude = getattr(self, '_exclude_'+k, None) - if exclude: - exclude(v) - else: - self._exclude_misc(k,v) - - def _exclude_packages(self,packages): - if not isinstance(packages,sequence): - raise DistutilsSetupError( - "packages: setting must be a list or tuple (%r)" % (packages,) - ) - list(map(self.exclude_package, packages)) - - def _parse_command_opts(self, parser, args): - # Remove --with-X/--without-X options when processing command args - self.global_options = self.__class__.global_options - self.negative_opt = self.__class__.negative_opt - - # First, expand any aliases - command = args[0] - aliases = self.get_option_dict('aliases') - while command in aliases: - src,alias = aliases[command] - del aliases[command] # ensure each alias can expand only once! - import shlex - args[:1] = shlex.split(alias,True) - command = args[0] - - nargs = _Distribution._parse_command_opts(self, parser, args) - - # Handle commands that want to consume all remaining arguments - cmd_class = self.get_command_class(command) - if getattr(cmd_class,'command_consumes_arguments',None): - self.get_option_dict(command)['args'] = ("command line", nargs) - if nargs is not None: - return [] - - return nargs - - def get_cmdline_options(self): - """Return a '{cmd: {opt:val}}' map of all command-line options - - Option names are all long, but do not include the leading '--', and - contain dashes rather than underscores. If the option doesn't take - an argument (e.g. '--quiet'), the 'val' is 'None'. - - Note that options provided by config files are intentionally excluded. 
- """ - - d = {} - - for cmd,opts in self.command_options.items(): - - for opt,(src,val) in opts.items(): - - if src != "command line": - continue - - opt = opt.replace('_','-') - - if val==0: - cmdobj = self.get_command_obj(cmd) - neg_opt = self.negative_opt.copy() - neg_opt.update(getattr(cmdobj,'negative_opt',{})) - for neg,pos in neg_opt.items(): - if pos==opt: - opt=neg - val=None - break - else: - raise AssertionError("Shouldn't be able to get here") - - elif val==1: - val = None - - d.setdefault(cmd,{})[opt] = val - - return d - - def iter_distribution_names(self): - """Yield all packages, modules, and extension names in distribution""" - - for pkg in self.packages or (): - yield pkg - - for module in self.py_modules or (): - yield module - - for ext in self.ext_modules or (): - if isinstance(ext,tuple): - name, buildinfo = ext - else: - name = ext.name - if name.endswith('module'): - name = name[:-6] - yield name - - def handle_display_options(self, option_order): - """If there were any non-global "display-only" options - (--help-commands or the metadata display options) on the command - line, display the requested info and return true; else return - false. - """ - import sys - - if sys.version_info < (3,) or self.help_commands: - return _Distribution.handle_display_options(self, option_order) - - # Stdout may be StringIO (e.g. in tests) - import io - if not isinstance(sys.stdout, io.TextIOWrapper): - return _Distribution.handle_display_options(self, option_order) - - # Don't wrap stdout if utf-8 is already the encoding. Provides - # workaround for #334. 
- if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): - return _Distribution.handle_display_options(self, option_order) - - # Print metadata in UTF-8 no matter the platform - encoding = sys.stdout.encoding - errors = sys.stdout.errors - newline = sys.platform != 'win32' and '\n' or None - line_buffering = sys.stdout.line_buffering - - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) - try: - return _Distribution.handle_display_options(self, option_order) - finally: - sys.stdout = io.TextIOWrapper( - sys.stdout.detach(), encoding, errors, newline, line_buffering) - - -# Install it throughout the distutils -for module in distutils.dist, distutils.core, distutils.cmd: - module.Distribution = Distribution - - -class Feature: - """ - **deprecated** -- The `Feature` facility was never completely implemented - or supported, `has reported issues - `_ and will be removed in - a future version. - - A subset of the distribution that can be excluded if unneeded/wanted - - Features are created using these keyword arguments: - - 'description' -- a short, human readable description of the feature, to - be used in error messages, and option help messages. - - 'standard' -- if true, the feature is included by default if it is - available on the current system. Otherwise, the feature is only - included if requested via a command line '--with-X' option, or if - another included feature requires it. The default setting is 'False'. - - 'available' -- if true, the feature is available for installation on the - current system. The default setting is 'True'. - - 'optional' -- if true, the feature's inclusion can be controlled from the - command line, using the '--with-X' or '--without-X' options. If - false, the feature's inclusion status is determined automatically, - based on 'availabile', 'standard', and whether any other feature - requires it. The default setting is 'True'. 
- - 'require_features' -- a string or sequence of strings naming features - that should also be included if this feature is included. Defaults to - empty list. May also contain 'Require' objects that should be - added/removed from the distribution. - - 'remove' -- a string or list of strings naming packages to be removed - from the distribution if this feature is *not* included. If the - feature *is* included, this argument is ignored. This argument exists - to support removing features that "crosscut" a distribution, such as - defining a 'tests' feature that removes all the 'tests' subpackages - provided by other features. The default for this argument is an empty - list. (Note: the named package(s) or modules must exist in the base - distribution when the 'setup()' function is initially called.) - - other keywords -- any other keyword arguments are saved, and passed to - the distribution's 'include()' and 'exclude()' methods when the - feature is included or excluded, respectively. So, for example, you - could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be - added or removed from the distribution as appropriate. - - A feature must include at least one 'requires', 'remove', or other - keyword argument. Otherwise, it can't affect the distribution in any way. - Note also that you can subclass 'Feature' to create your own specialized - feature types that modify the distribution in other ways when included or - excluded. See the docstrings for the various methods here for more detail. - Aside from the methods, the only feature attributes that distributions look - at are 'description' and 'optional'. - """ - - @staticmethod - def warn_deprecated(): - warnings.warn( - "Features are deprecated and will be removed in a future " - "version. 
See http://bitbucket.org/pypa/setuptools/65.", - DeprecationWarning, - stacklevel=3, - ) - - def __init__(self, description, standard=False, available=True, - optional=True, require_features=(), remove=(), **extras): - self.warn_deprecated() - - self.description = description - self.standard = standard - self.available = available - self.optional = optional - if isinstance(require_features,(str,Require)): - require_features = require_features, - - self.require_features = [ - r for r in require_features if isinstance(r,str) - ] - er = [r for r in require_features if not isinstance(r,str)] - if er: extras['require_features'] = er - - if isinstance(remove,str): - remove = remove, - self.remove = remove - self.extras = extras - - if not remove and not require_features and not extras: - raise DistutilsSetupError( - "Feature %s: must define 'require_features', 'remove', or at least one" - " of 'packages', 'py_modules', etc." - ) - - def include_by_default(self): - """Should this feature be included by default?""" - return self.available and self.standard - - def include_in(self,dist): - - """Ensure feature and its requirements are included in distribution - - You may override this in a subclass to perform additional operations on - the distribution. Note that this method may be called more than once - per feature, and so should be idempotent. - - """ - - if not self.available: - raise DistutilsPlatformError( - self.description+" is required," - "but is not available on this platform" - ) - - dist.include(**self.extras) - - for f in self.require_features: - dist.include_feature(f) - - def exclude_from(self,dist): - - """Ensure feature is excluded from distribution - - You may override this in a subclass to perform additional operations on - the distribution. This method will be called at most once per - feature, and only after all included features have been asked to - include themselves. 
- """ - - dist.exclude(**self.extras) - - if self.remove: - for item in self.remove: - dist.exclude_package(item) - - def validate(self,dist): - - """Verify that feature makes sense in context of distribution - - This method is called by the distribution just before it parses its - command line. It checks to ensure that the 'remove' attribute, if any, - contains only valid package/module names that are present in the base - distribution when 'setup()' is called. You may override it in a - subclass to perform any other required validation of the feature - against a target distribution. - """ - - for item in self.remove: - if not dist.has_contents_for(item): - raise DistutilsSetupError( - "%s wants to be able to remove %s, but the distribution" - " doesn't contain any packages or modules under %s" - % (self.description, item, item) - ) diff --git a/libs/setuptools-2.2/setuptools/extension.py b/libs/setuptools-2.2/setuptools/extension.py deleted file mode 100644 index d7892d3..0000000 --- a/libs/setuptools-2.2/setuptools/extension.py +++ /dev/null @@ -1,46 +0,0 @@ -import sys -import distutils.core -import distutils.extension - -from setuptools.dist import _get_unpatched - -_Extension = _get_unpatched(distutils.core.Extension) - -def have_pyrex(): - """ - Return True if Cython or Pyrex can be imported. 
- """ - pyrex_impls = 'Cython.Distutils.build_ext', 'Pyrex.Distutils.build_ext' - for pyrex_impl in pyrex_impls: - try: - # from (pyrex_impl) import build_ext - __import__(pyrex_impl, fromlist=['build_ext']).build_ext - return True - except Exception: - pass - return False - - -class Extension(_Extension): - """Extension that uses '.c' files in place of '.pyx' files""" - - def __init__(self, *args, **kw): - _Extension.__init__(self, *args, **kw) - if not have_pyrex(): - self._convert_pyx_sources_to_c() - - def _convert_pyx_sources_to_c(self): - "convert .pyx extensions to .c" - def pyx_to_c(source): - if source.endswith('.pyx'): - source = source[:-4] + '.c' - return source - self.sources = list(map(pyx_to_c, self.sources)) - -class Library(Extension): - """Just like a regular Extension, but built as a library instead""" - -distutils.core.Extension = Extension -distutils.extension.Extension = Extension -if 'distutils.command.build_ext' in sys.modules: - sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/libs/setuptools-2.2/setuptools/gui-32.exe b/libs/setuptools-2.2/setuptools/gui-32.exe deleted file mode 100644 index f8d3509..0000000 Binary files a/libs/setuptools-2.2/setuptools/gui-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/gui-64.exe b/libs/setuptools-2.2/setuptools/gui-64.exe deleted file mode 100644 index 330c51a..0000000 Binary files a/libs/setuptools-2.2/setuptools/gui-64.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/gui-arm-32.exe b/libs/setuptools-2.2/setuptools/gui-arm-32.exe deleted file mode 100644 index 537aff3..0000000 Binary files a/libs/setuptools-2.2/setuptools/gui-arm-32.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/gui.exe b/libs/setuptools-2.2/setuptools/gui.exe deleted file mode 100644 index f8d3509..0000000 Binary files a/libs/setuptools-2.2/setuptools/gui.exe and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/lib2to3_ex.py 
b/libs/setuptools-2.2/setuptools/lib2to3_ex.py deleted file mode 100644 index feef591..0000000 --- a/libs/setuptools-2.2/setuptools/lib2to3_ex.py +++ /dev/null @@ -1,58 +0,0 @@ -""" -Customized Mixin2to3 support: - - - adds support for converting doctests - - -This module raises an ImportError on Python 2. -""" - -from distutils.util import Mixin2to3 as _Mixin2to3 -from distutils import log -from lib2to3.refactor import RefactoringTool, get_fixers_from_package -import setuptools - -class DistutilsRefactoringTool(RefactoringTool): - def log_error(self, msg, *args, **kw): - log.error(msg, *args) - - def log_message(self, msg, *args): - log.info(msg, *args) - - def log_debug(self, msg, *args): - log.debug(msg, *args) - -class Mixin2to3(_Mixin2to3): - def run_2to3(self, files, doctests = False): - # See of the distribution option has been set, otherwise check the - # setuptools default. - if self.distribution.use_2to3 is not True: - return - if not files: - return - log.info("Fixing "+" ".join(files)) - self.__build_fixer_names() - self.__exclude_fixers() - if doctests: - if setuptools.run_2to3_on_doctests: - r = DistutilsRefactoringTool(self.fixer_names) - r.refactor(files, write=True, doctests_only=True) - else: - _Mixin2to3.run_2to3(self, files) - - def __build_fixer_names(self): - if self.fixer_names: return - self.fixer_names = [] - for p in setuptools.lib2to3_fixer_packages: - self.fixer_names.extend(get_fixers_from_package(p)) - if self.distribution.use_2to3_fixers is not None: - for p in self.distribution.use_2to3_fixers: - self.fixer_names.extend(get_fixers_from_package(p)) - - def __exclude_fixers(self): - excluded_fixers = getattr(self, 'exclude_fixers', []) - if self.distribution.use_2to3_exclude_fixers is not None: - excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) - for fixer_name in excluded_fixers: - if fixer_name in self.fixer_names: - self.fixer_names.remove(fixer_name) diff --git a/libs/setuptools-2.2/setuptools/package_index.py 
b/libs/setuptools-2.2/setuptools/package_index.py deleted file mode 100644 index 167c34e..0000000 --- a/libs/setuptools-2.2/setuptools/package_index.py +++ /dev/null @@ -1,1058 +0,0 @@ -"""PyPI and direct package downloading""" -import sys -import os -import re -import shutil -import socket -import base64 -import hashlib -from functools import wraps - -from pkg_resources import ( - CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, - require, Environment, find_distributions, safe_name, safe_version, - to_filename, Requirement, DEVELOP_DIST, -) -from setuptools import ssl_support -from distutils import log -from distutils.errors import DistutilsError -from setuptools.compat import (urllib2, httplib, StringIO, HTTPError, - urlparse, urlunparse, unquote, splituser, - url2pathname, name2codepoint, - unichr, urljoin, urlsplit, urlunsplit, - ConfigParser) -from setuptools.compat import filterfalse -from fnmatch import translate -from setuptools.py26compat import strip_fragment -from setuptools.py27compat import get_all_headers - -EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') -HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) -# this is here to fix emacs' cruddy broken syntax highlighting -PYPI_MD5 = re.compile( - '([^<]+)\n\s+\\(md5\\)' -) -URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match -EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() - -__all__ = [ - 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', - 'interpret_distro_name', -] - -_SOCKET_TIMEOUT = 15 - -def parse_bdist_wininst(name): - """Return (base,pyversion) or (None,None) for possible .exe name""" - - lower = name.lower() - base, py_ver, plat = None, None, None - - if lower.endswith('.exe'): - if lower.endswith('.win32.exe'): - base = name[:-10] - plat = 'win32' - elif lower.startswith('.win32-py',-16): - py_ver = name[-7:-4] - base = name[:-16] - plat = 'win32' - elif lower.endswith('.win-amd64.exe'): - base = name[:-14] - plat = 'win-amd64' - elif 
lower.startswith('.win-amd64-py',-20): - py_ver = name[-7:-4] - base = name[:-20] - plat = 'win-amd64' - return base,py_ver,plat - - -def egg_info_for_url(url): - scheme, server, path, parameters, query, fragment = urlparse(url) - base = unquote(path.split('/')[-1]) - if server=='sourceforge.net' and base=='download': # XXX Yuck - base = unquote(path.split('/')[-2]) - if '#' in base: base, fragment = base.split('#',1) - return base,fragment - -def distros_for_url(url, metadata=None): - """Yield egg or source distribution objects that might be found at a URL""" - base, fragment = egg_info_for_url(url) - for dist in distros_for_location(url, base, metadata): yield dist - if fragment: - match = EGG_FRAGMENT.match(fragment) - if match: - for dist in interpret_distro_name( - url, match.group(1), metadata, precedence = CHECKOUT_DIST - ): - yield dist - -def distros_for_location(location, basename, metadata=None): - """Yield egg or source distribution objects based on basename""" - if basename.endswith('.egg.zip'): - basename = basename[:-4] # strip the .zip - if basename.endswith('.egg') and '-' in basename: - # only one, unambiguous interpretation - return [Distribution.from_location(location, basename, metadata)] - if basename.endswith('.exe'): - win_base, py_ver, platform = parse_bdist_wininst(basename) - if win_base is not None: - return interpret_distro_name( - location, win_base, metadata, py_ver, BINARY_DIST, platform - ) - # Try source distro extensions (.zip, .tgz, etc.) 
- # - for ext in EXTENSIONS: - if basename.endswith(ext): - basename = basename[:-len(ext)] - return interpret_distro_name(location, basename, metadata) - return [] # no extension matched - -def distros_for_filename(filename, metadata=None): - """Yield possible egg or source distribution objects based on a filename""" - return distros_for_location( - normalize_path(filename), os.path.basename(filename), metadata - ) - - -def interpret_distro_name( - location, basename, metadata, py_version=None, precedence=SOURCE_DIST, - platform=None - ): - """Generate alternative interpretations of a source distro name - - Note: if `location` is a filesystem filename, you should call - ``pkg_resources.normalize_path()`` on it before passing it to this - routine! - """ - # Generate alternative interpretations of a source distro name - # Because some packages are ambiguous as to name/versions split - # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. - # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" - # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, - # the spurious interpretations should be ignored, because in the event - # there's also an "adns" package, the spurious "python-1.1.0" version will - # compare lower than any numeric version number, and is therefore unlikely - # to match a request for it. It's still a potential problem, though, and - # in the long run PyPI and the distutils should go for "safe" names and - # versions in distribution archive names (sdist and bdist). 
- - parts = basename.split('-') - if not py_version: - for i,p in enumerate(parts[2:]): - if len(p)==5 and p.startswith('py2.'): - return # It's a bdist_dumb, not an sdist -- bail out - - for p in range(1,len(parts)+1): - yield Distribution( - location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), - py_version=py_version, precedence = precedence, - platform = platform - ) - -# From Python 2.7 docs -def unique_everseen(iterable, key=None): - "List unique elements, preserving order. Remember all elements ever seen." - # unique_everseen('AAAABBBCCDAABBB') --> A B C D - # unique_everseen('ABBCcAD', str.lower) --> A B C D - seen = set() - seen_add = seen.add - if key is None: - for element in filterfalse(seen.__contains__, iterable): - seen_add(element) - yield element - else: - for element in iterable: - k = key(element) - if k not in seen: - seen_add(k) - yield element - -def unique_values(func): - """ - Wrap a function returning an iterable such that the resulting iterable - only ever yields unique items. 
- """ - @wraps(func) - def wrapper(*args, **kwargs): - return unique_everseen(func(*args, **kwargs)) - return wrapper - -REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) -# this line is here to fix emacs' cruddy broken syntax highlighting - -@unique_values -def find_external_links(url, page): - """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" - - for match in REL.finditer(page): - tag, rel = match.groups() - rels = set(map(str.strip, rel.lower().split(','))) - if 'homepage' in rels or 'download' in rels: - for match in HREF.finditer(tag): - yield urljoin(url, htmldecode(match.group(1))) - - for tag in ("Home Page", "Download URL"): - pos = page.find(tag) - if pos!=-1: - match = HREF.search(page,pos) - if match: - yield urljoin(url, htmldecode(match.group(1))) - -user_agent = "Python-urllib/%s setuptools/%s" % ( - sys.version[:3], require('setuptools')[0].version -) - -class ContentChecker(object): - """ - A null content checker that defines the interface for checking content - """ - def feed(self, block): - """ - Feed a block of data to the hash. - """ - return - - def is_valid(self): - """ - Check the hash. Return False if validation fails. - """ - return True - - def report(self, reporter, template): - """ - Call reporter with information about the checker (hash name) - substituted into the template. 
- """ - return - -class HashChecker(ContentChecker): - pattern = re.compile( - r'(?Psha1|sha224|sha384|sha256|sha512|md5)=' - r'(?P[a-f0-9]+)' - ) - - def __init__(self, hash_name, expected): - self.hash_name = hash_name - self.hash = hashlib.new(hash_name) - self.expected = expected - - @classmethod - def from_url(cls, url): - "Construct a (possibly null) ContentChecker from a URL" - fragment = urlparse(url)[-1] - if not fragment: - return ContentChecker() - match = cls.pattern.search(fragment) - if not match: - return ContentChecker() - return cls(**match.groupdict()) - - def feed(self, block): - self.hash.update(block) - - def is_valid(self): - return self.hash.hexdigest() == self.expected - - def report(self, reporter, template): - msg = template % self.hash_name - return reporter(msg) - - -class PackageIndex(Environment): - """A distribution index that scans web pages for download URLs""" - - def __init__( - self, index_url="https://pypi.python.org/simple", hosts=('*',), - ca_bundle=None, verify_ssl=True, *args, **kw - ): - Environment.__init__(self,*args,**kw) - self.index_url = index_url + "/"[:not index_url.endswith('/')] - self.scanned_urls = {} - self.fetched_urls = {} - self.package_pages = {} - self.allows = re.compile('|'.join(map(translate,hosts))).match - self.to_scan = [] - if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): - self.opener = ssl_support.opener_for(ca_bundle) - else: self.opener = urllib2.urlopen - - def process_url(self, url, retrieve=False): - """Evaluate a URL as a possible download, and maybe retrieve it""" - if url in self.scanned_urls and not retrieve: - return - self.scanned_urls[url] = True - if not URL_SCHEME(url): - self.process_filename(url) - return - else: - dists = list(distros_for_url(url)) - if dists: - if not self.url_ok(url): - return - self.debug("Found link: %s", url) - - if dists or not retrieve or url in self.fetched_urls: - list(map(self.add, dists)) - return # don't need 
the actual page - - if not self.url_ok(url): - self.fetched_urls[url] = True - return - - self.info("Reading %s", url) - self.fetched_urls[url] = True # prevent multiple fetch attempts - f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) - if f is None: return - self.fetched_urls[f.url] = True - if 'html' not in f.headers.get('content-type', '').lower(): - f.close() # not html, we can't process it - return - - base = f.url # handle redirects - page = f.read() - if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. - if isinstance(f, HTTPError): - # Errors have no charset, assume latin1: - charset = 'latin-1' - else: - charset = f.headers.get_param('charset') or 'latin-1' - page = page.decode(charset, "ignore") - f.close() - for match in HREF.finditer(page): - link = urljoin(base, htmldecode(match.group(1))) - self.process_url(link) - if url.startswith(self.index_url) and getattr(f,'code',None)!=404: - page = self.process_index(url, page) - - def process_filename(self, fn, nested=False): - # process filenames or directories - if not os.path.exists(fn): - self.warn("Not found: %s", fn) - return - - if os.path.isdir(fn) and not nested: - path = os.path.realpath(fn) - for item in os.listdir(path): - self.process_filename(os.path.join(path,item), True) - - dists = distros_for_filename(fn) - if dists: - self.debug("Found: %s", fn) - list(map(self.add, dists)) - - def url_ok(self, url, fatal=False): - s = URL_SCHEME(url) - if (s and s.group(1).lower()=='file') or self.allows(urlparse(url)[1]): - return True - msg = ("\nNote: Bypassing %s (disallowed host; see " - "http://bit.ly/1dg9ijs for details).\n") - if fatal: - raise DistutilsError(msg % url) - else: - self.warn(msg, url) - - def scan_egg_links(self, search_path): - for item in search_path: - if os.path.isdir(item): - for entry in os.listdir(item): - if entry.endswith('.egg-link'): - self.scan_egg_link(item, entry) - - def scan_egg_link(self, path, 
entry): - lines = [_f for _f in map(str.strip, - open(os.path.join(path, entry))) if _f] - if len(lines)==2: - for dist in find_distributions(os.path.join(path, lines[0])): - dist.location = os.path.join(path, *lines) - dist.precedence = SOURCE_DIST - self.add(dist) - - def process_index(self,url,page): - """Process the contents of a PyPI page""" - def scan(link): - # Process a URL to see if it's for a package page - if link.startswith(self.index_url): - parts = list(map( - unquote, link[len(self.index_url):].split('/') - )) - if len(parts)==2 and '#' not in parts[1]: - # it's a package page, sanitize and index it - pkg = safe_name(parts[0]) - ver = safe_version(parts[1]) - self.package_pages.setdefault(pkg.lower(),{})[link] = True - return to_filename(pkg), to_filename(ver) - return None, None - - # process an index page into the package-page index - for match in HREF.finditer(page): - try: - scan(urljoin(url, htmldecode(match.group(1)))) - except ValueError: - pass - - pkg, ver = scan(url) # ensure this page is in the page index - if pkg: - # process individual package page - for new_url in find_external_links(url, page): - # Process the found URL - base, frag = egg_info_for_url(new_url) - if base.endswith('.py') and not frag: - if ver: - new_url+='#egg=%s-%s' % (pkg,ver) - else: - self.need_version_info(url) - self.scan_url(new_url) - - return PYPI_MD5.sub( - lambda m: '%s' % m.group(1,3,2), page - ) - else: - return "" # no sense double-scanning non-package pages - - def need_version_info(self, url): - self.scan_all( - "Page at %s links to .py file(s) without version info; an index " - "scan is required.", url - ) - - def scan_all(self, msg=None, *args): - if self.index_url not in self.fetched_urls: - if msg: self.warn(msg,*args) - self.info( - "Scanning index of all packages (this may take a while)" - ) - self.scan_url(self.index_url) - - def find_packages(self, requirement): - self.scan_url(self.index_url + requirement.unsafe_name+'/') - - if not 
self.package_pages.get(requirement.key): - # Fall back to safe version of the name - self.scan_url(self.index_url + requirement.project_name+'/') - - if not self.package_pages.get(requirement.key): - # We couldn't find the target package, so search the index page too - self.not_found_in_index(requirement) - - for url in list(self.package_pages.get(requirement.key,())): - # scan each page that might be related to the desired package - self.scan_url(url) - - def obtain(self, requirement, installer=None): - self.prescan() - self.find_packages(requirement) - for dist in self[requirement.key]: - if dist in requirement: - return dist - self.debug("%s does not match %s", requirement, dist) - return super(PackageIndex, self).obtain(requirement,installer) - - def check_hash(self, checker, filename, tfp): - """ - checker is a ContentChecker - """ - checker.report(self.debug, - "Validating %%s checksum for %s" % filename) - if not checker.is_valid(): - tfp.close() - os.unlink(filename) - raise DistutilsError( - "%s validation failed for %s; " - "possible download problem?" % ( - checker.hash.name, os.path.basename(filename)) - ) - - def add_find_links(self, urls): - """Add `urls` to the list that will be prescanned for searches""" - for url in urls: - if ( - self.to_scan is None # if we have already "gone online" - or not URL_SCHEME(url) # or it's a local file/directory - or url.startswith('file:') - or list(distros_for_url(url)) # or a direct package link - ): - # then go ahead and process it now - self.scan_url(url) - else: - # otherwise, defer retrieval till later - self.to_scan.append(url) - - def prescan(self): - """Scan urls scheduled for prescanning (e.g. 
--find-links)""" - if self.to_scan: - list(map(self.scan_url, self.to_scan)) - self.to_scan = None # from now on, go ahead and process immediately - - def not_found_in_index(self, requirement): - if self[requirement.key]: # we've seen at least one distro - meth, msg = self.info, "Couldn't retrieve index page for %r" - else: # no distros seen for this name, might be misspelled - meth, msg = (self.warn, - "Couldn't find index page for %r (maybe misspelled?)") - meth(msg, requirement.unsafe_name) - self.scan_all() - - def download(self, spec, tmpdir): - """Locate and/or download `spec` to `tmpdir`, returning a local path - - `spec` may be a ``Requirement`` object, or a string containing a URL, - an existing local filename, or a project/version requirement spec - (i.e. the string form of a ``Requirement`` object). If it is the URL - of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one - that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is - automatically created alongside the downloaded file. - - If `spec` is a ``Requirement`` object or a string containing a - project/version requirement spec, this method returns the location of - a matching distribution (possibly after downloading it to `tmpdir`). - If `spec` is a locally existing file or directory name, it is simply - returned unchanged. If `spec` is a URL, it is downloaded to a subpath - of `tmpdir`, and the local filename is returned. Various errors may be - raised if a problem occurs during downloading. 
- """ - if not isinstance(spec,Requirement): - scheme = URL_SCHEME(spec) - if scheme: - # It's a url, download it to tmpdir - found = self._download_url(scheme.group(1), spec, tmpdir) - base, fragment = egg_info_for_url(spec) - if base.endswith('.py'): - found = self.gen_setup(found,fragment,tmpdir) - return found - elif os.path.exists(spec): - # Existing file or directory, just return it - return spec - else: - try: - spec = Requirement.parse(spec) - except ValueError: - raise DistutilsError( - "Not a URL, existing file, or requirement spec: %r" % - (spec,) - ) - return getattr(self.fetch_distribution(spec, tmpdir),'location',None) - - def fetch_distribution( - self, requirement, tmpdir, force_scan=False, source=False, - develop_ok=False, local_index=None - ): - """Obtain a distribution suitable for fulfilling `requirement` - - `requirement` must be a ``pkg_resources.Requirement`` instance. - If necessary, or if the `force_scan` flag is set, the requirement is - searched for in the (online) package index as well as the locally - installed packages. If a distribution matching `requirement` is found, - the returned distribution's ``location`` is the value you would have - gotten from calling the ``download()`` method with the matching - distribution's URL or filename. If no matching distribution is found, - ``None`` is returned. - - If the `source` flag is set, only source distributions and source - checkout links will be considered. Unless the `develop_ok` flag is - set, development and system eggs (i.e., those using the ``.egg-info`` - format) will be ignored. 
- """ - # process a Requirement - self.info("Searching for %s", requirement) - skipped = {} - dist = None - - def find(req, env=None): - if env is None: - env = self - # Find a matching distribution; may be called more than once - - for dist in env[req.key]: - - if dist.precedence==DEVELOP_DIST and not develop_ok: - if dist not in skipped: - self.warn("Skipping development or system egg: %s",dist) - skipped[dist] = 1 - continue - - if dist in req and (dist.precedence<=SOURCE_DIST or not source): - return dist - - if force_scan: - self.prescan() - self.find_packages(requirement) - dist = find(requirement) - - if local_index is not None: - dist = dist or find(requirement, local_index) - - if dist is None: - if self.to_scan is not None: - self.prescan() - dist = find(requirement) - - if dist is None and not force_scan: - self.find_packages(requirement) - dist = find(requirement) - - if dist is None: - self.warn( - "No local packages or download links found for %s%s", - (source and "a source distribution of " or ""), - requirement, - ) - else: - self.info("Best match: %s", dist) - return dist.clone(location=self.download(dist.location, tmpdir)) - - def fetch(self, requirement, tmpdir, force_scan=False, source=False): - """Obtain a file suitable for fulfilling `requirement` - - DEPRECATED; use the ``fetch_distribution()`` method now instead. For - backward compatibility, this routine is identical but returns the - ``location`` of the downloaded distribution instead of a distribution - object. 
- """ - dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) - if dist is not None: - return dist.location - return None - - def gen_setup(self, filename, fragment, tmpdir): - match = EGG_FRAGMENT.match(fragment) - dists = match and [ - d for d in - interpret_distro_name(filename, match.group(1), None) if d.version - ] or [] - - if len(dists)==1: # unambiguous ``#egg`` fragment - basename = os.path.basename(filename) - - # Make sure the file has been downloaded to the temp dir. - if os.path.dirname(filename) != tmpdir: - dst = os.path.join(tmpdir, basename) - from setuptools.command.easy_install import samefile - if not samefile(filename, dst): - shutil.copy2(filename, dst) - filename=dst - - file = open(os.path.join(tmpdir, 'setup.py'), 'w') - file.write( - "from setuptools import setup\n" - "setup(name=%r, version=%r, py_modules=[%r])\n" - % ( - dists[0].project_name, dists[0].version, - os.path.splitext(basename)[0] - ) - ) - file.close() - return filename - - elif match: - raise DistutilsError( - "Can't unambiguously interpret project/version identifier %r; " - "any dashes in the name or version should be escaped using " - "underscores. %r" % (fragment,dists) - ) - else: - raise DistutilsError( - "Can't process plain .py files without an '#egg=name-version'" - " suffix to enable automatic setup script generation." 
- ) - - dl_blocksize = 8192 - def _download_to(self, url, filename): - self.info("Downloading %s", url) - # Download the file - fp, tfp, info = None, None, None - try: - checker = HashChecker.from_url(url) - fp = self.open_url(strip_fragment(url)) - if isinstance(fp, HTTPError): - raise DistutilsError( - "Can't download %s: %s %s" % (url, fp.code,fp.msg) - ) - headers = fp.info() - blocknum = 0 - bs = self.dl_blocksize - size = -1 - if "content-length" in headers: - # Some servers return multiple Content-Length headers :( - sizes = get_all_headers(headers, 'Content-Length') - size = max(map(int, sizes)) - self.reporthook(url, filename, blocknum, bs, size) - tfp = open(filename,'wb') - while True: - block = fp.read(bs) - if block: - checker.feed(block) - tfp.write(block) - blocknum += 1 - self.reporthook(url, filename, blocknum, bs, size) - else: - break - self.check_hash(checker, filename, tfp) - return headers - finally: - if fp: fp.close() - if tfp: tfp.close() - - def reporthook(self, url, filename, blocknum, blksize, size): - pass # no-op - - def open_url(self, url, warning=None): - if url.startswith('file:'): - return local_open(url) - try: - return open_with_auth(url, self.opener) - except (ValueError, httplib.InvalidURL): - v = sys.exc_info()[1] - msg = ' '.join([str(arg) for arg in v.args]) - if warning: - self.warn(warning, msg) - else: - raise DistutilsError('%s %s' % (url, msg)) - except urllib2.HTTPError: - v = sys.exc_info()[1] - return v - except urllib2.URLError: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v.reason) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v.reason)) - except httplib.BadStatusLine: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v.line) - else: - raise DistutilsError( - '%s returned a bad status line. 
The server might be ' - 'down, %s' % - (url, v.line) - ) - except httplib.HTTPException: - v = sys.exc_info()[1] - if warning: - self.warn(warning, v) - else: - raise DistutilsError("Download error for %s: %s" - % (url, v)) - - def _download_url(self, scheme, url, tmpdir): - # Determine download filename - # - name, fragment = egg_info_for_url(url) - if name: - while '..' in name: - name = name.replace('..','.').replace('\\','_') - else: - name = "__downloaded__" # default if URL has no path contents - - if name.endswith('.egg.zip'): - name = name[:-4] # strip the extra .zip before download - - filename = os.path.join(tmpdir,name) - - # Download the file - # - if scheme=='svn' or scheme.startswith('svn+'): - return self._download_svn(url, filename) - elif scheme=='git' or scheme.startswith('git+'): - return self._download_git(url, filename) - elif scheme.startswith('hg+'): - return self._download_hg(url, filename) - elif scheme=='file': - return url2pathname(urlparse(url)[2]) - else: - self.url_ok(url, True) # raises error if not allowed - return self._attempt_download(url, filename) - - def scan_url(self, url): - self.process_url(url, True) - - def _attempt_download(self, url, filename): - headers = self._download_to(url, filename) - if 'html' in headers.get('content-type','').lower(): - return self._download_html(url, headers, filename) - else: - return filename - - def _download_html(self, url, headers, filename): - file = open(filename) - for line in file: - if line.strip(): - # Check for a subversion index page - if re.search(r'([^- ]+ - )?Revision \d+:', line): - # it's a subversion index page: - file.close() - os.unlink(filename) - return self._download_svn(url, filename) - break # not an index page - file.close() - os.unlink(filename) - raise DistutilsError("Unexpected HTML page found at "+url) - - def _download_svn(self, url, filename): - url = url.split('#',1)[0] # remove any fragment for svn's sake - creds = '' - if url.lower().startswith('svn:') and '@' 
in url: - scheme, netloc, path, p, q, f = urlparse(url) - if not netloc and path.startswith('//') and '/' in path[2:]: - netloc, path = path[2:].split('/',1) - auth, host = splituser(netloc) - if auth: - if ':' in auth: - user, pw = auth.split(':',1) - creds = " --username=%s --password=%s" % (user, pw) - else: - creds = " --username="+auth - netloc = host - url = urlunparse((scheme, netloc, url, p, q, f)) - self.info("Doing subversion checkout from %s to %s", url, filename) - os.system("svn checkout%s -q %s %s" % (creds, url, filename)) - return filename - - @staticmethod - def _vcs_split_rev_from_url(url, pop_prefix=False): - scheme, netloc, path, query, frag = urlsplit(url) - - scheme = scheme.split('+', 1)[-1] - - # Some fragment identification fails - path = path.split('#',1)[0] - - rev = None - if '@' in path: - path, rev = path.rsplit('@', 1) - - # Also, discard fragment - url = urlunsplit((scheme, netloc, path, query, '')) - - return url, rev - - def _download_git(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing git clone from %s to %s", url, filename) - os.system("git clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Checking out %s", rev) - os.system("(cd %s && git checkout --quiet %s)" % ( - filename, - rev, - )) - - return filename - - def _download_hg(self, url, filename): - filename = filename.split('#',1)[0] - url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) - - self.info("Doing hg clone from %s to %s", url, filename) - os.system("hg clone --quiet %s %s" % (url, filename)) - - if rev is not None: - self.info("Updating to %s", rev) - os.system("(cd %s && hg up -C -r %s >&-)" % ( - filename, - rev, - )) - - return filename - - def debug(self, msg, *args): - log.debug(msg, *args) - - def info(self, msg, *args): - log.info(msg, *args) - - def warn(self, msg, *args): - log.warn(msg, *args) - -# This pattern matches a 
character entity reference (a decimal numeric -# references, a hexadecimal numeric reference, or a named reference). -entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub - -def uchr(c): - if not isinstance(c, int): - return c - if c>255: return unichr(c) - return chr(c) - -def decode_entity(match): - what = match.group(1) - if what.startswith('#x'): - what = int(what[2:], 16) - elif what.startswith('#'): - what = int(what[1:]) - else: - what = name2codepoint.get(what, match.group(0)) - return uchr(what) - -def htmldecode(text): - """Decode HTML entities in the given text.""" - return entity_sub(decode_entity, text) - -def socket_timeout(timeout=15): - def _socket_timeout(func): - def _socket_timeout(*args, **kwargs): - old_timeout = socket.getdefaulttimeout() - socket.setdefaulttimeout(timeout) - try: - return func(*args, **kwargs) - finally: - socket.setdefaulttimeout(old_timeout) - return _socket_timeout - return _socket_timeout - -def _encode_auth(auth): - """ - A function compatible with Python 2.3-3.3 that will encode - auth from a URL suitable for an HTTP header. - >>> str(_encode_auth('username%3Apassword')) - 'dXNlcm5hbWU6cGFzc3dvcmQ=' - - Long auth strings should not cause a newline to be inserted. - >>> long_auth = 'username:' + 'password'*10 - >>> chr(10) in str(_encode_auth(long_auth)) - False - """ - auth_s = unquote(auth) - # convert to bytes - auth_bytes = auth_s.encode() - # use the legacy interface for Python 2.3 support - encoded_bytes = base64.encodestring(auth_bytes) - # convert back to a string - encoded = encoded_bytes.decode() - # strip the trailing carriage return - return encoded.replace('\n','') - -class Credential(object): - """ - A username/password pair. Use like a namedtuple. 
- """ - def __init__(self, username, password): - self.username = username - self.password = password - - def __iter__(self): - yield self.username - yield self.password - - def __str__(self): - return '%(username)s:%(password)s' % vars(self) - -class PyPIConfig(ConfigParser.ConfigParser): - - def __init__(self): - """ - Load from ~/.pypirc - """ - defaults = dict.fromkeys(['username', 'password', 'repository'], '') - ConfigParser.ConfigParser.__init__(self, defaults) - - rc = os.path.join(os.path.expanduser('~'), '.pypirc') - if os.path.exists(rc): - self.read(rc) - - @property - def creds_by_repository(self): - sections_with_repositories = [ - section for section in self.sections() - if self.get(section, 'repository').strip() - ] - - return dict(map(self._get_repo_cred, sections_with_repositories)) - - def _get_repo_cred(self, section): - repo = self.get(section, 'repository').strip() - return repo, Credential( - self.get(section, 'username').strip(), - self.get(section, 'password').strip(), - ) - - def find_credential(self, url): - """ - If the URL indicated appears to be a repository defined in this - config, return the credential for that repository. - """ - for repository, cred in self.creds_by_repository.items(): - if url.startswith(repository): - return cred - - -def open_with_auth(url, opener=urllib2.urlopen): - """Open a urllib2 request, handling HTTP authentication""" - - scheme, netloc, path, params, query, frag = urlparse(url) - - # Double scheme does not raise on Mac OS X as revealed by a - # failing test. We would expect "nonnumeric port". Refs #20. 
- if netloc.endswith(':'): - raise httplib.InvalidURL("nonnumeric port: ''") - - if scheme in ('http', 'https'): - auth, host = splituser(netloc) - else: - auth = None - - if not auth: - cred = PyPIConfig().find_credential(url) - if cred: - auth = str(cred) - info = cred.username, url - log.info('Authenticating as %s for %s (from .pypirc)' % info) - - if auth: - auth = "Basic " + _encode_auth(auth) - new_url = urlunparse((scheme,host,path,params,query,frag)) - request = urllib2.Request(new_url) - request.add_header("Authorization", auth) - else: - request = urllib2.Request(url) - - request.add_header('User-Agent', user_agent) - fp = opener(request) - - if auth: - # Put authentication info back into request URL if same host, - # so that links found on the page will work - s2, h2, path2, param2, query2, frag2 = urlparse(fp.url) - if s2==scheme and h2==host: - fp.url = urlunparse((s2,netloc,path2,param2,query2,frag2)) - - return fp - -# adding a timeout to avoid freezing package_index -open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) - - -def fix_sf_url(url): - return url # backward compatibility - -def local_open(url): - """Read a local path, with special support for directories""" - scheme, server, path, param, query, frag = urlparse(url) - filename = url2pathname(path) - if os.path.isfile(filename): - return urllib2.urlopen(url) - elif path.endswith('/') and os.path.isdir(filename): - files = [] - for f in os.listdir(filename): - if f=='index.html': - fp = open(os.path.join(filename,f),'r') - body = fp.read() - fp.close() - break - elif os.path.isdir(os.path.join(filename,f)): - f+='/' - files.append("<a href=%r>%s</a>" % (f,f)) - else: - body = ("<html><head><title>%s" % url) + \ - "%s" % '\n'.join(files) - status, message = 200, "OK" - else: - status, message, body = 404, "Path not found", "Not found" - - headers = {'content-type': 'text/html'} - return HTTPError(url, status, message, headers, StringIO(body)) diff --git 
a/libs/setuptools-2.2/setuptools/py26compat.py b/libs/setuptools-2.2/setuptools/py26compat.py deleted file mode 100644 index 738b0cc..0000000 --- a/libs/setuptools-2.2/setuptools/py26compat.py +++ /dev/null @@ -1,19 +0,0 @@ -""" -Compatibility Support for Python 2.6 and earlier -""" - -import sys - -from setuptools.compat import splittag - -def strip_fragment(url): - """ - In `Python 8280 `_, Python 2.7 and - later was patched to disregard the fragment when making URL requests. - Do the same for Python 2.6 and earlier. - """ - url, fragment = splittag(url) - return url - -if sys.version_info >= (2,7): - strip_fragment = lambda x: x diff --git a/libs/setuptools-2.2/setuptools/py27compat.py b/libs/setuptools-2.2/setuptools/py27compat.py deleted file mode 100644 index 9d2886d..0000000 --- a/libs/setuptools-2.2/setuptools/py27compat.py +++ /dev/null @@ -1,15 +0,0 @@ -""" -Compatibility Support for Python 2.7 and earlier -""" - -import sys - -def get_all_headers(message, key): - """ - Given an HTTPMessage, return all headers matching a given key. 
- """ - return message.get_all(key) - -if sys.version_info < (3,): - def get_all_headers(message, key): - return message.getheaders(key) diff --git a/libs/setuptools-2.2/setuptools/py31compat.py b/libs/setuptools-2.2/setuptools/py31compat.py deleted file mode 100644 index dbb324b..0000000 --- a/libs/setuptools-2.2/setuptools/py31compat.py +++ /dev/null @@ -1,11 +0,0 @@ -__all__ = ['get_config_vars', 'get_path'] - -try: - # Python 2.7 or >=3.2 - from sysconfig import get_config_vars, get_path -except ImportError: - from distutils.sysconfig import get_config_vars, get_python_lib - def get_path(name): - if name not in ('platlib', 'purelib'): - raise ValueError("Name must be purelib or platlib") - return get_python_lib(name=='platlib') diff --git a/libs/setuptools-2.2/setuptools/sandbox.py b/libs/setuptools-2.2/setuptools/sandbox.py deleted file mode 100644 index 042c595..0000000 --- a/libs/setuptools-2.2/setuptools/sandbox.py +++ /dev/null @@ -1,322 +0,0 @@ -import os -import sys -import tempfile -import operator -import functools -import itertools -import re - -import pkg_resources - -if os.name == "java": - import org.python.modules.posix.PosixModule as _os -else: - _os = sys.modules[os.name] -try: - _file = file -except NameError: - _file = None -_open = open -from distutils.errors import DistutilsError -from pkg_resources import working_set - -from setuptools.compat import builtins, execfile - -__all__ = [ - "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", -] - -def run_setup(setup_script, args): - """Run a distutils setup script, sandboxed in its directory""" - old_dir = os.getcwd() - save_argv = sys.argv[:] - save_path = sys.path[:] - setup_dir = os.path.abspath(os.path.dirname(setup_script)) - temp_dir = os.path.join(setup_dir,'temp') - if not os.path.isdir(temp_dir): os.makedirs(temp_dir) - save_tmp = tempfile.tempdir - save_modules = sys.modules.copy() - pr_state = pkg_resources.__getstate__() - try: - tempfile.tempdir = temp_dir - 
os.chdir(setup_dir) - try: - sys.argv[:] = [setup_script]+list(args) - sys.path.insert(0, setup_dir) - # reset to include setup dir, w/clean callback list - working_set.__init__() - working_set.callbacks.append(lambda dist:dist.activate()) - DirectorySandbox(setup_dir).run( - lambda: execfile( - "setup.py", - {'__file__':setup_script, '__name__':'__main__'} - ) - ) - except SystemExit: - v = sys.exc_info()[1] - if v.args and v.args[0]: - raise - # Normal exit, just return - finally: - pkg_resources.__setstate__(pr_state) - sys.modules.update(save_modules) - # remove any modules imported within the sandbox - del_modules = [ - mod_name for mod_name in sys.modules - if mod_name not in save_modules - # exclude any encodings modules. See #285 - and not mod_name.startswith('encodings.') - ] - list(map(sys.modules.__delitem__, del_modules)) - os.chdir(old_dir) - sys.path[:] = save_path - sys.argv[:] = save_argv - tempfile.tempdir = save_tmp - - -class AbstractSandbox: - """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" - - _active = False - - def __init__(self): - self._attrs = [ - name for name in dir(_os) - if not name.startswith('_') and hasattr(self,name) - ] - - def _copy(self, source): - for name in self._attrs: - setattr(os, name, getattr(source,name)) - - def run(self, func): - """Run 'func' under os sandboxing""" - try: - self._copy(self) - if _file: - builtins.file = self._file - builtins.open = self._open - self._active = True - return func() - finally: - self._active = False - if _file: - builtins.file = _file - builtins.open = _open - self._copy(_os) - - def _mk_dual_path_wrapper(name): - original = getattr(_os,name) - def wrap(self,src,dst,*args,**kw): - if self._active: - src,dst = self._remap_pair(name,src,dst,*args,**kw) - return original(src,dst,*args,**kw) - return wrap - - for name in ["rename", "link", "symlink"]: - if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) - - def _mk_single_path_wrapper(name, 
original=None): - original = original or getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return original(path,*args,**kw) - return wrap - - if _file: - _file = _mk_single_path_wrapper('file', _file) - _open = _mk_single_path_wrapper('open', _open) - for name in [ - "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", - "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", - "startfile", "mkfifo", "mknod", "pathconf", "access" - ]: - if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) - - def _mk_single_with_return(name): - original = getattr(_os,name) - def wrap(self,path,*args,**kw): - if self._active: - path = self._remap_input(name,path,*args,**kw) - return self._remap_output(name, original(path,*args,**kw)) - return original(path,*args,**kw) - return wrap - - for name in ['readlink', 'tempnam']: - if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) - - def _mk_query(name): - original = getattr(_os,name) - def wrap(self,*args,**kw): - retval = original(*args,**kw) - if self._active: - return self._remap_output(name, retval) - return retval - return wrap - - for name in ['getcwd', 'tmpnam']: - if hasattr(_os,name): locals()[name] = _mk_query(name) - - def _validate_path(self,path): - """Called to remap or validate any path, whether input or output""" - return path - - def _remap_input(self,operation,path,*args,**kw): - """Called for path inputs""" - return self._validate_path(path) - - def _remap_output(self,operation,path): - """Called for path outputs""" - return self._validate_path(path) - - def _remap_pair(self,operation,src,dst,*args,**kw): - """Called for path pairs like rename, link, and symlink operations""" - return ( - self._remap_input(operation+'-from',src,*args,**kw), - self._remap_input(operation+'-to',dst,*args,**kw) - ) - - -if hasattr(os, 'devnull'): - _EXCEPTIONS = [os.devnull,] -else: - _EXCEPTIONS = [] - -try: - 
from win32com.client.gencache import GetGeneratePath - _EXCEPTIONS.append(GetGeneratePath()) - del GetGeneratePath -except ImportError: - # it appears pywin32 is not installed, so no need to exclude. - pass - -class DirectorySandbox(AbstractSandbox): - """Restrict operations to a single subdirectory - pseudo-chroot""" - - write_ops = dict.fromkeys([ - "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", - "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", - ]) - - _exception_patterns = [ - # Allow lib2to3 to attempt to save a pickled grammar object (#121) - '.*lib2to3.*\.pickle$', - ] - "exempt writing to paths that match the pattern" - - def __init__(self, sandbox, exceptions=_EXCEPTIONS): - self._sandbox = os.path.normcase(os.path.realpath(sandbox)) - self._prefix = os.path.join(self._sandbox,'') - self._exceptions = [ - os.path.normcase(os.path.realpath(path)) - for path in exceptions - ] - AbstractSandbox.__init__(self) - - def _violation(self, operation, *args, **kw): - raise SandboxViolation(operation, args, kw) - - if _file: - def _file(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("file", path, mode, *args, **kw) - return _file(path,mode,*args,**kw) - - def _open(self, path, mode='r', *args, **kw): - if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): - self._violation("open", path, mode, *args, **kw) - return _open(path,mode,*args,**kw) - - def tmpnam(self): - self._violation("tmpnam") - - def _ok(self, path): - active = self._active - try: - self._active = False - realpath = os.path.normcase(os.path.realpath(path)) - return ( - self._exempted(realpath) - or realpath == self._sandbox - or realpath.startswith(self._prefix) - ) - finally: - self._active = active - - def _exempted(self, filepath): - start_matches = ( - filepath.startswith(exception) - for exception in self._exceptions - ) - pattern_matches = ( - re.match(pattern, filepath) - for 
pattern in self._exception_patterns - ) - candidates = itertools.chain(start_matches, pattern_matches) - return any(candidates) - - def _remap_input(self, operation, path, *args, **kw): - """Called for path inputs""" - if operation in self.write_ops and not self._ok(path): - self._violation(operation, os.path.realpath(path), *args, **kw) - return path - - def _remap_pair(self, operation, src, dst, *args, **kw): - """Called for path pairs like rename, link, and symlink operations""" - if not self._ok(src) or not self._ok(dst): - self._violation(operation, src, dst, *args, **kw) - return (src,dst) - - def open(self, file, flags, mode=0x1FF, *args, **kw): # 0777 - """Called for low-level os.open()""" - if flags & WRITE_FLAGS and not self._ok(file): - self._violation("os.open", file, flags, mode, *args, **kw) - return _os.open(file,flags,mode, *args, **kw) - -WRITE_FLAGS = functools.reduce( - operator.or_, [getattr(_os, a, 0) for a in - "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] -) - -class SandboxViolation(DistutilsError): - """A setup script attempted to modify the filesystem outside the sandbox""" - - def __str__(self): - return """SandboxViolation: %s%r %s - -The package setup script has attempted to modify files on your system -that are not within the EasyInstall build area, and has been aborted. - -This package cannot be safely installed by EasyInstall, and may not -support alternate installation locations even if you run its setup -script by hand. 
Please inform the package's author and the EasyInstall -maintainers to find out if a fix or workaround is available.""" % self.args - - - - - - - - - - - - - - - - - - - - - - - - - - - -# diff --git a/libs/setuptools-2.2/setuptools/script template (dev).py b/libs/setuptools-2.2/setuptools/script template (dev).py deleted file mode 100644 index b3fe209..0000000 --- a/libs/setuptools-2.2/setuptools/script template (dev).py +++ /dev/null @@ -1,11 +0,0 @@ -# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import sys -from pkg_resources import require -require("""%(spec)r""") -del require -__file__ = """%(dev_path)r""" -if sys.version_info < (3, 0): - execfile(__file__) -else: - exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/libs/setuptools-2.2/setuptools/script template.py b/libs/setuptools-2.2/setuptools/script template.py deleted file mode 100644 index 8dd5d51..0000000 --- a/libs/setuptools-2.2/setuptools/script template.py +++ /dev/null @@ -1,4 +0,0 @@ -# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r -__requires__ = """%(spec)r""" -import pkg_resources -pkg_resources.run_script("""%(spec)r""", """%(script_name)r""") diff --git a/libs/setuptools-2.2/setuptools/site-patch.py b/libs/setuptools-2.2/setuptools/site-patch.py deleted file mode 100644 index a7166f1..0000000 --- a/libs/setuptools-2.2/setuptools/site-patch.py +++ /dev/null @@ -1,83 +0,0 @@ -def __boot(): - import sys, os, os.path - PYTHONPATH = os.environ.get('PYTHONPATH') - if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): - PYTHONPATH = [] - else: - PYTHONPATH = PYTHONPATH.split(os.pathsep) - - pic = getattr(sys,'path_importer_cache',{}) - stdpath = sys.path[len(PYTHONPATH):] - mydir = os.path.dirname(__file__) - #print "searching",stdpath,sys.path - - for item in stdpath: - if item==mydir or not item: - continue # skip if current dir. 
on Windows, or my own directory - importer = pic.get(item) - if importer is not None: - loader = importer.find_module('site') - if loader is not None: - # This should actually reload the current module - loader.load_module('site') - break - else: - try: - import imp # Avoid import loop in Python >= 3.3 - stream, path, descr = imp.find_module('site',[item]) - except ImportError: - continue - if stream is None: - continue - try: - # This should actually reload the current module - imp.load_module('site',stream,path,descr) - finally: - stream.close() - break - else: - raise ImportError("Couldn't find the real 'site' module") - - #print "loaded", __file__ - - known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp - - oldpos = getattr(sys,'__egginsert',0) # save old insertion position - sys.__egginsert = 0 # and reset the current one - - for item in PYTHONPATH: - addsitedir(item) - - sys.__egginsert += oldpos # restore effective old position - - d,nd = makepath(stdpath[0]) - insert_at = None - new_path = [] - - for item in sys.path: - p,np = makepath(item) - - if np==nd and insert_at is None: - # We've hit the first 'system' path entry, so added entries go here - insert_at = len(new_path) - - if np in known_paths or insert_at is None: - new_path.append(item) - else: - # new path after the insert point, back-insert it - new_path.insert(insert_at, item) - insert_at += 1 - - sys.path[:] = new_path - -if __name__=='site': - __boot() - del __boot - - - - - - - - diff --git a/libs/setuptools-2.2/setuptools/ssl_support.py b/libs/setuptools-2.2/setuptools/ssl_support.py deleted file mode 100644 index 7b5f429..0000000 --- a/libs/setuptools-2.2/setuptools/ssl_support.py +++ /dev/null @@ -1,234 +0,0 @@ -import os -import socket -import atexit -import re - -import pkg_resources -from pkg_resources import ResolutionError, ExtractionError -from setuptools.compat import urllib2 - -try: - import ssl -except ImportError: - ssl = None - -__all__ = [ - 
'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', - 'opener_for' -] - -cert_paths = """ -/etc/pki/tls/certs/ca-bundle.crt -/etc/ssl/certs/ca-certificates.crt -/usr/share/ssl/certs/ca-bundle.crt -/usr/local/share/certs/ca-root.crt -/etc/ssl/cert.pem -/System/Library/OpenSSL/certs/cert.pem -""".strip().split() - - -HTTPSHandler = HTTPSConnection = object - -for what, where in ( - ('HTTPSHandler', ['urllib2','urllib.request']), - ('HTTPSConnection', ['httplib', 'http.client']), -): - for module in where: - try: - exec("from %s import %s" % (module, what)) - except ImportError: - pass - -is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) - - -try: - from ssl import CertificateError, match_hostname -except ImportError: - try: - from backports.ssl_match_hostname import CertificateError - from backports.ssl_match_hostname import match_hostname - except ImportError: - CertificateError = None - match_hostname = None - -if not CertificateError: - class CertificateError(ValueError): - pass - -if not match_hostname: - def _dnsname_match(dn, hostname, max_wildcards=1): - """Matching according to RFC 6125, section 6.4.3 - - http://tools.ietf.org/html/rfc6125#section-6.4.3 - """ - pats = [] - if not dn: - return False - - # Ported from python3-syntax: - # leftmost, *remainder = dn.split(r'.') - parts = dn.split(r'.') - leftmost = parts[0] - remainder = parts[1:] - - wildcards = leftmost.count('*') - if wildcards > max_wildcards: - # Issue #17980: avoid denials of service by refusing more - # than one wildcard per fragment. A survey of established - # policy among SSL implementations showed it to be a - # reasonable choice. - raise CertificateError( - "too many wildcards in certificate DNS name: " + repr(dn)) - - # speed up common case w/o wildcards - if not wildcards: - return dn.lower() == hostname.lower() - - # RFC 6125, section 6.4.3, subitem 1. 
- # The client SHOULD NOT attempt to match a presented identifier in which - # the wildcard character comprises a label other than the left-most label. - if leftmost == '*': - # When '*' is a fragment by itself, it matches a non-empty dotless - # fragment. - pats.append('[^.]+') - elif leftmost.startswith('xn--') or hostname.startswith('xn--'): - # RFC 6125, section 6.4.3, subitem 3. - # The client SHOULD NOT attempt to match a presented identifier - # where the wildcard character is embedded within an A-label or - # U-label of an internationalized domain name. - pats.append(re.escape(leftmost)) - else: - # Otherwise, '*' matches any dotless string, e.g. www* - pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) - - # add the remaining fragments, ignore any wildcards - for frag in remainder: - pats.append(re.escape(frag)) - - pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) - return pat.match(hostname) - - def match_hostname(cert, hostname): - """Verify that *cert* (in decoded format as returned by - SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 - rules are followed, but IP addresses are not accepted for *hostname*. - - CertificateError is raised on failure. On success, the function - returns nothing. - """ - if not cert: - raise ValueError("empty or no certificate") - dnsnames = [] - san = cert.get('subjectAltName', ()) - for key, value in san: - if key == 'DNS': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if not dnsnames: - # The subject is only checked when there is no dNSName entry - # in subjectAltName - for sub in cert.get('subject', ()): - for key, value in sub: - # XXX according to RFC 2818, the most specific Common Name - # must be used. 
- if key == 'commonName': - if _dnsname_match(value, hostname): - return - dnsnames.append(value) - if len(dnsnames) > 1: - raise CertificateError("hostname %r " - "doesn't match either of %s" - % (hostname, ', '.join(map(repr, dnsnames)))) - elif len(dnsnames) == 1: - raise CertificateError("hostname %r " - "doesn't match %r" - % (hostname, dnsnames[0])) - else: - raise CertificateError("no appropriate commonName or " - "subjectAltName fields were found") - - -class VerifyingHTTPSHandler(HTTPSHandler): - """Simple verifying handler: no auth, subclasses, timeouts, etc.""" - - def __init__(self, ca_bundle): - self.ca_bundle = ca_bundle - HTTPSHandler.__init__(self) - - def https_open(self, req): - return self.do_open( - lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req - ) - - -class VerifyingHTTPSConn(HTTPSConnection): - """Simple verifying connection: no auth, subclasses, timeouts, etc.""" - def __init__(self, host, ca_bundle, **kw): - HTTPSConnection.__init__(self, host, **kw) - self.ca_bundle = ca_bundle - - def connect(self): - sock = socket.create_connection( - (self.host, self.port), getattr(self, 'source_address', None) - ) - - # Handle the socket if a (proxy) tunnel is present - if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): - self.sock = sock - self._tunnel() - - self.sock = ssl.wrap_socket( - sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle - ) - try: - match_hostname(self.sock.getpeercert(), self.host) - except CertificateError: - self.sock.shutdown(socket.SHUT_RDWR) - self.sock.close() - raise - -def opener_for(ca_bundle=None): - """Get a urlopen() replacement that uses ca_bundle for verification""" - return urllib2.build_opener( - VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) - ).open - - -_wincerts = None - -def get_win_certfile(): - global _wincerts - if _wincerts is not None: - return _wincerts.name - - try: - from wincertstore import CertFile - except ImportError: - return None - - class 
MyCertFile(CertFile): - def __init__(self, stores=(), certs=()): - CertFile.__init__(self) - for store in stores: - self.addstore(store) - self.addcerts(certs) - atexit.register(self.close) - - _wincerts = MyCertFile(stores=['CA', 'ROOT']) - return _wincerts.name - - -def find_ca_bundle(): - """Return an existing CA bundle path, or None""" - if os.name=='nt': - return get_win_certfile() - else: - for cert_path in cert_paths: - if os.path.isfile(cert_path): - return cert_path - try: - return pkg_resources.resource_filename('certifi', 'cacert.pem') - except (ImportError, ResolutionError, ExtractionError): - return None diff --git a/libs/setuptools-2.2/setuptools/svn_utils.py b/libs/setuptools-2.2/setuptools/svn_utils.py deleted file mode 100644 index e6d09d1..0000000 --- a/libs/setuptools-2.2/setuptools/svn_utils.py +++ /dev/null @@ -1,564 +0,0 @@ -import os -import re -import sys -from distutils import log -import xml.dom.pulldom -import shlex -import locale -import codecs -import unicodedata -import warnings -from setuptools.compat import unicode -from xml.sax.saxutils import unescape - -try: - import urlparse -except ImportError: - import urllib.parse as urlparse - -from subprocess import Popen as _Popen, PIPE as _PIPE - -#NOTE: Use of the command line options require SVN 1.3 or newer (December 2005) -# and SVN 1.3 hasn't been supported by the developers since mid 2008. 
- -#subprocess is called several times with shell=(sys.platform=='win32') -#see the follow for more information: -# http://bugs.python.org/issue8557 -# http://stackoverflow.com/questions/5658622/ -# python-subprocess-popen-environment-path - - -def _run_command(args, stdout=_PIPE, stderr=_PIPE, encoding=None, stream=0): - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(args, stdout=stdout, stderr=stderr, - shell=(sys.platform == 'win32')) - - data = proc.communicate()[stream] - except OSError: - return 1, '' - - #doubled checked and - data = decode_as_string(data, encoding) - - #communciate calls wait() - return proc.returncode, data - - -def _get_entry_schedule(entry): - schedule = entry.getElementsByTagName('schedule')[0] - return "".join([t.nodeValue - for t in schedule.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_target_property(target): - property_text = target.getElementsByTagName('property')[0] - return "".join([t.nodeValue - for t in property_text.childNodes - if t.nodeType == t.TEXT_NODE]) - - -def _get_xml_data(decoded_str): - if sys.version_info < (3, 0): - #old versions want an encoded string - data = decoded_str.encode('utf-8') - else: - data = decoded_str - return data - - -def joinpath(prefix, *suffix): - if not prefix or prefix == '.': - return os.path.join(*suffix) - return os.path.join(prefix, *suffix) - -def determine_console_encoding(): - try: - #try for the preferred encoding - encoding = locale.getpreferredencoding() - - #see if the locale.getdefaultlocale returns null - #some versions of python\platforms return US-ASCII - #when it cannot determine an encoding - if not encoding or encoding == "US-ASCII": - encoding = locale.getdefaultlocale()[1] - - if encoding: - codecs.lookup(encoding) # make sure a lookup error is not made - - except (locale.Error, LookupError): - encoding = None - - is_osx = sys.platform == "darwin" - if not encoding: - return ["US-ASCII", "utf-8"][is_osx] - elif 
encoding.startswith("mac-") and is_osx: - #certain versions of python would return mac-roman as default - #OSX as a left over of earlier mac versions. - return "utf-8" - else: - return encoding - -_console_encoding = determine_console_encoding() - -def decode_as_string(text, encoding=None): - """ - Decode the console or file output explicitly using getpreferredencoding. - The text paraemeter should be a encoded string, if not no decode occurs - If no encoding is given, getpreferredencoding is used. If encoding is - specified, that is used instead. This would be needed for SVN --xml - output. Unicode is explicitly put in composed NFC form. - - --xml should be UTF-8 (SVN Issue 2938) the discussion on the Subversion - DEV List from 2007 seems to indicate the same. - """ - #text should be a byte string - - if encoding is None: - encoding = _console_encoding - - if not isinstance(text, unicode): - text = text.decode(encoding) - - text = unicodedata.normalize('NFC', text) - - return text - - -def parse_dir_entries(decoded_str): - '''Parse the entries from a recursive info xml''' - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - entries = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'entry': - doc.expandNode(node) - if not _get_entry_schedule(node).startswith('delete'): - entries.append((node.getAttribute('path'), - node.getAttribute('kind'))) - - return entries[1:] # do not want the root directory - - -def parse_externals_xml(decoded_str, prefix=''): - '''Parse a propget svn:externals xml''' - prefix = os.path.normpath(prefix) - prefix = os.path.normcase(prefix) - - doc = xml.dom.pulldom.parseString(_get_xml_data(decoded_str)) - externals = list() - - for event, node in doc: - if event == 'START_ELEMENT' and node.nodeName == 'target': - doc.expandNode(node) - path = os.path.normpath(node.getAttribute('path')) - - if os.path.normcase(path).startswith(prefix): - path = path[len(prefix)+1:] - - data = 
_get_target_property(node) - #data should be decoded already - for external in parse_external_prop(data): - externals.append(joinpath(path, external)) - - return externals # do not want the root directory - - -def parse_external_prop(lines): - """ - Parse the value of a retrieved svn:externals entry. - - possible token setups (with quotng and backscaping in laters versions) - URL[@#] EXT_FOLDERNAME - [-r#] URL EXT_FOLDERNAME - EXT_FOLDERNAME [-r#] URL - """ - externals = [] - for line in lines.splitlines(): - line = line.lstrip() # there might be a "\ " - if not line: - continue - - if sys.version_info < (3, 0): - #shlex handles NULLs just fine and shlex in 2.7 tries to encode - #as ascii automatiically - line = line.encode('utf-8') - line = shlex.split(line) - if sys.version_info < (3, 0): - line = [x.decode('utf-8') for x in line] - - #EXT_FOLDERNAME is either the first or last depending on where - #the URL falls - if urlparse.urlsplit(line[-1])[0]: - external = line[0] - else: - external = line[-1] - - external = decode_as_string(external, encoding="utf-8") - externals.append(os.path.normpath(external)) - - return externals - - -def parse_prop_file(filename, key): - found = False - f = open(filename, 'rt') - data = '' - try: - for line in iter(f.readline, ''): # can't use direct iter! - parts = line.split() - if len(parts) == 2: - kind, length = parts - data = f.read(int(length)) - if kind == 'K' and data == key: - found = True - elif kind == 'V' and found: - break - finally: - f.close() - - return data - - -class SvnInfo(object): - ''' - Generic svn_info object. No has little knowledge of how to extract - information. Use cls.load to instatiate according svn version. - - Paths are not filesystem encoded. 
- ''' - - @staticmethod - def get_svn_version(): - code, data = _run_command(['svn', '--version', '--quiet']) - if code == 0 and data: - return data.strip() - else: - return '' - - #svnversion return values (previous implementations return max revision) - # 4123:4168 mixed revision working copy - # 4168M modified working copy - # 4123S switched working copy - # 4123:4168MS mixed revision, modified, switched working copy - revision_re = re.compile(r'(?:([\-0-9]+):)?(\d+)([a-z]*)\s*$', re.I) - - @classmethod - def load(cls, dirname=''): - normdir = os.path.normpath(dirname) - code, data = _run_command(['svn', 'info', normdir]) - # Must check for some contents, as some use empty directories - # in testcases - svn_dir = os.path.join(normdir, '.svn') - has_svn = (os.path.isfile(os.path.join(svn_dir, 'entries')) or - os.path.isfile(os.path.join(svn_dir, 'dir-props')) or - os.path.isfile(os.path.join(svn_dir, 'dir-prop-base'))) - - svn_version = tuple(cls.get_svn_version().split('.')) - - try: - base_svn_version = tuple(int(x) for x in svn_version[:2]) - except ValueError: - base_svn_version = tuple() - - if not has_svn: - return SvnInfo(dirname) - - if code or not base_svn_version or base_svn_version < (1, 3): - warnings.warn(("No SVN 1.3+ command found: falling back " - "on pre 1.7 .svn parsing"), DeprecationWarning) - return SvnFileInfo(dirname) - - if base_svn_version < (1, 5): - return Svn13Info(dirname) - - return Svn15Info(dirname) - - def __init__(self, path=''): - self.path = path - self._entries = None - self._externals = None - - def get_revision(self): - 'Retrieve the directory revision informatino using svnversion' - code, data = _run_command(['svnversion', '-c', self.path]) - if code: - log.warn("svnversion failed") - return 0 - - parsed = self.revision_re.match(data) - if parsed: - return int(parsed.group(2)) - else: - return 0 - - @property - def entries(self): - if self._entries is None: - self._entries = self.get_entries() - return self._entries - - 
@property - def externals(self): - if self._externals is None: - self._externals = self.get_externals() - return self._externals - - def iter_externals(self): - ''' - Iterate over the svn:external references in the repository path. - ''' - for item in self.externals: - yield item - - def iter_files(self): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - for item, kind in self.entries: - if kind.lower() == 'file': - yield item - - def iter_dirs(self, include_root=True): - ''' - Iterate over the non-deleted file entries in the repository path - ''' - if include_root: - yield self.path - for item, kind in self.entries: - if kind.lower() == 'dir': - yield item - - def get_entries(self): - return [] - - def get_externals(self): - return [] - - -class Svn13Info(SvnInfo): - def get_entries(self): - code, data = _run_command(['svn', 'info', '-R', '--xml', self.path], - encoding="utf-8") - - if code: - log.debug("svn info failed") - return [] - - return parse_dir_entries(data) - - def get_externals(self): - #Previous to 1.5 --xml was not supported for svn propget and the -R - #output format breaks the shlex compatible semantics. 
- cmd = ['svn', 'propget', 'svn:externals'] - result = [] - for folder in self.iter_dirs(): - code, lines = _run_command(cmd + [folder], encoding="utf-8") - if code != 0: - log.warn("svn propget failed") - return [] - #lines should a str - for external in parse_external_prop(lines): - if folder: - external = os.path.join(folder, external) - result.append(os.path.normpath(external)) - - return result - - -class Svn15Info(Svn13Info): - def get_externals(self): - cmd = ['svn', 'propget', 'svn:externals', self.path, '-R', '--xml'] - code, lines = _run_command(cmd, encoding="utf-8") - if code: - log.debug("svn propget failed") - return [] - return parse_externals_xml(lines, prefix=os.path.abspath(self.path)) - - -class SvnFileInfo(SvnInfo): - - def __init__(self, path=''): - super(SvnFileInfo, self).__init__(path) - self._directories = None - self._revision = None - - def _walk_svn(self, base): - entry_file = joinpath(base, '.svn', 'entries') - if os.path.isfile(entry_file): - entries = SVNEntriesFile.load(base) - yield (base, False, entries.parse_revision()) - for path in entries.get_undeleted_records(): - path = decode_as_string(path) - path = joinpath(base, path) - if os.path.isfile(path): - yield (path, True, None) - elif os.path.isdir(path): - for item in self._walk_svn(path): - yield item - - def _build_entries(self): - entries = list() - - rev = 0 - for path, isfile, dir_rev in self._walk_svn(self.path): - if isfile: - entries.append((path, 'file')) - else: - entries.append((path, 'dir')) - rev = max(rev, dir_rev) - - self._entries = entries - self._revision = rev - - def get_entries(self): - if self._entries is None: - self._build_entries() - return self._entries - - def get_revision(self): - if self._revision is None: - self._build_entries() - return self._revision - - def get_externals(self): - prop_files = [['.svn', 'dir-prop-base'], - ['.svn', 'dir-props']] - externals = [] - - for dirname in self.iter_dirs(): - prop_file = None - for rel_parts in 
prop_files: - filename = joinpath(dirname, *rel_parts) - if os.path.isfile(filename): - prop_file = filename - - if prop_file is not None: - ext_prop = parse_prop_file(prop_file, 'svn:externals') - #ext_prop should be utf-8 coming from svn:externals - ext_prop = decode_as_string(ext_prop, encoding="utf-8") - externals.extend(parse_external_prop(ext_prop)) - - return externals - - -def svn_finder(dirname=''): - #combined externals due to common interface - #combined externals and entries due to lack of dir_props in 1.7 - info = SvnInfo.load(dirname) - for path in info.iter_files(): - yield path - - for path in info.iter_externals(): - sub_info = SvnInfo.load(path) - for sub_path in sub_info.iter_files(): - yield sub_path - - -class SVNEntriesFile(object): - def __init__(self, data): - self.data = data - - @classmethod - def load(class_, base): - filename = os.path.join(base, '.svn', 'entries') - f = open(filename) - try: - result = SVNEntriesFile.read(f) - finally: - f.close() - return result - - @classmethod - def read(class_, fileobj): - data = fileobj.read() - is_xml = data.startswith(' revision_line_number - and section[revision_line_number]) - ] - return rev_numbers - - def get_undeleted_records(self): - undeleted = lambda s: s and s[0] and (len(s) < 6 or s[5] != 'delete') - result = [ - section[0] - for section in self.get_sections() - if undeleted(section) - ] - return result - - -class SVNEntriesFileXML(SVNEntriesFile): - def is_valid(self): - return True - - def get_url(self): - "Get repository URL" - urlre = re.compile('url="([^"]+)"') - return urlre.search(self.data).group(1) - - def parse_revision_numbers(self): - revre = re.compile(r'committed-rev="(\d+)"') - return [ - int(m.group(1)) - for m in revre.finditer(self.data) - ] - - def get_undeleted_records(self): - entries_pattern = \ - re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I) - results = [ - unescape(match.group(1)) - for match in entries_pattern.finditer(self.data) - ] - return 
results - - -if __name__ == '__main__': - for name in svn_finder(sys.argv[1]): - print(name) diff --git a/libs/setuptools-2.2/setuptools/tests/__init__.py b/libs/setuptools-2.2/setuptools/tests/__init__.py deleted file mode 100644 index b5328ce..0000000 --- a/libs/setuptools-2.2/setuptools/tests/__init__.py +++ /dev/null @@ -1,352 +0,0 @@ -"""Tests for the 'setuptools' package""" -import sys -import os -import unittest -from setuptools.tests import doctest -import distutils.core -import distutils.cmd -from distutils.errors import DistutilsOptionError, DistutilsPlatformError -from distutils.errors import DistutilsSetupError -from distutils.core import Extension -from distutils.version import LooseVersion -from setuptools.compat import func_code - -from setuptools.compat import func_code -import setuptools.dist -import setuptools.depends as dep -from setuptools import Feature -from setuptools.depends import Require - -def additional_tests(): - import doctest, unittest - suite = unittest.TestSuite(( - doctest.DocFileSuite( - os.path.join('tests', 'api_tests.txt'), - optionflags=doctest.ELLIPSIS, package='pkg_resources', - ), - )) - if sys.platform == 'win32': - suite.addTest(doctest.DocFileSuite('win_script_wrapper.txt')) - return suite - -def makeSetup(**args): - """Return distribution from 'setup(**args)', without executing commands""" - - distutils.core._setup_stop_after = "commandline" - - # Don't let system command line leak into tests! 
- args.setdefault('script_args',['install']) - - try: - return setuptools.setup(**args) - finally: - distutils.core._setup_stop_after = None - - -class DependsTests(unittest.TestCase): - - def testExtractConst(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platforms - return - - def f1(): - global x, y, z - x = "test" - y = z - - fc = func_code(f1) - # unrecognized name - self.assertEqual(dep.extract_constant(fc,'q', -1), None) - - # constant assigned - self.assertEqual(dep.extract_constant(fc,'x', -1), "test") - - # expression assigned - self.assertEqual(dep.extract_constant(fc,'y', -1), -1) - - # recognized name, not assigned - self.assertEqual(dep.extract_constant(fc,'z', -1), None) - - def testFindModule(self): - self.assertRaises(ImportError, dep.find_module, 'no-such.-thing') - self.assertRaises(ImportError, dep.find_module, 'setuptools.non-existent') - f,p,i = dep.find_module('setuptools.tests') - f.close() - - def testModuleExtract(self): - if not hasattr(dep, 'get_module_constant'): - # skip on non-bytecode platforms - return - - from email import __version__ - self.assertEqual( - dep.get_module_constant('email','__version__'), __version__ - ) - self.assertEqual( - dep.get_module_constant('sys','version'), sys.version - ) - self.assertEqual( - dep.get_module_constant('setuptools.tests','__doc__'),__doc__ - ) - - def testRequire(self): - if not hasattr(dep, 'extract_constant'): - # skip on non-bytecode platformsh - return - - req = Require('Email','1.0.3','email') - - self.assertEqual(req.name, 'Email') - self.assertEqual(req.module, 'email') - self.assertEqual(req.requested_version, '1.0.3') - self.assertEqual(req.attribute, '__version__') - self.assertEqual(req.full_name(), 'Email-1.0.3') - - from email import __version__ - self.assertEqual(req.get_version(), __version__) - self.assertTrue(req.version_ok('1.0.9')) - self.assertTrue(not req.version_ok('0.9.1')) - self.assertTrue(not req.version_ok('unknown')) - - 
self.assertTrue(req.is_present()) - self.assertTrue(req.is_current()) - - req = Require('Email 3000','03000','email',format=LooseVersion) - self.assertTrue(req.is_present()) - self.assertTrue(not req.is_current()) - self.assertTrue(not req.version_ok('unknown')) - - req = Require('Do-what-I-mean','1.0','d-w-i-m') - self.assertTrue(not req.is_present()) - self.assertTrue(not req.is_current()) - - req = Require('Tests', None, 'tests', homepage="http://example.com") - self.assertEqual(req.format, None) - self.assertEqual(req.attribute, None) - self.assertEqual(req.requested_version, None) - self.assertEqual(req.full_name(), 'Tests') - self.assertEqual(req.homepage, 'http://example.com') - - paths = [os.path.dirname(p) for p in __path__] - self.assertTrue(req.is_present(paths)) - self.assertTrue(req.is_current(paths)) - - -class DistroTests(unittest.TestCase): - - def setUp(self): - self.e1 = Extension('bar.ext',['bar.c']) - self.e2 = Extension('c.y', ['y.c']) - - self.dist = makeSetup( - packages=['a', 'a.b', 'a.b.c', 'b', 'c'], - py_modules=['b.d','x'], - ext_modules = (self.e1, self.e2), - package_dir = {}, - ) - - def testDistroType(self): - self.assertTrue(isinstance(self.dist,setuptools.dist.Distribution)) - - def testExcludePackage(self): - self.dist.exclude_package('a') - self.assertEqual(self.dist.packages, ['b','c']) - - self.dist.exclude_package('b') - self.assertEqual(self.dist.packages, ['c']) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1, self.e2]) - - self.dist.exclude_package('c') - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - # test removals from unspecified options - makeSetup().exclude_package('x') - - def testIncludeExclude(self): - # remove an extension - self.dist.exclude(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2]) - - # add it back in - 
self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - # should not add duplicate - self.dist.include(ext_modules=[self.e1]) - self.assertEqual(self.dist.ext_modules, [self.e2, self.e1]) - - def testExcludePackages(self): - self.dist.exclude(packages=['c','b','a']) - self.assertEqual(self.dist.packages, []) - self.assertEqual(self.dist.py_modules, ['x']) - self.assertEqual(self.dist.ext_modules, [self.e1]) - - def testEmpty(self): - dist = makeSetup() - dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - dist = makeSetup() - dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2]) - - def testContents(self): - self.assertTrue(self.dist.has_contents_for('a')) - self.dist.exclude_package('a') - self.assertTrue(not self.dist.has_contents_for('a')) - - self.assertTrue(self.dist.has_contents_for('b')) - self.dist.exclude_package('b') - self.assertTrue(not self.dist.has_contents_for('b')) - - self.assertTrue(self.dist.has_contents_for('c')) - self.dist.exclude_package('c') - self.assertTrue(not self.dist.has_contents_for('c')) - - def testInvalidIncludeExclude(self): - self.assertRaises(DistutilsSetupError, - self.dist.include, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, nonexistent_option='x' - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, packages={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.include, ext_modules={'x':'y'} - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, ext_modules={'x':'y'} - ) - - self.assertRaises(DistutilsSetupError, - self.dist.include, package_dir=['q'] - ) - self.assertRaises(DistutilsSetupError, - self.dist.exclude, package_dir=['q'] - ) - - -class FeatureTests(unittest.TestCase): - - def setUp(self): - self.req = Require('Distutils','1.0.3','distutils') - self.dist = 
makeSetup( - features={ - 'foo': Feature("foo",standard=True,require_features=['baz',self.req]), - 'bar': Feature("bar", standard=True, packages=['pkg.bar'], - py_modules=['bar_et'], remove=['bar.ext'], - ), - 'baz': Feature( - "baz", optional=False, packages=['pkg.baz'], - scripts = ['scripts/baz_it'], - libraries=[('libfoo','foo/foofoo.c')] - ), - 'dwim': Feature("DWIM", available=False, remove='bazish'), - }, - script_args=['--without-bar', 'install'], - packages = ['pkg.bar', 'pkg.foo'], - py_modules = ['bar_et', 'bazish'], - ext_modules = [Extension('bar.ext',['bar.c'])] - ) - - def testDefaults(self): - self.assertTrue(not - Feature( - "test",standard=True,remove='x',available=False - ).include_by_default() - ) - self.assertTrue( - Feature("test",standard=True,remove='x').include_by_default() - ) - # Feature must have either kwargs, removes, or require_features - self.assertRaises(DistutilsSetupError, Feature, "test") - - def testAvailability(self): - self.assertRaises( - DistutilsPlatformError, - self.dist.features['dwim'].include_in, self.dist - ) - - def testFeatureOptions(self): - dist = self.dist - self.assertTrue( - ('with-dwim',None,'include DWIM') in dist.feature_options - ) - self.assertTrue( - ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options - ) - self.assertTrue( - ('with-bar',None,'include bar (default)') in dist.feature_options - ) - self.assertTrue( - ('without-bar',None,'exclude bar') in dist.feature_options - ) - self.assertEqual(dist.feature_negopt['without-foo'],'with-foo') - self.assertEqual(dist.feature_negopt['without-bar'],'with-bar') - self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim') - self.assertTrue(not 'without-baz' in dist.feature_negopt) - - def testUseFeatures(self): - dist = self.dist - self.assertEqual(dist.with_foo,1) - self.assertEqual(dist.with_bar,0) - self.assertEqual(dist.with_baz,1) - self.assertTrue(not 'bar_et' in dist.py_modules) - self.assertTrue(not 'pkg.bar' in dist.packages) - 
self.assertTrue('pkg.baz' in dist.packages) - self.assertTrue('scripts/baz_it' in dist.scripts) - self.assertTrue(('libfoo','foo/foofoo.c') in dist.libraries) - self.assertEqual(dist.ext_modules,[]) - self.assertEqual(dist.require_features, [self.req]) - - # If we ask for bar, it should fail because we explicitly disabled - # it on the command line - self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar') - - def testFeatureWithInvalidRemove(self): - self.assertRaises( - SystemExit, makeSetup, features = {'x':Feature('x', remove='y')} - ) - -class TestCommandTests(unittest.TestCase): - - def testTestIsCommand(self): - test_cmd = makeSetup().get_command_obj('test') - self.assertTrue(isinstance(test_cmd, distutils.cmd.Command)) - - def testLongOptSuiteWNoDefault(self): - ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite']) - ts1 = ts1.get_command_obj('test') - ts1.ensure_finalized() - self.assertEqual(ts1.test_suite, 'foo.tests.suite') - - def testDefaultSuite(self): - ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test') - ts2.ensure_finalized() - self.assertEqual(ts2.test_suite, 'bar.tests.suite') - - def testDefaultWModuleOnCmdLine(self): - ts3 = makeSetup( - test_suite='bar.tests', - script_args=['test','-m','foo.tests'] - ).get_command_obj('test') - ts3.ensure_finalized() - self.assertEqual(ts3.test_module, 'foo.tests') - self.assertEqual(ts3.test_suite, 'foo.tests.test_suite') - - def testConflictingOptions(self): - ts4 = makeSetup( - script_args=['test','-m','bar.tests', '-s','foo.tests.suite'] - ).get_command_obj('test') - self.assertRaises(DistutilsOptionError, ts4.ensure_finalized) - - def testNoSuite(self): - ts5 = makeSetup().get_command_obj('test') - ts5.ensure_finalized() - self.assertEqual(ts5.test_suite, None) diff --git a/libs/setuptools-2.2/setuptools/tests/doctest.py b/libs/setuptools-2.2/setuptools/tests/doctest.py deleted file mode 100644 index 47293c3..0000000 --- 
a/libs/setuptools-2.2/setuptools/tests/doctest.py +++ /dev/null @@ -1,2683 +0,0 @@ -# Module doctest. -# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org). -# Major enhancements and refactoring by: -# Jim Fulton -# Edward Loper - -# Provided as-is; use at your own risk; no warranty; no promises; enjoy! - -try: - basestring -except NameError: - basestring = str - -try: - enumerate -except NameError: - def enumerate(seq): - return zip(range(len(seq)),seq) - -r"""Module doctest -- a framework for running examples in docstrings. - -In simplest use, end each module M to be tested with: - -def _test(): - import doctest - doctest.testmod() - -if __name__ == "__main__": - _test() - -Then running the module as a script will cause the examples in the -docstrings to get executed and verified: - -python M.py - -This won't display anything unless an example fails, in which case the -failing example(s) and the cause(s) of the failure(s) are printed to stdout -(why not stderr? because stderr is a lame hack <0.2 wink>), and the final -line of output is "Test failed.". - -Run it with the -v switch instead: - -python M.py -v - -and a detailed report of all examples tried is printed to stdout, along -with assorted summaries at the end. - -You can force verbose mode by passing "verbose=True" to testmod, or prohibit -it by passing "verbose=False". In either of those cases, sys.argv is not -examined by testmod. - -There are a variety of other ways to run doctests, including integration -with the unittest framework, and support for running non-Python text -files containing doctests. There are also many ways to override parts -of doctest's default behaviors. See the Library Reference Manual for -details. 
-""" - -__docformat__ = 'reStructuredText en' - -__all__ = [ - # 0, Option Flags - 'register_optionflag', - 'DONT_ACCEPT_TRUE_FOR_1', - 'DONT_ACCEPT_BLANKLINE', - 'NORMALIZE_WHITESPACE', - 'ELLIPSIS', - 'IGNORE_EXCEPTION_DETAIL', - 'COMPARISON_FLAGS', - 'REPORT_UDIFF', - 'REPORT_CDIFF', - 'REPORT_NDIFF', - 'REPORT_ONLY_FIRST_FAILURE', - 'REPORTING_FLAGS', - # 1. Utility Functions - 'is_private', - # 2. Example & DocTest - 'Example', - 'DocTest', - # 3. Doctest Parser - 'DocTestParser', - # 4. Doctest Finder - 'DocTestFinder', - # 5. Doctest Runner - 'DocTestRunner', - 'OutputChecker', - 'DocTestFailure', - 'UnexpectedException', - 'DebugRunner', - # 6. Test Functions - 'testmod', - 'testfile', - 'run_docstring_examples', - # 7. Tester - 'Tester', - # 8. Unittest Support - 'DocTestSuite', - 'DocFileSuite', - 'set_unittest_reportflags', - # 9. Debugging Support - 'script_from_examples', - 'testsource', - 'debug_src', - 'debug', -] - -import __future__ - -import sys, traceback, inspect, linecache, os, re, types -import unittest, difflib, pdb, tempfile -import warnings -from setuptools.compat import StringIO, execfile, func_code, im_func - -# Don't whine about the deprecated is_private function in this -# module's tests. -warnings.filterwarnings("ignore", "is_private", DeprecationWarning, - __name__, 0) - -# There are 4 basic classes: -# - Example: a pair, plus an intra-docstring line number. -# - DocTest: a collection of examples, parsed from a docstring, plus -# info about where the docstring came from (name, filename, lineno). -# - DocTestFinder: extracts DocTests from a given object's docstring and -# its contained objects' docstrings. -# - DocTestRunner: runs DocTest cases, and accumulates statistics. -# -# So the basic picture is: -# -# list of: -# +------+ +---------+ +-------+ -# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results| -# +------+ +---------+ +-------+ -# | Example | -# | ... | -# | Example | -# +---------+ - -# Option constants. 
- -OPTIONFLAGS_BY_NAME = {} -def register_optionflag(name): - flag = 1 << len(OPTIONFLAGS_BY_NAME) - OPTIONFLAGS_BY_NAME[name] = flag - return flag - -DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1') -DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE') -NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE') -ELLIPSIS = register_optionflag('ELLIPSIS') -IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL') - -COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 | - DONT_ACCEPT_BLANKLINE | - NORMALIZE_WHITESPACE | - ELLIPSIS | - IGNORE_EXCEPTION_DETAIL) - -REPORT_UDIFF = register_optionflag('REPORT_UDIFF') -REPORT_CDIFF = register_optionflag('REPORT_CDIFF') -REPORT_NDIFF = register_optionflag('REPORT_NDIFF') -REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE') - -REPORTING_FLAGS = (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF | - REPORT_ONLY_FIRST_FAILURE) - -# Special string markers for use in `want` strings: -BLANKLINE_MARKER = '' -ELLIPSIS_MARKER = '...' - -###################################################################### -## Table of Contents -###################################################################### -# 1. Utility Functions -# 2. Example & DocTest -- store test cases -# 3. DocTest Parser -- extracts examples from strings -# 4. DocTest Finder -- extracts test cases from objects -# 5. DocTest Runner -- runs test cases -# 6. Test Functions -- convenient wrappers for testing -# 7. Tester Class -- for backwards compatibility -# 8. Unittest Support -# 9. Debugging Support -# 10. Example Usage - -###################################################################### -## 1. Utility Functions -###################################################################### - -def is_private(prefix, base): - """prefix, base -> true iff name prefix + "." + base is "private". - - Prefix may be an empty string, and base does not contain a period. 
- Prefix is ignored (although functions you write conforming to this - protocol may make use of it). - Return true iff base begins with an (at least one) underscore, but - does not both begin and end with (at least) two underscores. - - >>> is_private("a.b", "my_func") - False - >>> is_private("____", "_my_func") - True - >>> is_private("someclass", "__init__") - False - >>> is_private("sometypo", "__init_") - True - >>> is_private("x.y.z", "_") - True - >>> is_private("_x.y.z", "__") - False - >>> is_private("", "") # senseless but consistent - False - """ - warnings.warn("is_private is deprecated; it wasn't useful; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning, stacklevel=2) - return base[:1] == "_" and not base[:2] == "__" == base[-2:] - -def _extract_future_flags(globs): - """ - Return the compiler-flags associated with the future features that - have been imported into the given namespace (globs). - """ - flags = 0 - for fname in __future__.all_feature_names: - feature = globs.get(fname, None) - if feature is getattr(__future__, fname): - flags |= feature.compiler_flag - return flags - -def _normalize_module(module, depth=2): - """ - Return the module specified by `module`. In particular: - - If `module` is a module, then return module. - - If `module` is a string, then import and return the - module with that name. - - If `module` is None, then return the calling module. - The calling module is assumed to be the module of - the stack frame at the given depth in the call stack. - """ - if inspect.ismodule(module): - return module - elif isinstance(module, basestring): - return __import__(module, globals(), locals(), ["*"]) - elif module is None: - return sys.modules[sys._getframe(depth).f_globals['__name__']] - else: - raise TypeError("Expected a module, string, or None") - -def _indent(s, indent=4): - """ - Add the given number of space characters to the beginning every - non-blank line in `s`, and return the result. 
- """ - # This regexp matches the start of non-blank lines: - return re.sub('(?m)^(?!$)', indent*' ', s) - -def _exception_traceback(exc_info): - """ - Return a string containing a traceback message for the given - exc_info tuple (as returned by sys.exc_info()). - """ - # Get a traceback message. - excout = StringIO() - exc_type, exc_val, exc_tb = exc_info - traceback.print_exception(exc_type, exc_val, exc_tb, file=excout) - return excout.getvalue() - -# Override some StringIO methods. -class _SpoofOut(StringIO): - def getvalue(self): - result = StringIO.getvalue(self) - # If anything at all was written, make sure there's a trailing - # newline. There's no way for the expected output to indicate - # that a trailing newline is missing. - if result and not result.endswith("\n"): - result += "\n" - # Prevent softspace from screwing up the next test case, in - # case they used print with a trailing comma in an example. - if hasattr(self, "softspace"): - del self.softspace - return result - - def truncate(self, size=None): - StringIO.truncate(self, size) - if hasattr(self, "softspace"): - del self.softspace - -# Worst-case linear-time ellipsis matching. -def _ellipsis_match(want, got): - """ - Essentially the only subtle case: - >>> _ellipsis_match('aa...aa', 'aaa') - False - """ - if want.find(ELLIPSIS_MARKER)==-1: - return want == got - - # Find "the real" strings. - ws = want.split(ELLIPSIS_MARKER) - assert len(ws) >= 2 - - # Deal with exact matches possibly needed at one or both ends. 
- startpos, endpos = 0, len(got) - w = ws[0] - if w: # starts with exact match - if got.startswith(w): - startpos = len(w) - del ws[0] - else: - return False - w = ws[-1] - if w: # ends with exact match - if got.endswith(w): - endpos -= len(w) - del ws[-1] - else: - return False - - if startpos > endpos: - # Exact end matches required more characters than we have, as in - # _ellipsis_match('aa...aa', 'aaa') - return False - - # For the rest, we only need to find the leftmost non-overlapping - # match for each piece. If there's no overall match that way alone, - # there's no overall match period. - for w in ws: - # w may be '' at times, if there are consecutive ellipses, or - # due to an ellipsis at the start or end of `want`. That's OK. - # Search for an empty string succeeds, and doesn't change startpos. - startpos = got.find(w, startpos, endpos) - if startpos < 0: - return False - startpos += len(w) - - return True - -def _comment_line(line): - "Return a commented form of the given line" - line = line.rstrip() - if line: - return '# '+line - else: - return '#' - -class _OutputRedirectingPdb(pdb.Pdb): - """ - A specialized version of the python debugger that redirects stdout - to a given stream when interacting with the user. Stdout is *not* - redirected when traced code is executed. - """ - def __init__(self, out): - self.__out = out - pdb.Pdb.__init__(self) - - def trace_dispatch(self, *args): - # Redirect stdout to the given stream. - save_stdout = sys.stdout - sys.stdout = self.__out - # Call Pdb's trace dispatch method. - try: - return pdb.Pdb.trace_dispatch(self, *args) - finally: - sys.stdout = save_stdout - -# [XX] Normalize with respect to os.path.pardir? -def _module_relative_path(module, path): - if not inspect.ismodule(module): - raise TypeError('Expected a module: %r' % module) - if path.startswith('/'): - raise ValueError('Module-relative files may not have absolute paths') - - # Find the base directory for the path. 
- if hasattr(module, '__file__'): - # A normal module/package - basedir = os.path.split(module.__file__)[0] - elif module.__name__ == '__main__': - # An interactive session. - if len(sys.argv)>0 and sys.argv[0] != '': - basedir = os.path.split(sys.argv[0])[0] - else: - basedir = os.curdir - else: - # A module w/o __file__ (this includes builtins) - raise ValueError("Can't resolve paths relative to the module " + - module + " (it has no __file__)") - - # Combine the base directory and the path. - return os.path.join(basedir, *(path.split('/'))) - -###################################################################### -## 2. Example & DocTest -###################################################################### -## - An "example" is a pair, where "source" is a -## fragment of source code, and "want" is the expected output for -## "source." The Example class also includes information about -## where the example was extracted from. -## -## - A "doctest" is a collection of examples, typically extracted from -## a string (such as an object's docstring). The DocTest class also -## includes information about where the string was extracted from. - -class Example: - """ - A single doctest example, consisting of source code and expected - output. `Example` defines the following attributes: - - - source: A single Python statement, always ending with a newline. - The constructor adds a newline if needed. - - - want: The expected output from running the source code (either - from stdout, or a traceback in case of exception). `want` ends - with a newline unless it's empty, in which case it's an empty - string. The constructor adds a newline if needed. - - - exc_msg: The exception message generated by the example, if - the example is expected to generate an exception; or `None` if - it is not expected to generate an exception. This exception - message is compared against the return value of - `traceback.format_exception_only()`. 
`exc_msg` ends with a - newline unless it's `None`. The constructor adds a newline - if needed. - - - lineno: The line number within the DocTest string containing - this Example where the Example begins. This line number is - zero-based, with respect to the beginning of the DocTest. - - - indent: The example's indentation in the DocTest string. - I.e., the number of space characters that preceed the - example's first prompt. - - - options: A dictionary mapping from option flags to True or - False, which is used to override default options for this - example. Any option flags not contained in this dictionary - are left at their default value (as specified by the - DocTestRunner's optionflags). By default, no options are set. - """ - def __init__(self, source, want, exc_msg=None, lineno=0, indent=0, - options=None): - # Normalize inputs. - if not source.endswith('\n'): - source += '\n' - if want and not want.endswith('\n'): - want += '\n' - if exc_msg is not None and not exc_msg.endswith('\n'): - exc_msg += '\n' - # Store properties. - self.source = source - self.want = want - self.lineno = lineno - self.indent = indent - if options is None: options = {} - self.options = options - self.exc_msg = exc_msg - -class DocTest: - """ - A collection of doctest examples that should be run in a single - namespace. Each `DocTest` defines the following attributes: - - - examples: the list of examples. - - - globs: The namespace (aka globals) that the examples should - be run in. - - - name: A name identifying the DocTest (typically, the name of - the object whose docstring this DocTest was extracted from). - - - filename: The name of the file that this DocTest was extracted - from, or `None` if the filename is unknown. - - - lineno: The line number within filename where this DocTest - begins, or `None` if the line number is unavailable. This - line number is zero-based, with respect to the beginning of - the file. 
- - - docstring: The string that the examples were extracted from, - or `None` if the string is unavailable. - """ - def __init__(self, examples, globs, name, filename, lineno, docstring): - """ - Create a new DocTest containing the given examples. The - DocTest's globals are initialized with a copy of `globs`. - """ - assert not isinstance(examples, basestring), \ - "DocTest no longer accepts str; use DocTestParser instead" - self.examples = examples - self.docstring = docstring - self.globs = globs.copy() - self.name = name - self.filename = filename - self.lineno = lineno - - def __repr__(self): - if len(self.examples) == 0: - examples = 'no examples' - elif len(self.examples) == 1: - examples = '1 example' - else: - examples = '%d examples' % len(self.examples) - return ('' % - (self.name, self.filename, self.lineno, examples)) - - - # This lets us sort tests by name: - def __cmp__(self, other): - if not isinstance(other, DocTest): - return -1 - return cmp((self.name, self.filename, self.lineno, id(self)), - (other.name, other.filename, other.lineno, id(other))) - -###################################################################### -## 3. DocTestParser -###################################################################### - -class DocTestParser: - """ - A class used to parse strings containing doctest examples. - """ - # This regular expression is used to find doctest examples in a - # string. It defines three groups: `source` is the source code - # (including leading indentation and prompts); `indent` is the - # indentation of the first (PS1) line of the source code; and - # `want` is the expected output (including leading indentation). - _EXAMPLE_RE = re.compile(r''' - # Source consists of a PS1 line followed by zero or more PS2 lines. - (?P - (?:^(?P [ ]*) >>> .*) # PS1 line - (?:\n [ ]* \.\.\. .*)*) # PS2 lines - \n? - # Want consists of any non-blank lines that do not start with PS1. 
- (?P (?:(?![ ]*$) # Not a blank line - (?![ ]*>>>) # Not a line starting with PS1 - .*$\n? # But any other line - )*) - ''', re.MULTILINE | re.VERBOSE) - - # A regular expression for handling `want` strings that contain - # expected exceptions. It divides `want` into three pieces: - # - the traceback header line (`hdr`) - # - the traceback stack (`stack`) - # - the exception message (`msg`), as generated by - # traceback.format_exception_only() - # `msg` may have multiple lines. We assume/require that the - # exception message is the first non-indented line starting with a word - # character following the traceback header line. - _EXCEPTION_RE = re.compile(r""" - # Grab the traceback header. Different versions of Python have - # said different things on the first traceback line. - ^(?P Traceback\ \( - (?: most\ recent\ call\ last - | innermost\ last - ) \) : - ) - \s* $ # toss trailing whitespace on the header. - (?P .*?) # don't blink: absorb stuff until... - ^ (?P \w+ .*) # a line *starts* with alphanum. - """, re.VERBOSE | re.MULTILINE | re.DOTALL) - - # A callable returning a true value iff its argument is a blank line - # or contains a single comment. - _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match - - def parse(self, string, name=''): - """ - Divide the given string into examples and intervening text, - and return them as a list of alternating Examples and strings. - Line numbers for the Examples are 0-based. The optional - argument `name` is a name identifying this string, and is only - used for error messages. - """ - string = string.expandtabs() - # If all lines begin with the same indentation, then strip it. - min_indent = self._min_indent(string) - if min_indent > 0: - string = '\n'.join([l[min_indent:] for l in string.split('\n')]) - - output = [] - charno, lineno = 0, 0 - # Find all doctest examples in the string: - for m in self._EXAMPLE_RE.finditer(string): - # Add the pre-example text to `output`. 
- output.append(string[charno:m.start()]) - # Update lineno (lines before this example) - lineno += string.count('\n', charno, m.start()) - # Extract info from the regexp match. - (source, options, want, exc_msg) = \ - self._parse_example(m, name, lineno) - # Create an Example, and add it to the list. - if not self._IS_BLANK_OR_COMMENT(source): - output.append( Example(source, want, exc_msg, - lineno=lineno, - indent=min_indent+len(m.group('indent')), - options=options) ) - # Update lineno (lines inside this example) - lineno += string.count('\n', m.start(), m.end()) - # Update charno. - charno = m.end() - # Add any remaining post-example text to `output`. - output.append(string[charno:]) - return output - - def get_doctest(self, string, globs, name, filename, lineno): - """ - Extract all doctest examples from the given string, and - collect them into a `DocTest` object. - - `globs`, `name`, `filename`, and `lineno` are attributes for - the new `DocTest` object. See the documentation for `DocTest` - for more information. - """ - return DocTest(self.get_examples(string, name), globs, - name, filename, lineno, string) - - def get_examples(self, string, name=''): - """ - Extract all doctest examples from the given string, and return - them as a list of `Example` objects. Line numbers are - 0-based, because it's most common in doctests that nothing - interesting appears on the same line as opening triple-quote, - and so the first interesting line is called \"line 1\" then. - - The optional argument `name` is a name identifying this - string, and is only used for error messages. 
- """ - return [x for x in self.parse(string, name) - if isinstance(x, Example)] - - def _parse_example(self, m, name, lineno): - """ - Given a regular expression match from `_EXAMPLE_RE` (`m`), - return a pair `(source, want)`, where `source` is the matched - example's source code (with prompts and indentation stripped); - and `want` is the example's expected output (with indentation - stripped). - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - # Get the example's indentation level. - indent = len(m.group('indent')) - - # Divide source into lines; check that they're properly - # indented; and then strip their indentation & prompts. - source_lines = m.group('source').split('\n') - self._check_prompt_blank(source_lines, indent, name, lineno) - self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno) - source = '\n'.join([sl[indent+4:] for sl in source_lines]) - - # Divide want into lines; check that it's properly indented; and - # then strip the indentation. Spaces before the last newline should - # be preserved, so plain rstrip() isn't good enough. - want = m.group('want') - want_lines = want.split('\n') - if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]): - del want_lines[-1] # forget final newline & spaces after it - self._check_prefix(want_lines, ' '*indent, name, - lineno + len(source_lines)) - want = '\n'.join([wl[indent:] for wl in want_lines]) - - # If `want` contains a traceback message, then extract it. - m = self._EXCEPTION_RE.match(want) - if m: - exc_msg = m.group('msg') - else: - exc_msg = None - - # Extract options from the source. - options = self._find_options(source, name, lineno) - - return source, options, want, exc_msg - - # This regular expression looks for option directives in the - # source code of an example. Option directives are comments - # starting with "doctest:". 
Warning: this may give false - # positives for string-literals that contain the string - # "#doctest:". Eliminating these false positives would require - # actually parsing the string; but we limit them by ignoring any - # line containing "#doctest:" that is *followed* by a quote mark. - _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$', - re.MULTILINE) - - def _find_options(self, source, name, lineno): - """ - Return a dictionary containing option overrides extracted from - option directives in the given source string. - - `name` is the string's name, and `lineno` is the line number - where the example starts; both are used for error messages. - """ - options = {} - # (note: with the current regexp, this will match at most once:) - for m in self._OPTION_DIRECTIVE_RE.finditer(source): - option_strings = m.group(1).replace(',', ' ').split() - for option in option_strings: - if (option[0] not in '+-' or - option[1:] not in OPTIONFLAGS_BY_NAME): - raise ValueError('line %r of the doctest for %s ' - 'has an invalid option: %r' % - (lineno+1, name, option)) - flag = OPTIONFLAGS_BY_NAME[option[1:]] - options[flag] = (option[0] == '+') - if options and self._IS_BLANK_OR_COMMENT(source): - raise ValueError('line %r of the doctest for %s has an option ' - 'directive on a line with no example: %r' % - (lineno, name, source)) - return options - - # This regular expression finds the indentation of every non-blank - # line in a string. - _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE) - - def _min_indent(self, s): - "Return the minimum indentation of any non-blank line in `s`" - indents = [len(indent) for indent in self._INDENT_RE.findall(s)] - if len(indents) > 0: - return min(indents) - else: - return 0 - - def _check_prompt_blank(self, lines, indent, name, lineno): - """ - Given the lines of a source string (including prompts and - leading indentation), check to make sure that every prompt is - followed by a space character. 
If any line is not followed by - a space character, then raise ValueError. - """ - for i, line in enumerate(lines): - if len(line) >= indent+4 and line[indent+3] != ' ': - raise ValueError('line %r of the docstring for %s ' - 'lacks blank after %s: %r' % - (lineno+i+1, name, - line[indent:indent+3], line)) - - def _check_prefix(self, lines, prefix, name, lineno): - """ - Check that every line in the given list starts with the given - prefix; if any line does not, then raise a ValueError. - """ - for i, line in enumerate(lines): - if line and not line.startswith(prefix): - raise ValueError('line %r of the docstring for %s has ' - 'inconsistent leading whitespace: %r' % - (lineno+i+1, name, line)) - - -###################################################################### -## 4. DocTest Finder -###################################################################### - -class DocTestFinder: - """ - A class used to extract the DocTests that are relevant to a given - object, from its docstring and the docstrings of its contained - objects. Doctests can currently be extracted from the following - object types: modules, functions, classes, methods, staticmethods, - classmethods, and properties. - """ - - def __init__(self, verbose=False, parser=DocTestParser(), - recurse=True, _namefilter=None, exclude_empty=True): - """ - Create a new doctest finder. - - The optional argument `parser` specifies a class or - function that should be used to create new DocTest objects (or - objects that implement the same interface as DocTest). The - signature for this factory function should match the signature - of the DocTest constructor. - - If the optional argument `recurse` is false, then `find` will - only examine the given object, and not any contained objects. - - If the optional argument `exclude_empty` is false, then `find` - will include tests for objects with empty docstrings. 
- """ - self._parser = parser - self._verbose = verbose - self._recurse = recurse - self._exclude_empty = exclude_empty - # _namefilter is undocumented, and exists only for temporary backward- - # compatibility support of testmod's deprecated isprivate mess. - self._namefilter = _namefilter - - def find(self, obj, name=None, module=None, globs=None, - extraglobs=None): - """ - Return a list of the DocTests that are defined by the given - object's docstring, or by any of its contained objects' - docstrings. - - The optional parameter `module` is the module that contains - the given object. If the module is not specified or is None, then - the test finder will attempt to automatically determine the - correct module. The object's module is used: - - - As a default namespace, if `globs` is not specified. - - To prevent the DocTestFinder from extracting DocTests - from objects that are imported from other modules. - - To find the name of the file containing the object. - - To help find the line number of the object within its - file. - - Contained objects whose module does not match `module` are ignored. - - If `module` is False, no attempt to find the module will be made. - This is obscure, of use mostly in tests: if `module` is False, or - is None but cannot be found automatically, then all objects are - considered to belong to the (non-existent) module, so all contained - objects will (recursively) be searched for doctests. - - The globals for each DocTest is formed by combining `globs` - and `extraglobs` (bindings in `extraglobs` override bindings - in `globs`). A new copy of the globals dictionary is created - for each DocTest. If `globs` is not specified, then it - defaults to the module's `__dict__`, if specified, or {} - otherwise. If `extraglobs` is not specified, then it defaults - to {}. - - """ - # If name was not specified, then extract it from the object. 
- if name is None: - name = getattr(obj, '__name__', None) - if name is None: - raise ValueError("DocTestFinder.find: name must be given " - "when obj.__name__ doesn't exist: %r" % - (type(obj),)) - - # Find the module that contains the given object (if obj is - # a module, then module=obj.). Note: this may fail, in which - # case module will be None. - if module is False: - module = None - elif module is None: - module = inspect.getmodule(obj) - - # Read the module's source code. This is used by - # DocTestFinder._find_lineno to find the line number for a - # given object's docstring. - try: - file = inspect.getsourcefile(obj) or inspect.getfile(obj) - source_lines = linecache.getlines(file) - if not source_lines: - source_lines = None - except TypeError: - source_lines = None - - # Initialize globals, and merge in extraglobs. - if globs is None: - if module is None: - globs = {} - else: - globs = module.__dict__.copy() - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - # Recursively expore `obj`, extracting DocTests. - tests = [] - self._find(tests, obj, name, module, source_lines, globs, {}) - return tests - - def _filter(self, obj, prefix, base): - """ - Return true if the given object should not be examined. - """ - return (self._namefilter is not None and - self._namefilter(prefix, base)) - - def _from_module(self, module, object): - """ - Return true if the given object is defined in the given - module. - """ - if module is None: - return True - elif inspect.isfunction(object): - return module.__dict__ is func_globals(object) - elif inspect.isclass(object): - return module.__name__ == object.__module__ - elif inspect.getmodule(object) is not None: - return module is inspect.getmodule(object) - elif hasattr(object, '__module__'): - return module.__name__ == object.__module__ - elif isinstance(object, property): - return True # [XX] no way not be sure. 
- else: - raise ValueError("object must be a class or function") - - def _find(self, tests, obj, name, module, source_lines, globs, seen): - """ - Find tests for the given object and any contained objects, and - add them to `tests`. - """ - if self._verbose: - print('Finding tests in %s' % name) - - # If we've already processed this object, then ignore it. - if id(obj) in seen: - return - seen[id(obj)] = 1 - - # Find a test for this object, and add it to the list of tests. - test = self._get_test(obj, name, module, globs, source_lines) - if test is not None: - tests.append(test) - - # Look for tests in a module's contained objects. - if inspect.ismodule(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. - if self._filter(val, name, valname): - continue - valname = '%s.%s' % (name, valname) - # Recurse to functions & classes. - if ((inspect.isfunction(val) or inspect.isclass(val)) and - self._from_module(module, val)): - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a module's __test__ dictionary. - if inspect.ismodule(obj) and self._recurse: - for valname, val in getattr(obj, '__test__', {}).items(): - if not isinstance(valname, basestring): - raise ValueError("DocTestFinder.find: __test__ keys " - "must be strings: %r" % - (type(valname),)) - if not (inspect.isfunction(val) or inspect.isclass(val) or - inspect.ismethod(val) or inspect.ismodule(val) or - isinstance(val, basestring)): - raise ValueError("DocTestFinder.find: __test__ values " - "must be strings, functions, methods, " - "classes, or modules: %r" % - (type(val),)) - valname = '%s.__test__.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - # Look for tests in a class's contained objects. - if inspect.isclass(obj) and self._recurse: - for valname, val in obj.__dict__.items(): - # Check if this contained object should be ignored. 
- if self._filter(val, name, valname): - continue - # Special handling for staticmethod/classmethod. - if isinstance(val, staticmethod): - val = getattr(obj, valname) - if isinstance(val, classmethod): - val = im_func(getattr(obj, valname)) - - # Recurse to methods, properties, and nested classes. - if ((inspect.isfunction(val) or inspect.isclass(val) or - isinstance(val, property)) and - self._from_module(module, val)): - valname = '%s.%s' % (name, valname) - self._find(tests, val, valname, module, source_lines, - globs, seen) - - def _get_test(self, obj, name, module, globs, source_lines): - """ - Return a DocTest for the given object, if it defines a docstring; - otherwise, return None. - """ - # Extract the object's docstring. If it doesn't have one, - # then return None (no test for this object). - if isinstance(obj, basestring): - docstring = obj - else: - try: - if obj.__doc__ is None: - docstring = '' - else: - docstring = obj.__doc__ - if not isinstance(docstring, basestring): - docstring = str(docstring) - except (TypeError, AttributeError): - docstring = '' - - # Find the docstring's location in the file. - lineno = self._find_lineno(obj, source_lines) - - # Don't bother if the docstring is empty. - if self._exclude_empty and not docstring: - return None - - # Return a DocTest for this object. - if module is None: - filename = None - else: - filename = getattr(module, '__file__', module.__name__) - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - return self._parser.get_doctest(docstring, globs, name, - filename, lineno) - - def _find_lineno(self, obj, source_lines): - """ - Return a line number of the given object's docstring. Note: - this method assumes that the object has a docstring. - """ - lineno = None - - # Find the line number for modules. - if inspect.ismodule(obj): - lineno = 0 - - # Find the line number for classes. - # Note: this could be fooled if a class is defined multiple - # times in a single file. 
- if inspect.isclass(obj): - if source_lines is None: - return None - pat = re.compile(r'^\s*class\s*%s\b' % - getattr(obj, '__name__', '-')) - for i, line in enumerate(source_lines): - if pat.match(line): - lineno = i - break - - # Find the line number for functions & methods. - if inspect.ismethod(obj): obj = im_func(obj) - if inspect.isfunction(obj): obj = func_code(obj) - if inspect.istraceback(obj): obj = obj.tb_frame - if inspect.isframe(obj): obj = obj.f_code - if inspect.iscode(obj): - lineno = getattr(obj, 'co_firstlineno', None)-1 - - # Find the line number where the docstring starts. Assume - # that it's the first line that begins with a quote mark. - # Note: this could be fooled by a multiline function - # signature, where a continuation line begins with a quote - # mark. - if lineno is not None: - if source_lines is None: - return lineno+1 - pat = re.compile('(^|.*:)\s*\w*("|\')') - for lineno in range(lineno, len(source_lines)): - if pat.match(source_lines[lineno]): - return lineno - - # We couldn't find the line number. - return None - -###################################################################### -## 5. DocTest Runner -###################################################################### - -class DocTestRunner: - """ - A class used to run DocTest test cases, and accumulate statistics. - The `run` method is used to process a single DocTest case. It - returns a tuple `(f, t)`, where `t` is the number of test cases - tried, and `f` is the number of test cases that failed. - - >>> tests = DocTestFinder().find(_TestClass) - >>> runner = DocTestRunner(verbose=False) - >>> for test in tests: - ... 
print runner.run(test) - (0, 2) - (0, 1) - (0, 2) - (0, 2) - - The `summarize` method prints a summary of all the test cases that - have been run by the runner, and returns an aggregated `(f, t)` - tuple: - - >>> runner.summarize(verbose=1) - 4 items passed all tests: - 2 tests in _TestClass - 2 tests in _TestClass.__init__ - 2 tests in _TestClass.get - 1 tests in _TestClass.square - 7 tests in 4 items. - 7 passed and 0 failed. - Test passed. - (0, 7) - - The aggregated number of tried examples and failed examples is - also available via the `tries` and `failures` attributes: - - >>> runner.tries - 7 - >>> runner.failures - 0 - - The comparison between expected outputs and actual outputs is done - by an `OutputChecker`. This comparison may be customized with a - number of option flags; see the documentation for `testmod` for - more information. If the option flags are insufficient, then the - comparison may also be customized by passing a subclass of - `OutputChecker` to the constructor. - - The test runner's display output can be controlled in two ways. - First, an output function (`out) can be passed to - `TestRunner.run`; this function will be called with strings that - should be displayed. It defaults to `sys.stdout.write`. If - capturing the output is not sufficient, then the display output - can be also customized by subclassing DocTestRunner, and - overriding the methods `report_start`, `report_success`, - `report_unexpected_exception`, and `report_failure`. - """ - # This divider string is used to separate failure messages, and to - # separate sections of the summary. - DIVIDER = "*" * 70 - - def __init__(self, checker=None, verbose=None, optionflags=0): - """ - Create a new test runner. - - Optional keyword arg `checker` is the `OutputChecker` that - should be used to compare the expected outputs and actual - outputs of doctest examples. 
- - Optional keyword arg 'verbose' prints lots of stuff if true, - only failures if false; by default, it's true iff '-v' is in - sys.argv. - - Optional argument `optionflags` can be used to control how the - test runner compares expected output to actual output, and how - it displays failures. See the documentation for `testmod` for - more information. - """ - self._checker = checker or OutputChecker() - if verbose is None: - verbose = '-v' in sys.argv - self._verbose = verbose - self.optionflags = optionflags - self.original_optionflags = optionflags - - # Keep track of the examples we've run. - self.tries = 0 - self.failures = 0 - self._name2ft = {} - - # Create a fake output target for capturing doctest output. - self._fakeout = _SpoofOut() - - #///////////////////////////////////////////////////////////////// - # Reporting methods - #///////////////////////////////////////////////////////////////// - - def report_start(self, out, test, example): - """ - Report that the test runner is about to process the given - example. (Only displays a message if verbose=True) - """ - if self._verbose: - if example.want: - out('Trying:\n' + _indent(example.source) + - 'Expecting:\n' + _indent(example.want)) - else: - out('Trying:\n' + _indent(example.source) + - 'Expecting nothing\n') - - def report_success(self, out, test, example, got): - """ - Report that the given example ran successfully. (Only - displays a message if verbose=True) - """ - if self._verbose: - out("ok\n") - - def report_failure(self, out, test, example, got): - """ - Report that the given example failed. - """ - out(self._failure_header(test, example) + - self._checker.output_difference(example, got, self.optionflags)) - - def report_unexpected_exception(self, out, test, example, exc_info): - """ - Report that the given example raised an unexpected exception. 
- """ - out(self._failure_header(test, example) + - 'Exception raised:\n' + _indent(_exception_traceback(exc_info))) - - def _failure_header(self, test, example): - out = [self.DIVIDER] - if test.filename: - if test.lineno is not None and example.lineno is not None: - lineno = test.lineno + example.lineno + 1 - else: - lineno = '?' - out.append('File "%s", line %s, in %s' % - (test.filename, lineno, test.name)) - else: - out.append('Line %s, in %s' % (example.lineno+1, test.name)) - out.append('Failed example:') - source = example.source - out.append(_indent(source)) - return '\n'.join(out) - - #///////////////////////////////////////////////////////////////// - # DocTest Running - #///////////////////////////////////////////////////////////////// - - def __run(self, test, compileflags, out): - """ - Run the examples in `test`. Write the outcome of each example - with one of the `DocTestRunner.report_*` methods, using the - writer function `out`. `compileflags` is the set of compiler - flags that should be used to execute examples. Return a tuple - `(f, t)`, where `t` is the number of examples tried, and `f` - is the number of examples that failed. The examples are run - in the namespace `test.globs`. - """ - # Keep track of the number of failures and tries. - failures = tries = 0 - - # Save the option flags (since option directives can be used - # to modify them). - original_optionflags = self.optionflags - - SUCCESS, FAILURE, BOOM = range(3) # `outcome` state - - check = self._checker.check_output - - # Process each example. - for examplenum, example in enumerate(test.examples): - - # If REPORT_ONLY_FIRST_FAILURE is set, then supress - # reporting after the first failure. - quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and - failures > 0) - - # Merge in the example's options. 
- self.optionflags = original_optionflags - if example.options: - for (optionflag, val) in example.options.items(): - if val: - self.optionflags |= optionflag - else: - self.optionflags &= ~optionflag - - # Record that we started this example. - tries += 1 - if not quiet: - self.report_start(out, test, example) - - # Use a special filename for compile(), so we can retrieve - # the source code during interactive debugging (see - # __patched_linecache_getlines). - filename = '' % (test.name, examplenum) - - # Run the example in the given context (globs), and record - # any exception that gets raised. (But don't intercept - # keyboard interrupts.) - try: - # Don't blink! This is where the user's code gets run. - exec(compile(example.source, filename, "single", - compileflags, 1), test.globs) - self.debugger.set_continue() # ==== Example Finished ==== - exception = None - except KeyboardInterrupt: - raise - except: - exception = sys.exc_info() - self.debugger.set_continue() # ==== Example Finished ==== - - got = self._fakeout.getvalue() # the actual output - self._fakeout.truncate(0) - outcome = FAILURE # guilty until proved innocent or insane - - # If the example executed without raising any exceptions, - # verify its output. - if exception is None: - if check(example.want, got, self.optionflags): - outcome = SUCCESS - - # The example raised an exception: check if it was expected. - else: - exc_info = sys.exc_info() - exc_msg = traceback.format_exception_only(*exc_info[:2])[-1] - if not quiet: - got += _exception_traceback(exc_info) - - # If `example.exc_msg` is None, then we weren't expecting - # an exception. - if example.exc_msg is None: - outcome = BOOM - - # We expected an exception: see whether it matches. - elif check(example.exc_msg, exc_msg, self.optionflags): - outcome = SUCCESS - - # Another chance if they didn't care about the detail. 
- elif self.optionflags & IGNORE_EXCEPTION_DETAIL: - m1 = re.match(r'[^:]*:', example.exc_msg) - m2 = re.match(r'[^:]*:', exc_msg) - if m1 and m2 and check(m1.group(0), m2.group(0), - self.optionflags): - outcome = SUCCESS - - # Report the outcome. - if outcome is SUCCESS: - if not quiet: - self.report_success(out, test, example, got) - elif outcome is FAILURE: - if not quiet: - self.report_failure(out, test, example, got) - failures += 1 - elif outcome is BOOM: - if not quiet: - self.report_unexpected_exception(out, test, example, - exc_info) - failures += 1 - else: - assert False, ("unknown outcome", outcome) - - # Restore the option flags (in case they were modified) - self.optionflags = original_optionflags - - # Record and return the number of failures and tries. - self.__record_outcome(test, failures, tries) - return failures, tries - - def __record_outcome(self, test, f, t): - """ - Record the fact that the given DocTest (`test`) generated `f` - failures out of `t` tried examples. - """ - f2, t2 = self._name2ft.get(test.name, (0,0)) - self._name2ft[test.name] = (f+f2, t+t2) - self.failures += f - self.tries += t - - __LINECACHE_FILENAME_RE = re.compile(r'[\w\.]+)' - r'\[(?P\d+)\]>$') - def __patched_linecache_getlines(self, filename, module_globals=None): - m = self.__LINECACHE_FILENAME_RE.match(filename) - if m and m.group('name') == self.test.name: - example = self.test.examples[int(m.group('examplenum'))] - return example.source.splitlines(True) - elif func_code(self.save_linecache_getlines).co_argcount > 1: - return self.save_linecache_getlines(filename, module_globals) - else: - return self.save_linecache_getlines(filename) - - def run(self, test, compileflags=None, out=None, clear_globs=True): - """ - Run the examples in `test`, and display the results using the - writer function `out`. - - The examples are run in the namespace `test.globs`. 
If - `clear_globs` is true (the default), then this namespace will - be cleared after the test runs, to help with garbage - collection. If you would like to examine the namespace after - the test completes, then use `clear_globs=False`. - - `compileflags` gives the set of flags that should be used by - the Python compiler when running the examples. If not - specified, then it will default to the set of future-import - flags that apply to `globs`. - - The output of each example is checked using - `DocTestRunner.check_output`, and the results are formatted by - the `DocTestRunner.report_*` methods. - """ - self.test = test - - if compileflags is None: - compileflags = _extract_future_flags(test.globs) - - save_stdout = sys.stdout - if out is None: - out = save_stdout.write - sys.stdout = self._fakeout - - # Patch pdb.set_trace to restore sys.stdout during interactive - # debugging (so it's not still redirected to self._fakeout). - # Note that the interactive output will go to *our* - # save_stdout, even if that's not the real sys.stdout; this - # allows us to write test cases for the set_trace behavior. - save_set_trace = pdb.set_trace - self.debugger = _OutputRedirectingPdb(save_stdout) - self.debugger.reset() - pdb.set_trace = self.debugger.set_trace - - # Patch linecache.getlines, so we can see the example's source - # when we're inside the debugger. 
- self.save_linecache_getlines = linecache.getlines - linecache.getlines = self.__patched_linecache_getlines - - try: - return self.__run(test, compileflags, out) - finally: - sys.stdout = save_stdout - pdb.set_trace = save_set_trace - linecache.getlines = self.save_linecache_getlines - if clear_globs: - test.globs.clear() - - #///////////////////////////////////////////////////////////////// - # Summarization - #///////////////////////////////////////////////////////////////// - def summarize(self, verbose=None): - """ - Print a summary of all the test cases that have been run by - this DocTestRunner, and return a tuple `(f, t)`, where `f` is - the total number of failed examples, and `t` is the total - number of tried examples. - - The optional `verbose` argument controls how detailed the - summary is. If the verbosity is not specified, then the - DocTestRunner's verbosity is used. - """ - if verbose is None: - verbose = self._verbose - notests = [] - passed = [] - failed = [] - totalt = totalf = 0 - for x in self._name2ft.items(): - name, (f, t) = x - assert f <= t - totalt += t - totalf += f - if t == 0: - notests.append(name) - elif f == 0: - passed.append( (name, t) ) - else: - failed.append(x) - if verbose: - if notests: - print(len(notests), "items had no tests:") - notests.sort() - for thing in notests: - print(" ", thing) - if passed: - print(len(passed), "items passed all tests:") - passed.sort() - for thing, count in passed: - print(" %3d tests in %s" % (count, thing)) - if failed: - print(self.DIVIDER) - print(len(failed), "items had failures:") - failed.sort() - for thing, (f, t) in failed: - print(" %3d of %3d in %s" % (f, t, thing)) - if verbose: - print(totalt, "tests in", len(self._name2ft), "items.") - print(totalt - totalf, "passed and", totalf, "failed.") - if totalf: - print("***Test Failed***", totalf, "failures.") - elif verbose: - print("Test passed.") - return totalf, totalt - - 
#///////////////////////////////////////////////////////////////// - # Backward compatibility cruft to maintain doctest.master. - #///////////////////////////////////////////////////////////////// - def merge(self, other): - d = self._name2ft - for name, (f, t) in other._name2ft.items(): - if name in d: - print("*** DocTestRunner.merge: '" + name + "' in both" \ - " testers; summing outcomes.") - f2, t2 = d[name] - f = f + f2 - t = t + t2 - d[name] = f, t - -class OutputChecker: - """ - A class used to check the whether the actual output from a doctest - example matches the expected output. `OutputChecker` defines two - methods: `check_output`, which compares a given pair of outputs, - and returns true if they match; and `output_difference`, which - returns a string describing the differences between two outputs. - """ - def check_output(self, want, got, optionflags): - """ - Return True iff the actual output from an example (`got`) - matches the expected output (`want`). These strings are - always considered to match if they are identical; but - depending on what option flags the test runner is using, - several non-exact match types are also possible. See the - documentation for `TestRunner` for more information about - option flags. - """ - # Handle the common case first, for efficiency: - # if they're string-identical, always return true. - if got == want: - return True - - # The values True and False replaced 1 and 0 as the return - # value for boolean comparisons in Python 2.3. - if not (optionflags & DONT_ACCEPT_TRUE_FOR_1): - if (got,want) == ("True\n", "1\n"): - return True - if (got,want) == ("False\n", "0\n"): - return True - - # can be used as a special sequence to signify a - # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - # Replace in want with a blank line. 
- want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER), - '', want) - # If a line in got contains only spaces, then remove the - # spaces. - got = re.sub('(?m)^\s*?$', '', got) - if got == want: - return True - - # This flag causes doctest to ignore any differences in the - # contents of whitespace strings. Note that this can be used - # in conjunction with the ELLIPSIS flag. - if optionflags & NORMALIZE_WHITESPACE: - got = ' '.join(got.split()) - want = ' '.join(want.split()) - if got == want: - return True - - # The ELLIPSIS flag says to let the sequence "..." in `want` - # match any substring in `got`. - if optionflags & ELLIPSIS: - if _ellipsis_match(want, got): - return True - - # We didn't find any match; return false. - return False - - # Should we do a fancy diff? - def _do_a_fancy_diff(self, want, got, optionflags): - # Not unless they asked for a fancy diff. - if not optionflags & (REPORT_UDIFF | - REPORT_CDIFF | - REPORT_NDIFF): - return False - - # If expected output uses ellipsis, a meaningful fancy diff is - # too hard ... or maybe not. In two real-life failures Tim saw, - # a diff was a major help anyway, so this is commented out. - # [todo] _ellipsis_match() knows which pieces do and don't match, - # and could be the basis for a kick-ass diff in this case. - ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want: - ## return False - - # ndiff does intraline difference marking, so can be useful even - # for 1-line differences. - if optionflags & REPORT_NDIFF: - return True - - # The other diff types need at least a few lines to be helpful. - return want.count('\n') > 2 and got.count('\n') > 2 - - def output_difference(self, example, got, optionflags): - """ - Return a string describing the differences between the - expected output for a given example (`example`) and the actual - output (`got`). `optionflags` is the set of option flags used - to compare `want` and `got`. 
- """ - want = example.want - # If s are being used, then replace blank lines - # with in the actual output string. - if not (optionflags & DONT_ACCEPT_BLANKLINE): - got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got) - - # Check if we should use diff. - if self._do_a_fancy_diff(want, got, optionflags): - # Split want & got into lines. - want_lines = want.splitlines(True) # True == keep line ends - got_lines = got.splitlines(True) - # Use difflib to find their differences. - if optionflags & REPORT_UDIFF: - diff = difflib.unified_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'unified diff with -expected +actual' - elif optionflags & REPORT_CDIFF: - diff = difflib.context_diff(want_lines, got_lines, n=2) - diff = list(diff)[2:] # strip the diff header - kind = 'context diff with expected followed by actual' - elif optionflags & REPORT_NDIFF: - engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK) - diff = list(engine.compare(want_lines, got_lines)) - kind = 'ndiff with -expected +actual' - else: - assert 0, 'Bad diff option' - # Remove trailing whitespace on diff output. - diff = [line.rstrip() + '\n' for line in diff] - return 'Differences (%s):\n' % kind + _indent(''.join(diff)) - - # If we're not using diff, then simply list the expected - # output followed by the actual output. - if want and got: - return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got)) - elif want: - return 'Expected:\n%sGot nothing\n' % _indent(want) - elif got: - return 'Expected nothing\nGot:\n%s' % _indent(got) - else: - return 'Expected nothing\nGot nothing\n' - -class DocTestFailure(Exception): - """A DocTest example has failed in debugging mode. 
- - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - got: the actual output - """ - def __init__(self, test, example, got): - self.test = test - self.example = example - self.got = got - - def __str__(self): - return str(self.test) - -class UnexpectedException(Exception): - """A DocTest example has encountered an unexpected exception - - The exception instance has variables: - - - test: the DocTest object being run - - - excample: the Example object that failed - - - exc_info: the exception info - """ - def __init__(self, test, example, exc_info): - self.test = test - self.example = example - self.exc_info = exc_info - - def __str__(self): - return str(self.test) - -class DebugRunner(DocTestRunner): - r"""Run doc tests but raise an exception as soon as there is a failure. - - If an unexpected exception occurs, an UnexpectedException is raised. - It contains the test, the example, and the original exception: - - >>> runner = DebugRunner(verbose=False) - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> try: - ... runner.run(test) - ... except UnexpectedException, failure: - ... pass - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... - KeyError - - We wrap the original exception to give the calling application - access to the test and example information. - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> try: - ... runner.run(test) - ... except DocTestFailure, failure: - ... 
pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - If a failure or error occurs, the globals are left intact: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 1} - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... >>> raise KeyError - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - Traceback (most recent call last): - ... - UnexpectedException: - - >>> del test.globs['__builtins__'] - >>> test.globs - {'x': 2} - - But the globals are cleared if there is no error: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 2 - ... ''', {}, 'foo', 'foo.py', 0) - - >>> runner.run(test) - (0, 1) - - >>> test.globs - {} - - """ - - def run(self, test, compileflags=None, out=None, clear_globs=True): - r = DocTestRunner.run(self, test, compileflags, out, False) - if clear_globs: - test.globs.clear() - return r - - def report_unexpected_exception(self, out, test, example, exc_info): - raise UnexpectedException(test, example, exc_info) - - def report_failure(self, out, test, example, got): - raise DocTestFailure(test, example, got) - -###################################################################### -## 6. Test Functions -###################################################################### -# These should be backwards compatible. - -# For backward compatibility, a global instance of a DocTestRunner -# class, updated by testmod. 
-master = None - -def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, - raise_on_error=False, exclude_empty=False): - """m=None, name=None, globs=None, verbose=None, isprivate=None, - report=True, optionflags=0, extraglobs=None, raise_on_error=False, - exclude_empty=False - - Test examples in docstrings in functions and classes reachable - from module m (or the current module if m is not supplied), starting - with m.__doc__. Unless isprivate is specified, private names - are not skipped. - - Also test examples reachable from dict m.__test__ if it exists and is - not None. m.__test__ maps names to functions, classes and strings; - function and class docstrings are tested even if the name is private; - strings are tested directly, as if they were docstrings. - - Return (#failures, #tests). - - See doctest.__doc__ for an overview. - - Optional keyword arg "name" gives the name of the module; by default - use m.__name__. - - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use m.__dict__. A copy of this - dict is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. This is new in 2.4. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. This is new in 2.3. 
Possible values (see the - docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Deprecated in Python 2.4: - Optional keyword arg "isprivate" specifies a function used to - determine whether a name is private. The default function is - treat all functions as public. Optionally, "isprivate" can be - set to doctest.is_private to skip over functions marked as private - using the underscore naming convention; see its docs for details. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. - """ - global master - - if isprivate is not None: - warnings.warn("the isprivate argument is deprecated; " - "examine DocTestFinder.find() lists instead", - DeprecationWarning) - - # If no module was given, then use __main__. - if m is None: - # DWA - m will still be None if this wasn't invoked from the command - # line, in which case the following TypeError is about as good an error - # as we should expect - m = sys.modules.get('__main__') - - # Check that we were actually given a module. - if not inspect.ismodule(m): - raise TypeError("testmod: module required; %r" % (m,)) - - # If no name was given, then use the module's name. - if name is None: - name = m.__name__ - - # Find, parse, and run all tests in the given module. 
- finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - for test in finder.find(m, name, globs=globs, extraglobs=extraglobs): - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def testfile(filename, module_relative=True, name=None, package=None, - globs=None, verbose=None, report=True, optionflags=0, - extraglobs=None, raise_on_error=False, parser=DocTestParser()): - """ - Test examples in the given file. Return (#failures, #tests). - - Optional keyword arg "module_relative" specifies how filenames - should be interpreted: - - - If "module_relative" is True (the default), then "filename" - specifies a module-relative path. By default, this path is - relative to the calling module's directory; but if the - "package" argument is specified, then it is relative to that - package. To ensure os-independence, "filename" should use - "/" characters to separate path segments, and should not - be an absolute path (i.e., it may not begin with "/"). - - - If "module_relative" is False, then "filename" specifies an - os-specific path. The path may be absolute or relative (to - the current working directory). - - Optional keyword arg "name" gives the name of the test; by default - use the file's basename. - - Optional keyword argument "package" is a Python package or the - name of a Python package whose directory should be used as the - base directory for a module relative filename. If no package is - specified, then the calling module's directory is used as the base - directory for module relative filenames. It is an error to - specify "package" if "module_relative" is False. 
- - Optional keyword arg "globs" gives a dict to be used as the globals - when executing examples; by default, use {}. A copy of this dict - is actually used for each docstring, so that each docstring's - examples start with a clean slate. - - Optional keyword arg "extraglobs" gives a dictionary that should be - merged into the globals that are used to execute examples. By - default, no extra globals are used. - - Optional keyword arg "verbose" prints lots of stuff if true, prints - only failures if false; by default, it's true iff "-v" is in sys.argv. - - Optional keyword arg "report" prints a summary at the end when true, - else prints nothing at the end. In verbose mode, the summary is - detailed, else very brief (in fact, empty if all tests passed). - - Optional keyword arg "optionflags" or's together module constants, - and defaults to 0. Possible values (see the docs for details): - - DONT_ACCEPT_TRUE_FOR_1 - DONT_ACCEPT_BLANKLINE - NORMALIZE_WHITESPACE - ELLIPSIS - IGNORE_EXCEPTION_DETAIL - REPORT_UDIFF - REPORT_CDIFF - REPORT_NDIFF - REPORT_ONLY_FIRST_FAILURE - - Optional keyword arg "raise_on_error" raises an exception on the - first unexpected exception or failure. This allows failures to be - post-mortem debugged. - - Optional keyword arg "parser" specifies a DocTestParser (or - subclass) that should be used to extract tests from the files. - - Advanced tomfoolery: testmod runs methods of a local instance of - class doctest.Tester, then merges the results into (or creates) - global Tester instance doctest.master. Methods of doctest.master - can be called directly too, if you want to do something unusual. - Passing report=0 to testmod is especially useful then, to delay - displaying a summary. Invoke doctest.master.summarize(verbose) - when you're done fiddling. 
- """ - global master - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path - if module_relative: - package = _normalize_module(package) - filename = _module_relative_path(package, filename) - - # If no name was given, then use the file's name. - if name is None: - name = os.path.basename(filename) - - # Assemble the globals. - if globs is None: - globs = {} - else: - globs = globs.copy() - if extraglobs is not None: - globs.update(extraglobs) - - if raise_on_error: - runner = DebugRunner(verbose=verbose, optionflags=optionflags) - else: - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - - # Read the file, convert it to a test, and run it. - f = open(filename) - s = f.read() - f.close() - test = parser.get_doctest(s, globs, name, filename, 0) - runner.run(test) - - if report: - runner.summarize() - - if master is None: - master = runner - else: - master.merge(runner) - - return runner.failures, runner.tries - -def run_docstring_examples(f, globs, verbose=False, name="NoName", - compileflags=None, optionflags=0): - """ - Test examples in the given object's docstring (`f`), using `globs` - as globals. Optional argument `name` is used in failure messages. - If the optional argument `verbose` is true, then generate output - even if there are no failures. - - `compileflags` gives the set of flags that should be used by the - Python compiler when running the examples. If not specified, then - it will default to the set of future-import flags that apply to - `globs`. - - Optional keyword arg `optionflags` specifies options for the - testing and output. See the documentation for `testmod` for more - information. - """ - # Find, parse, and run all tests in the given module. 
- finder = DocTestFinder(verbose=verbose, recurse=False) - runner = DocTestRunner(verbose=verbose, optionflags=optionflags) - for test in finder.find(f, name, globs=globs): - runner.run(test, compileflags=compileflags) - -###################################################################### -## 7. Tester -###################################################################### -# This is provided only for backwards compatibility. It's not -# actually used in any way. - -class Tester: - def __init__(self, mod=None, globs=None, verbose=None, - isprivate=None, optionflags=0): - - warnings.warn("class Tester is deprecated; " - "use class doctest.DocTestRunner instead", - DeprecationWarning, stacklevel=2) - if mod is None and globs is None: - raise TypeError("Tester.__init__: must specify mod or globs") - if mod is not None and not inspect.ismodule(mod): - raise TypeError("Tester.__init__: mod must be a module; %r" % - (mod,)) - if globs is None: - globs = mod.__dict__ - self.globs = globs - - self.verbose = verbose - self.isprivate = isprivate - self.optionflags = optionflags - self.testfinder = DocTestFinder(_namefilter=isprivate) - self.testrunner = DocTestRunner(verbose=verbose, - optionflags=optionflags) - - def runstring(self, s, name): - test = DocTestParser().get_doctest(s, self.globs, name, None, None) - if self.verbose: - print("Running string", name) - (f,t) = self.testrunner.run(test) - if self.verbose: - print(f, "of", t, "examples failed in string", name) - return (f,t) - - def rundoc(self, object, name=None, module=None): - f = t = 0 - tests = self.testfinder.find(object, name, module=module, - globs=self.globs) - for test in tests: - (f2, t2) = self.testrunner.run(test) - (f,t) = (f+f2, t+t2) - return (f,t) - - def rundict(self, d, name, module=None): - import types - m = types.ModuleType(name) - m.__dict__.update(d) - if module is None: - module = False - return self.rundoc(m, name, module) - - def run__test__(self, d, name): - import types - m = 
types.ModuleType(name) - m.__test__ = d - return self.rundoc(m, name) - - def summarize(self, verbose=None): - return self.testrunner.summarize(verbose) - - def merge(self, other): - self.testrunner.merge(other.testrunner) - -###################################################################### -## 8. Unittest Support -###################################################################### - -_unittest_reportflags = 0 - -def set_unittest_reportflags(flags): - """Sets the unittest option flags. - - The old flag is returned so that a runner could restore the old - value if it wished to: - - >>> old = _unittest_reportflags - >>> set_unittest_reportflags(REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) == old - True - - >>> import doctest - >>> doctest._unittest_reportflags == (REPORT_NDIFF | - ... REPORT_ONLY_FIRST_FAILURE) - True - - Only reporting flags can be set: - - >>> set_unittest_reportflags(ELLIPSIS) - Traceback (most recent call last): - ... - ValueError: ('Only reporting flags allowed', 8) - - >>> set_unittest_reportflags(old) == (REPORT_NDIFF | - ... 
REPORT_ONLY_FIRST_FAILURE) - True - """ - global _unittest_reportflags - - if (flags & REPORTING_FLAGS) != flags: - raise ValueError("Only reporting flags allowed", flags) - old = _unittest_reportflags - _unittest_reportflags = flags - return old - - -class DocTestCase(unittest.TestCase): - - def __init__(self, test, optionflags=0, setUp=None, tearDown=None, - checker=None): - - unittest.TestCase.__init__(self) - self._dt_optionflags = optionflags - self._dt_checker = checker - self._dt_test = test - self._dt_setUp = setUp - self._dt_tearDown = tearDown - - def setUp(self): - test = self._dt_test - - if self._dt_setUp is not None: - self._dt_setUp(test) - - def tearDown(self): - test = self._dt_test - - if self._dt_tearDown is not None: - self._dt_tearDown(test) - - test.globs.clear() - - def runTest(self): - test = self._dt_test - old = sys.stdout - new = StringIO() - optionflags = self._dt_optionflags - - if not (optionflags & REPORTING_FLAGS): - # The option flags don't include any reporting flags, - # so add the default reporting flags - optionflags |= _unittest_reportflags - - runner = DocTestRunner(optionflags=optionflags, - checker=self._dt_checker, verbose=False) - - try: - runner.DIVIDER = "-"*70 - failures, tries = runner.run( - test, out=new.write, clear_globs=False) - finally: - sys.stdout = old - - if failures: - raise self.failureException(self.format_failure(new.getvalue())) - - def format_failure(self, err): - test = self._dt_test - if test.lineno is None: - lineno = 'unknown line number' - else: - lineno = '%s' % test.lineno - lname = '.'.join(test.name.split('.')[-1:]) - return ('Failed doctest test for %s\n' - ' File "%s", line %s, in %s\n\n%s' - % (test.name, test.filename, lineno, lname, err) - ) - - def debug(self): - r"""Run the test case without results and without catching exceptions - - The unit test framework includes a debug method on test cases - and test suites to support post-mortem debugging. 
The test code - is run in such a way that errors are not caught. This way a - caller can catch the errors and initiate post-mortem debugging. - - The DocTestCase provides a debug method that raises - UnexpectedException errors if there is an unexepcted - exception: - - >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42', - ... {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - >>> try: - ... case.debug() - ... except UnexpectedException, failure: - ... pass - - The UnexpectedException contains the test, the example, and - the original exception: - - >>> failure.test is test - True - - >>> failure.example.want - '42\n' - - >>> exc_info = failure.exc_info - >>> raise exc_info[0], exc_info[1], exc_info[2] - Traceback (most recent call last): - ... - KeyError - - If the output doesn't match, then a DocTestFailure is raised: - - >>> test = DocTestParser().get_doctest(''' - ... >>> x = 1 - ... >>> x - ... 2 - ... ''', {}, 'foo', 'foo.py', 0) - >>> case = DocTestCase(test) - - >>> try: - ... case.debug() - ... except DocTestFailure, failure: - ... pass - - DocTestFailure objects provide access to the test: - - >>> failure.test is test - True - - As well as to the example: - - >>> failure.example.want - '2\n' - - and the actual output: - - >>> failure.got - '1\n' - - """ - - self.setUp() - runner = DebugRunner(optionflags=self._dt_optionflags, - checker=self._dt_checker, verbose=False) - runner.run(self._dt_test) - self.tearDown() - - def id(self): - return self._dt_test.name - - def __repr__(self): - name = self._dt_test.name.split('.') - return "%s (%s)" % (name[-1], '.'.join(name[:-1])) - - __str__ = __repr__ - - def shortDescription(self): - return "Doctest: " + self._dt_test.name - -def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None, - **options): - """ - Convert doctest tests for a module to a unittest test suite. - - This converts each documentation string in a module that - contains doctest tests to a unittest test case. 
If any of the - tests in a doc string fail, then the test case fails. An exception - is raised showing the name of the file containing the test and a - (sometimes approximate) line number. - - The `module` argument provides the module to be tested. The argument - can be either a module or a module name. - - If no argument is given, the calling module is used. - - A number of options may be provided as keyword arguments: - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - """ - - if test_finder is None: - test_finder = DocTestFinder() - - module = _normalize_module(module) - tests = test_finder.find(module, globs=globs, extraglobs=extraglobs) - if globs is None: - globs = module.__dict__ - if not tests: - # Why do we want to do this? Because it reveals a bug that might - # otherwise be hidden. 
- raise ValueError(module, "has no tests") - - tests.sort() - suite = unittest.TestSuite() - for test in tests: - if len(test.examples) == 0: - continue - if not test.filename: - filename = module.__file__ - if filename[-4:] in (".pyc", ".pyo"): - filename = filename[:-1] - test.filename = filename - suite.addTest(DocTestCase(test, **options)) - - return suite - -class DocFileCase(DocTestCase): - - def id(self): - return '_'.join(self._dt_test.name.split('.')) - - def __repr__(self): - return self._dt_test.filename - __str__ = __repr__ - - def format_failure(self, err): - return ('Failed doctest test for %s\n File "%s", line 0\n\n%s' - % (self._dt_test.name, self._dt_test.filename, err) - ) - -def DocFileTest(path, module_relative=True, package=None, - globs=None, parser=DocTestParser(), **options): - if globs is None: - globs = {} - - if package and not module_relative: - raise ValueError("Package may only be specified for module-" - "relative paths.") - - # Relativize the path. - if module_relative: - package = _normalize_module(package) - path = _module_relative_path(package, path) - - # Find the file and read it. - name = os.path.basename(path) - f = open(path) - doc = f.read() - f.close() - - # Convert it to a test, and wrap it in a DocFileCase. - test = parser.get_doctest(doc, globs, name, path, 0) - return DocFileCase(test, **options) - -def DocFileSuite(*paths, **kw): - """A unittest suite for one or more doctest files. - - The path to each doctest file is given as a string; the - interpretation of that string depends on the keyword argument - "module_relative". - - A number of options may be provided as keyword arguments: - - module_relative - If "module_relative" is True, then the given file paths are - interpreted as os-independent module-relative paths. By - default, these paths are relative to the calling module's - directory; but if the "package" argument is specified, then - they are relative to that package. 
To ensure os-independence, - "filename" should use "/" characters to separate path - segments, and may not be an absolute path (i.e., it may not - begin with "/"). - - If "module_relative" is False, then the given file paths are - interpreted as os-specific paths. These paths may be absolute - or relative (to the current working directory). - - package - A Python package or the name of a Python package whose directory - should be used as the base directory for module relative paths. - If "package" is not specified, then the calling module's - directory is used as the base directory for module relative - filenames. It is an error to specify "package" if - "module_relative" is False. - - setUp - A set-up function. This is called before running the - tests in each file. The setUp function will be passed a DocTest - object. The setUp function can access the test globals as the - globs attribute of the test passed. - - tearDown - A tear-down function. This is called after running the - tests in each file. The tearDown function will be passed a DocTest - object. The tearDown function can access the test globals as the - globs attribute of the test passed. - - globs - A dictionary containing initial global variables for the tests. - - optionflags - A set of doctest option flags expressed as an integer. - - parser - A DocTestParser (or subclass) that should be used to extract - tests from the files. - """ - suite = unittest.TestSuite() - - # We do this here so that _normalize_module is called at the right - # level. If it were called in DocFileTest, then this function - # would be the caller and we might guess the package incorrectly. - if kw.get('module_relative', True): - kw['package'] = _normalize_module(kw.get('package')) - - for path in paths: - suite.addTest(DocFileTest(path, **kw)) - - return suite - -###################################################################### -## 9. 
Debugging Support -###################################################################### - -def script_from_examples(s): - r"""Extract script from text with examples. - - Converts text with examples to a Python script. Example input is - converted to regular code. Example output and all other words - are converted to comments: - - >>> text = ''' - ... Here are examples of simple math. - ... - ... Python has super accurate integer addition - ... - ... >>> 2 + 2 - ... 5 - ... - ... And very friendly error messages: - ... - ... >>> 1/0 - ... To Infinity - ... And - ... Beyond - ... - ... You can use logic if you want: - ... - ... >>> if 0: - ... ... blah - ... ... blah - ... ... - ... - ... Ho hum - ... ''' - - >>> print script_from_examples(text) - # Here are examples of simple math. - # - # Python has super accurate integer addition - # - 2 + 2 - # Expected: - ## 5 - # - # And very friendly error messages: - # - 1/0 - # Expected: - ## To Infinity - ## And - ## Beyond - # - # You can use logic if you want: - # - if 0: - blah - blah - # - # Ho hum - """ - output = [] - for piece in DocTestParser().parse(s): - if isinstance(piece, Example): - # Add the example's source code (strip trailing NL) - output.append(piece.source[:-1]) - # Add the expected output: - want = piece.want - if want: - output.append('# Expected:') - output += ['## '+l for l in want.split('\n')[:-1]] - else: - # Add non-example text. - output += [_comment_line(l) - for l in piece.split('\n')[:-1]] - - # Trim junk on both ends. - while output and output[-1] == '#': - output.pop() - while output and output[0] == '#': - output.pop(0) - # Combine the output, and return it. - return '\n'.join(output) - -def testsource(module, name): - """Extract the test sources from a doctest docstring as a script. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the doc string with tests to be debugged. 
- """ - module = _normalize_module(module) - tests = DocTestFinder().find(module) - test = [t for t in tests if t.name == name] - if not test: - raise ValueError(name, "not found in tests") - test = test[0] - testsrc = script_from_examples(test.docstring) - return testsrc - -def debug_src(src, pm=False, globs=None): - """Debug a single doctest docstring, in argument `src`'""" - testsrc = script_from_examples(src) - debug_script(testsrc, pm, globs) - -def debug_script(src, pm=False, globs=None): - "Debug a test script. `src` is the script, as a string." - import pdb - - # Note that tempfile.NameTemporaryFile() cannot be used. As the - # docs say, a file so created cannot be opened by name a second time - # on modern Windows boxes, and execfile() needs to open it. - srcfilename = tempfile.mktemp(".py", "doctestdebug") - f = open(srcfilename, 'w') - f.write(src) - f.close() - - try: - if globs: - globs = globs.copy() - else: - globs = {} - - if pm: - try: - execfile(srcfilename, globs, globs) - except: - print(sys.exc_info()[1]) - pdb.post_mortem(sys.exc_info()[2]) - else: - # Note that %r is vital here. '%s' instead can, e.g., cause - # backslashes to get treated as metacharacters on Windows. - pdb.run("execfile(%r)" % srcfilename, globs, globs) - - finally: - os.remove(srcfilename) - -def debug(module, name, pm=False): - """Debug a single doctest docstring. - - Provide the module (or dotted name of the module) containing the - test to be debugged and the name (within the module) of the object - with the docstring with tests to be debugged. - """ - module = _normalize_module(module) - testsrc = testsource(module, name) - debug_script(testsrc, pm, module.__dict__) - -###################################################################### -## 10. Example Usage -###################################################################### -class _TestClass: - """ - A pointless class, for sanity-checking of docstring testing. 
- - Methods: - square() - get() - - >>> _TestClass(13).get() + _TestClass(-12).get() - 1 - >>> hex(_TestClass(13).square().get()) - '0xa9' - """ - - def __init__(self, val): - """val -> _TestClass object with associated value val. - - >>> t = _TestClass(123) - >>> print t.get() - 123 - """ - - self.val = val - - def square(self): - """square() -> square TestClass's associated value - - >>> _TestClass(13).square().get() - 169 - """ - - self.val = self.val ** 2 - return self - - def get(self): - """get() -> return TestClass's associated value. - - >>> x = _TestClass(-42) - >>> print x.get() - -42 - """ - - return self.val - -__test__ = {"_TestClass": _TestClass, - "string": r""" - Example of a string object, searched as-is. - >>> x = 1; y = 2 - >>> x + y, x * y - (3, 2) - """, - - "bool-int equivalence": r""" - In 2.2, boolean expressions displayed - 0 or 1. By default, we still accept - them. This can be disabled by passing - DONT_ACCEPT_TRUE_FOR_1 to the new - optionflags argument. - >>> 4 == 4 - 1 - >>> 4 == 4 - True - >>> 4 > 4 - 0 - >>> 4 > 4 - False - """, - - "blank lines": r""" - Blank lines can be marked with : - >>> print 'foo\n\nbar\n' - foo - - bar - - """, - - "ellipsis": r""" - If the ellipsis flag is used, then '...' can be used to - elide substrings in the desired output: - >>> print range(1000) #doctest: +ELLIPSIS - [0, 1, 2, ..., 999] - """, - - "whitespace normalization": r""" - If the whitespace normalization flag is used, then - differences in whitespace are ignored. 
- >>> print range(30) #doctest: +NORMALIZE_WHITESPACE - [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, - 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, - 27, 28, 29] - """, - } - -def _test(): - r = unittest.TextTestRunner() - r.run(DocTestSuite()) - -if __name__ == "__main__": - _test() - diff --git a/libs/setuptools-2.2/setuptools/tests/entries-v10 b/libs/setuptools-2.2/setuptools/tests/entries-v10 deleted file mode 100644 index 4446c50..0000000 --- a/libs/setuptools-2.2/setuptools/tests/entries-v10 +++ /dev/null @@ -1,615 +0,0 @@ -10 - -dir -89001 -http://svn.python.org/projects/sandbox/branches/setuptools-0.6 -http://svn.python.org/projects - - - -2013-06-03T17:26:03.052972Z -89000 -phillip.eby - - - - - - - - - - - - - - -6015fed2-1504-0410-9fe1-9d1591cc4771 - -api_tests.txt -file - - - - -2013-06-19T13:20:47.948712Z -dec366372ca14fbeaeb26f492bcf5725 -2013-05-15T22:04:59.389374Z -88997 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -12312 - -setuptools.egg-info -dir - -README.txt -file - - - - -2013-06-19T13:20:47.948712Z -26f0dd5d095522ba3ad999b6b6777b92 -2011-05-31T20:10:56.416725Z -88846 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -7615 - -easy_install.py -file - - - - -2013-06-19T13:20:47.948712Z -97b52fe7253bf4683f9f626f015eb72e -2006-09-20T20:48:18.716070Z -51935 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -126 - -setuptools -dir - -launcher.c -file - - - - -2013-06-19T13:20:47.924700Z -e5a8e77de9022688b80f77fc6d742fee -2009-10-19T21:03:29.785400Z -75544 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -7476 - -ez_setup.py -file - - - - -2013-06-19T13:20:47.924700Z -17e8ec5e08faccfcb08b5f8d5167ca14 -2011-01-20T18:50:00.815420Z -88124 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -8350 - -version -file - - - - -2013-06-19T13:20:47.924700Z -e456da09e0c9e224a56302f8316b6dbf -2007-01-09T19:21:05.921317Z -53317 -phillip.eby -has-props - - - - - - - - - - - - - - - - 
- - - - -1143 - -setup.py -file - - - - -2013-06-19T13:20:47.924700Z -d4e5b3c16bd61bfef6c0bb9377a3a3ea -2013-05-15T22:04:59.389374Z -88997 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -5228 - -release.sh -file - - - - -2013-06-19T13:20:47.932704Z -b1fd4054a1c107ff0f27baacd97be94c -2009-10-28T17:12:45.227140Z -75925 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -1044 - -pkg_resources.txt -file - - - - -2013-06-19T13:20:47.928702Z -f497e7c92a4de207cbd9ab1943f93388 -2009-10-12T20:00:02.336146Z -75385 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -94518 - -site.py -file - - - - -2013-06-19T13:20:47.932704Z -ebaac6fb6525f77ca950d22e6f8315df -2006-03-11T00:39:09.666740Z -42965 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -2362 - -version.dat -file - - - - -2013-06-19T13:20:47.932704Z -8e14ecea32b9874cd7d29277494554c0 -2009-10-28T17:12:45.227140Z -75925 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -80 - -virtual-python.py -file - - - - -2013-06-19T13:20:47.932704Z -aa857add3b5563238f0a904187f5ded9 -2005-10-17T02:26:39.000000Z -41262 -pje -has-props - - - - - - - - - - - - - - - - - - - - -3898 - -setup.cfg -file - - - - -2013-06-19T13:20:47.932704Z -eda883e744fce83f8107ad8dc8303536 -2006-09-21T22:26:48.050256Z -51965 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -296 - -setuptools.txt -file - - - - -2013-06-19T13:20:47.940708Z -11926256f06046b196eaf814772504e7 -2013-05-15T22:04:59.389374Z -88997 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -149832 - -pkg_resources.py -file - - - - -2013-06-19T13:20:47.940708Z -b63a30f5f0f0225a788c2c0e3430b3cf -2013-05-15T22:04:59.389374Z -88997 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -90397 - -tests -dir - -wikiup.cfg -file - - - - -2013-06-19T13:20:47.944710Z -34ad845a5e0a0b46458557fa910bf429 -2008-08-21T17:23:50.797633Z -65935 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - 
- - -136 - -EasyInstall.txt -file - - - - -2013-06-19T13:20:47.944710Z -e97387c517f70fc18a377e42d19d64d4 -2013-05-15T22:04:59.389374Z -88997 -phillip.eby -has-props - - - - - - - - - - - - - - - - - - - - -82495 - diff --git a/libs/setuptools-2.2/setuptools/tests/environment.py b/libs/setuptools-2.2/setuptools/tests/environment.py deleted file mode 100644 index 476d280..0000000 --- a/libs/setuptools-2.2/setuptools/tests/environment.py +++ /dev/null @@ -1,165 +0,0 @@ -import os -import zipfile -import sys -import tempfile -import unittest -import shutil -import stat -import unicodedata - -from subprocess import Popen as _Popen, PIPE as _PIPE - - -def _extract(self, member, path=None, pwd=None): - """for zipfile py2.5 borrowed from cpython""" - if not isinstance(member, zipfile.ZipInfo): - member = self.getinfo(member) - - if path is None: - path = os.getcwd() - - return _extract_member(self, member, path, pwd) - - -def _extract_from_zip(self, name, dest_path): - dest_file = open(dest_path, 'wb') - try: - dest_file.write(self.read(name)) - finally: - dest_file.close() - - -def _extract_member(self, member, targetpath, pwd): - """for zipfile py2.5 borrowed from cpython""" - # build the destination pathname, replacing - # forward slashes to platform specific separators. - # Strip trailing path separator, unless it represents the root. - if (targetpath[-1:] in (os.path.sep, os.path.altsep) - and len(os.path.splitdrive(targetpath)[1]) > 1): - targetpath = targetpath[:-1] - - # don't include leading "/" from file name if present - if member.filename[0] == '/': - targetpath = os.path.join(targetpath, member.filename[1:]) - else: - targetpath = os.path.join(targetpath, member.filename) - - targetpath = os.path.normpath(targetpath) - - # Create all upper directories if necessary. 
- upperdirs = os.path.dirname(targetpath) - if upperdirs and not os.path.exists(upperdirs): - os.makedirs(upperdirs) - - if member.filename[-1] == '/': - if not os.path.isdir(targetpath): - os.mkdir(targetpath) - return targetpath - - _extract_from_zip(self, member.filename, targetpath) - - return targetpath - - -def _remove_dir(target): - - #on windows this seems to a problem - for dir_path, dirs, files in os.walk(target): - os.chmod(dir_path, stat.S_IWRITE) - for filename in files: - os.chmod(os.path.join(dir_path, filename), stat.S_IWRITE) - shutil.rmtree(target) - - -class ZippedEnvironment(unittest.TestCase): - - datafile = None - dataname = None - old_cwd = None - - def setUp(self): - if self.datafile is None or self.dataname is None: - return - - if not os.path.isfile(self.datafile): - self.old_cwd = None - return - - self.old_cwd = os.getcwd() - - self.temp_dir = tempfile.mkdtemp() - zip_file, source, target = [None, None, None] - try: - zip_file = zipfile.ZipFile(self.datafile) - for files in zip_file.namelist(): - _extract(zip_file, files, self.temp_dir) - finally: - if zip_file: - zip_file.close() - del zip_file - - os.chdir(os.path.join(self.temp_dir, self.dataname)) - - def tearDown(self): - #Assume setUp was never completed - if self.dataname is None or self.datafile is None: - return - - try: - if self.old_cwd: - os.chdir(self.old_cwd) - _remove_dir(self.temp_dir) - except OSError: - #sigh? 
- pass - - -def _which_dirs(cmd): - result = set() - for path in os.environ.get('PATH', '').split(os.pathsep): - filename = os.path.join(path, cmd) - if os.access(filename, os.X_OK): - result.add(path) - return result - - -def run_setup_py(cmd, pypath=None, path=None, - data_stream=0, env=None): - """ - Execution command for tests, separate from those used by the - code directly to prevent accidental behavior issues - """ - if env is None: - env = dict() - for envname in os.environ: - env[envname] = os.environ[envname] - - #override the python path if needed - if pypath is not None: - env["PYTHONPATH"] = pypath - - #overide the execution path if needed - if path is not None: - env["PATH"] = path - if not env.get("PATH", ""): - env["PATH"] = _which_dirs("tar").union(_which_dirs("gzip")) - env["PATH"] = os.pathsep.join(env["PATH"]) - - cmd = [sys.executable, "setup.py"] + list(cmd) - - #regarding the shell argument, see: http://bugs.python.org/issue8557 - try: - proc = _Popen(cmd, stdout=_PIPE, stderr=_PIPE, - shell=(sys.platform == 'win32'), env=env) - - data = proc.communicate()[data_stream] - except OSError: - return 1, '' - - #decode the console string if needed - if hasattr(data, "decode"): - data = data.decode() # should use the preffered encoding - data = unicodedata.normalize('NFC', data) - - #communciate calls wait() - return proc.returncode, data diff --git a/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/external.html b/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/external.html deleted file mode 100644 index 92e4702..0000000 --- a/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/external.html +++ /dev/null @@ -1,3 +0,0 @@ - -bad old link - diff --git a/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html b/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html deleted file mode 100644 index fefb028..0000000 --- 
a/libs/setuptools-2.2/setuptools/tests/indexes/test_links_priority/simple/foobar/index.html +++ /dev/null @@ -1,4 +0,0 @@ - -foobar-0.1.tar.gz
    -external homepage
    - diff --git a/libs/setuptools-2.2/setuptools/tests/py26compat.py b/libs/setuptools-2.2/setuptools/tests/py26compat.py deleted file mode 100644 index d4fb891..0000000 --- a/libs/setuptools-2.2/setuptools/tests/py26compat.py +++ /dev/null @@ -1,14 +0,0 @@ -import unittest - -try: - # provide skipIf for Python 2.4-2.6 - skipIf = unittest.skipIf -except AttributeError: - def skipIf(condition, reason): - def skipper(func): - def skip(*args, **kwargs): - return - if condition: - return skip - return func - return skipper diff --git a/libs/setuptools-2.2/setuptools/tests/script-with-bom.py b/libs/setuptools-2.2/setuptools/tests/script-with-bom.py deleted file mode 100644 index 22dee0d..0000000 --- a/libs/setuptools-2.2/setuptools/tests/script-with-bom.py +++ /dev/null @@ -1,3 +0,0 @@ -# -*- coding: utf-8 -*- - -result = 'passed' diff --git a/libs/setuptools-2.2/setuptools/tests/server.py b/libs/setuptools-2.2/setuptools/tests/server.py deleted file mode 100644 index ae2381e..0000000 --- a/libs/setuptools-2.2/setuptools/tests/server.py +++ /dev/null @@ -1,82 +0,0 @@ -"""Basic http server for tests to simulate PyPI or custom indexes -""" -import sys -import time -import threading -from setuptools.compat import BaseHTTPRequestHandler -from setuptools.compat import (urllib2, URLError, HTTPServer, - SimpleHTTPRequestHandler) - -class IndexServer(HTTPServer): - """Basic single-threaded http server simulating a package index - - You can use this server in unittest like this:: - s = IndexServer() - s.start() - index_url = s.base_url() + 'mytestindex' - # do some test requests to the index - # The index files should be located in setuptools/tests/indexes - s.stop() - """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=SimpleHTTPRequestHandler): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - self._run = True - - def serve(self): - while self._run: - self.handle_request() - - def start(self): - self.thread = 
threading.Thread(target=self.serve) - self.thread.start() - - def stop(self): - "Stop the server" - - # Let the server finish the last request and wait for a new one. - time.sleep(0.1) - - # self.shutdown is not supported on python < 2.6, so just - # set _run to false, and make a request, causing it to - # terminate. - self._run = False - url = 'http://127.0.0.1:%(server_port)s/' % vars(self) - try: - if sys.version_info >= (2, 6): - urllib2.urlopen(url, timeout=5) - else: - urllib2.urlopen(url) - except URLError: - # ignore any errors; all that's important is the request - pass - self.thread.join() - self.socket.close() - - def base_url(self): - port = self.server_port - return 'http://127.0.0.1:%s/setuptools/tests/indexes/' % port - -class RequestRecorder(BaseHTTPRequestHandler): - def do_GET(self): - requests = vars(self.server).setdefault('requests', []) - requests.append(self) - self.send_response(200, 'OK') - -class MockServer(HTTPServer, threading.Thread): - """ - A simple HTTP Server that records the requests made to it. 
- """ - def __init__(self, server_address=('', 0), - RequestHandlerClass=RequestRecorder): - HTTPServer.__init__(self, server_address, RequestHandlerClass) - threading.Thread.__init__(self) - self.setDaemon(True) - self.requests = [] - - def run(self): - self.serve_forever() - - def url(self): - return 'http://localhost:%(server_port)s/' % vars(self) - url = property(url) diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy.zip deleted file mode 100644 index 1347be5..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy13.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy13.zip deleted file mode 100644 index 4776434..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy13.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy14.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy14.zip deleted file mode 100644 index 02ed8cf..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy14.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy15.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy15.zip deleted file mode 100644 index ed8daee..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy15.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy16.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy16.zip deleted file mode 100644 index b6e98d6..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy16.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy17.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy17.zip deleted file mode 100644 index d96e151..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy17.zip and /dev/null 
differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy18.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/dummy18.zip deleted file mode 100644 index a726783..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/dummy18.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_example.zip deleted file mode 100644 index d85fb84..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_ext_list.txt deleted file mode 100644 index ce8e051..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_ext_list.txt +++ /dev/null @@ -1,3 +0,0 @@ -third_party3 file:///C:/development/svn_example/repos/svn13/extra1 -third_party2 -r3 file:///C:/development/svn_example/repos/svn13/extra1 -third_party -r1 file:///C:/development/svn_example/repos/svn13/extra1 diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_ext_list.xml deleted file mode 100644 index e69de29..0000000 diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_info.xml deleted file mode 100644 index 2b7fa15..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn13_info.xml +++ /dev/null @@ -1,121 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn13/main - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -normal -2013-07-13T15:33:23.187500Z - - -ptt -2013-07-13T15:33:28.359375Z - - - -file:///C:/development/svn_example/repos/svn13/main/a%20file - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -normal -2013-07-13T15:33:21.109375Z 
-a6166e5e98a5a503089cde9bc8031293 - - -ptt -2013-07-13T15:33:21.312500Z - - - -file:///C:/development/svn_example/repos/svn13/main/to_delete - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -delete -2013-07-13T15:33:28.140625Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:33:28.359375Z - - - -file:///C:/development/svn_example/repos/svn13/main/folder - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -normal -2013-07-13T15:33:26.187500Z - - -ptt -2013-07-13T15:33:26.312500Z - - - -file:///C:/development/svn_example/repos/svn13/main/folder/quest.txt - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -normal -2013-07-13T15:33:20.109375Z -795240c6a830c14f83961e57e07dad12 - - -ptt -2013-07-13T15:33:20.312500Z - - - -file:///C:/development/svn_example/repos/svn13/main/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn13/main -d2996769-47b0-9946-b618-da1aa3eceda3 - - -normal -2013-07-13T15:33:19.375000Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:33:19.609375Z - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_example.zip deleted file mode 100644 index 57093c0..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_ext_list.txt deleted file mode 100644 index b0a2112..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_ext_list.txt +++ /dev/null @@ -1,4 +0,0 @@ -third_party3 file:///C:/development/svn_example/repos/svn13/extra1 -third_party2 -r3 file:///C:/development/svn_example/repos/svn13/extra1 -third_party -r1 file:///C:/development/svn_example/repos/svn13/extra1 - diff --git 
a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_ext_list.xml deleted file mode 100644 index e69de29..0000000 diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_info.xml deleted file mode 100644 index be01328..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn14_info.xml +++ /dev/null @@ -1,119 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn14/main - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -normal - - -ptt -2013-07-13T15:34:14.406250Z - - - -file:///C:/development/svn_example/repos/svn14/main/a%20file - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -normal -2013-07-13T15:34:08.109375Z -a6166e5e98a5a503089cde9bc8031293 - - -ptt -2013-07-13T15:34:08.390625Z - - - -file:///C:/development/svn_example/repos/svn14/main/to_delete - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -delete -2013-07-13T15:34:14.125000Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:34:14.406250Z - - - -file:///C:/development/svn_example/repos/svn14/main/folder - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -normal - - -ptt -2013-07-13T15:34:12.390625Z - - - -file:///C:/development/svn_example/repos/svn14/main/folder/quest.txt - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -normal -2013-07-13T15:34:07.109375Z -795240c6a830c14f83961e57e07dad12 - - -ptt -2013-07-13T15:34:07.390625Z - - - -file:///C:/development/svn_example/repos/svn14/main/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn14/main -c75942e5-8b7a-354d-b1cf-73dee23fa94f - - -normal -2013-07-13T15:34:06.250000Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:34:06.531250Z - - - 
diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_example.zip deleted file mode 100644 index 52a1d45..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.txt deleted file mode 100644 index 8c07c6a..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.txt +++ /dev/null @@ -1,4 +0,0 @@ -third_party3 file:///C:/development/svn_example/repos/svn15/extra1 --r3 file:///C:/development/svn_example/repos/svn15/extra1 third_party2 -file:///C:/development/svn_example/repos/svn15/extra1@r1 third_party - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.xml deleted file mode 100644 index 2d2994a..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_ext_list.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - -third_party3 file:///C:/development/svn_example/repos/svn15/extra2 --r3 file:///C:/development/svn_example/repos/svn15/extra2 third_party2 -file:///C:/development/svn_example/repos/svn15/extra2@r1 third_party大介 - - - -third_party3 file:///C:/development/svn_example/repos/svn15/extra1 --r3 file:///C:/development/svn_example/repos/svn15/extra1 third_party2 -file:///C:/development/svn_example/repos/svn15/extra1@r1 third_party大介 - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_info.xml deleted file mode 100644 index f6e02e0..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn15_info.xml +++ /dev/null @@ -1,125 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn15/main - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -normal -infinity - - 
-ptt -2013-07-13T15:34:49.562500Z - - - -file:///C:/development/svn_example/repos/svn15/main/a%20file - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -normal -infinity -2013-07-13T15:34:43.109375Z -a6166e5e98a5a503089cde9bc8031293 - - -ptt -2013-07-13T15:34:43.484375Z - - - -file:///C:/development/svn_example/repos/svn15/main/to_delete - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -delete -infinity -2013-07-13T15:34:49.125000Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:34:49.562500Z - - - -file:///C:/development/svn_example/repos/svn15/main/folder - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -normal -infinity - - -ptt -2013-07-13T15:34:47.515625Z - - - -file:///C:/development/svn_example/repos/svn15/main/folder/quest.txt - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -normal -infinity -2013-07-13T15:34:42.109375Z -795240c6a830c14f83961e57e07dad12 - - -ptt -2013-07-13T15:34:42.484375Z - - - -file:///C:/development/svn_example/repos/svn15/main/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn15/main -4eab6983-54fe-384b-a282-9306f52d948f - - -normal -infinity -2013-07-13T15:34:41.375000Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:34:41.734375Z - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_example.zip deleted file mode 100644 index e886b2a..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.txt deleted file mode 100644 index b88e658..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.txt +++ /dev/null @@ -1,4 +0,0 @@ 
-"third party3" file:///C:/development/svn_example/repos/svn16/extra1 -'third party3b' file:///C:/development/svn_example/repos/svn16/extra1 --r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2 -file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.xml deleted file mode 100644 index e5119fe..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_ext_list.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - -"third party3" file:///C:/development/svn_example/repos/svn16/extra2 --r3 file:///C:/development/svn_example/repos/svn16/extra2 third\ party2 -file:///C:/development/svn_example/repos/svn16/extra2@r1 third_party大介 - - - -"third party3" file:///C:/development/svn_example/repos/svn16/extra1 --r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2 -file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介 - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_info.xml deleted file mode 100644 index 3d08c89..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn16_info.xml +++ /dev/null @@ -1,125 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn16/main - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -normal -infinity - - -ptt -2013-07-13T15:35:17.390625Z - - - -file:///C:/development/svn_example/repos/svn16/main/a%20file - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -normal -infinity -2013-07-13T15:35:14.578125Z -a6166e5e98a5a503089cde9bc8031293 - - -ptt -2013-07-13T15:35:14.906250Z - - - -file:///C:/development/svn_example/repos/svn16/main/to_delete - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -delete -infinity 
-2013-07-13T15:35:17.046875Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:35:17.390625Z - - - -file:///C:/development/svn_example/repos/svn16/main/folder - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -normal -infinity - - -ptt -2013-07-13T15:35:16.406250Z - - - -file:///C:/development/svn_example/repos/svn16/main/folder/quest.txt - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -normal -infinity -2013-07-13T15:35:14.078125Z -795240c6a830c14f83961e57e07dad12 - - -ptt -2013-07-13T15:35:14.421875Z - - - -file:///C:/development/svn_example/repos/svn16/main/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn16/main -bd8d2cfc-1a74-de45-b166-262010c17c0a - - -normal -infinity -2013-07-13T15:35:12.171875Z -d41d8cd98f00b204e9800998ecf8427e - - -ptt -2013-07-13T15:35:13.906250Z - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_example.zip deleted file mode 100644 index ba0e882..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.txt deleted file mode 100644 index db24f45..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.txt +++ /dev/null @@ -1,4 +0,0 @@ -"third party3" file:///C:/development/svn_example/repos/svn17/extra1 -'third party3b' file:///C:/development/svn_example/repos/svn17/extra1 --r3 file:///C:/development/svn_example/repos/svn17/extra1 third\ party2 -file:///C:/development/svn_example/repos/svn17/extra1@r1 third_party diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.xml deleted file mode 100644 index 50e5d02..0000000 --- 
a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_ext_list.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - -"third party3" file:///C:/development/svn_example/repos/svn16/extra1 --r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2 -file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介 - - - -"third party3" file:///C:/development/svn_example/repos/svn17/extra2 --r3 file:///C:/development/svn_example/repos/svn17/extra2 third\ party2 -file:///C:/development/svn_example/repos/svn17/extra2@r1 third_party大介 - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_info.xml deleted file mode 100644 index 55db1f7..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn17_info.xml +++ /dev/null @@ -1,130 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn17/main - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -normal -infinity - - -ptt -2013-07-13T15:35:36.171875Z - - - -file:///C:/development/svn_example/repos/svn17/main/folder - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -normal -infinity - - -ptt -2013-07-13T15:35:34.859375Z - - - -file:///C:/development/svn_example/repos/svn17/main/folder/quest.txt - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -normal -infinity -2013-07-13T15:35:32.812500Z -bc80eba9e7a10c0a571a4678c520bc9683f3bac2 - - -ptt -2013-07-13T15:35:33.109375Z - - - -file:///C:/development/svn_example/repos/svn17/main/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -normal -infinity -2013-07-13T15:35:32.343750Z -da39a3ee5e6b4b0d3255bfef95601890afd80709 - - 
-ptt -2013-07-13T15:35:32.687500Z - - - -file:///C:/development/svn_example/repos/svn17/main/a%20file - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -normal -infinity -2013-07-13T15:35:33.187500Z -43785ab4b1816b49f242990883292813cd4f486c - - -ptt -2013-07-13T15:35:33.515625Z - - - -file:///C:/development/svn_example/repos/svn17/main/to_delete - -file:///C:/development/svn_example/repos/svn17/main -5ba45434-5197-164e-afab-81923f4744f5 - - -C:/development/svn_example/svn17_example -delete -infinity -da39a3ee5e6b4b0d3255bfef95601890afd80709 - - -ptt -2013-07-13T15:35:36.171875Z - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_example.zip b/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_example.zip deleted file mode 100644 index 4362f8e..0000000 Binary files a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_example.zip and /dev/null differ diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.txt b/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.txt deleted file mode 100644 index e5f1806..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.txt +++ /dev/null @@ -1,4 +0,0 @@ -"third party3" file:///C:/development/svn_example/repos/svn18/extra1 -'third party3b' file:///C:/development/svn_example/repos/svn18/extra1 --r3 file:///C:/development/svn_example/repos/svn18/extra1 third\ party2 -file:///C:/development/svn_example/repos/svn18/extra1@r1 third_party diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.xml deleted file mode 100644 index 51aeb73..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_ext_list.xml +++ /dev/null @@ -1,19 +0,0 @@ - - - -"third party3" file:///C:/development/svn_example/repos/svn16/extra1 --r3 file:///C:/development/svn_example/repos/svn16/extra1 third\ party2 
-file:///C:/development/svn_example/repos/svn16/extra1@r1 third_party大介 - - - -"third party3" file:///C:/development/svn_example/repos/svn18/extra2 --r3 file:///C:/development/svn_example/repos/svn18/extra2 third\ party2 -file:///C:/development/svn_example/repos/svn18/extra2@r1 third_party大介 - - - diff --git a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_info.xml b/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_info.xml deleted file mode 100644 index ed3fa68..0000000 --- a/libs/setuptools-2.2/setuptools/tests/svn_data/svn18_info.xml +++ /dev/null @@ -1,136 +0,0 @@ - - - -file:///C:/development/svn_example/repos/svn18/main -^/ - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - -C:/development/svn_example/svn18_example -normal -infinity - - -ptt -2013-07-13T15:35:57.796875Z - - - -file:///C:/development/svn_example/repos/svn18/main/a%20file -^/a%20file - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - -C:/development/svn_example/svn18_example -normal -infinity -2013-07-13T15:35:54.906250Z -43785ab4b1816b49f242990883292813cd4f486c - - -ptt -2013-07-13T15:35:55.265625Z - - - -file:///C:/development/svn_example/repos/svn18/main/to_delete -^/to_delete - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - -C:/development/svn_example/svn18_example -delete -infinity -da39a3ee5e6b4b0d3255bfef95601890afd80709 - - -ptt -2013-07-13T15:35:57.796875Z - - - -file:///C:/development/svn_example/repos/svn18/main/folder -^/folder - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - -C:/development/svn_example/svn18_example -normal -infinity - - -ptt -2013-07-13T15:35:56.750000Z - - - -file:///C:/development/svn_example/repos/svn18/main/folder/quest.txt -^/folder/quest.txt - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - 
-C:/development/svn_example/svn18_example -normal -infinity -2013-07-13T15:35:54.484375Z -bc80eba9e7a10c0a571a4678c520bc9683f3bac2 - - -ptt -2013-07-13T15:35:54.843750Z - - - -file:///C:/development/svn_example/repos/svn18/main/folder/lalala.txt -^/folder/lalala.txt - -file:///C:/development/svn_example/repos/svn18/main -3c5e3929-c92b-7045-9ba9-5e65d3dd1ee9 - - -C:/development/svn_example/svn18_example -normal -infinity -2013-07-13T15:35:54.015625Z -da39a3ee5e6b4b0d3255bfef95601890afd80709 - - -ptt -2013-07-13T15:35:54.375000Z - - - diff --git a/libs/setuptools-2.2/setuptools/tests/test_bdist_egg.py b/libs/setuptools-2.2/setuptools/tests/test_bdist_egg.py deleted file mode 100644 index 1a12218..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_bdist_egg.py +++ /dev/null @@ -1,69 +0,0 @@ -"""develop tests -""" -import sys -import os, re, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.bdist_egg import bdist_egg -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', py_modules=['hi']) -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - f = open('setup.py', 'w') - f.write(SETUP_PY) - f.close() - f = open('hi.py', 'w') - f.write('1\n') - f.close() - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_bdist_egg(self): - dist = Distribution(dict( - script_name='setup.py', 
- script_args=['bdist_egg'], - name='foo', - py_modules=['hi'] - )) - os.makedirs(os.path.join('build', 'src')) - old_stdout = sys.stdout - sys.stdout = o = StringIO() - try: - dist.parse_command_line() - dist.run_commands() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - [content] = os.listdir('dist') - self.assertTrue(re.match('foo-0.0.0-py[23].\d.egg$', content)) - -def test_suite(): - return unittest.makeSuite(TestDevelopTest) - diff --git a/libs/setuptools-2.2/setuptools/tests/test_build_ext.py b/libs/setuptools-2.2/setuptools/tests/test_build_ext.py deleted file mode 100644 index a520ced..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_build_ext.py +++ /dev/null @@ -1,20 +0,0 @@ -"""build_ext tests -""" -import os, shutil, tempfile, unittest -from distutils.command.build_ext import build_ext as distutils_build_ext -from setuptools.command.build_ext import build_ext -from setuptools.dist import Distribution - -class TestBuildExtTest(unittest.TestCase): - - def test_get_ext_filename(self): - # setuptools needs to give back the same - # result than distutils, even if the fullname - # is not in ext_map - dist = Distribution() - cmd = build_ext(dist) - cmd.ext_map['foo/bar'] = '' - res = cmd.get_ext_filename('foo') - wanted = distutils_build_ext.get_ext_filename(cmd, 'foo') - assert res == wanted - diff --git a/libs/setuptools-2.2/setuptools/tests/test_develop.py b/libs/setuptools-2.2/setuptools/tests/test_develop.py deleted file mode 100644 index 7b90161..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_develop.py +++ /dev/null @@ -1,122 +0,0 @@ -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.command.develop import develop -from setuptools.command import easy_install as easy_install_pkg -from setuptools.compat import StringIO -from setuptools.dist import Distribution - -SETUP_PY = 
"""\ -from setuptools import setup - -setup(name='foo', - packages=['foo'], - use_2to3=True, -) -""" - -INIT_PY = """print "foo" -""" - -class TestDevelopTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'foo')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # foo/__init__.py - init = os.path.join(self.dir, 'foo', '__init__.py') - f = open(init, 'w') - f.write(INIT_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix') or (hasattr(sys, 'base_prefix') and sys.base_prefix != sys.prefix): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_develop(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - dist = Distribution( - dict(name='foo', - packages=['foo'], - use_2to3=True, - version='0.0', - )) - dist.script_name = 'setup.py' - cmd = develop(dist) - cmd.user = 1 - cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - #sys.stdout = StringIO() - try: - cmd.run() - finally: - sys.stdout = old_stdout - - # let's see if we got our egg link at the right place - content = os.listdir(site.USER_SITE) - content.sort() - self.assertEqual(content, ['easy-install.pth', 'foo.egg-link']) - - # Check that we are using the right code. 
- egg_link_file = open(os.path.join(site.USER_SITE, 'foo.egg-link'), 'rt') - try: - path = egg_link_file.read().split()[0].strip() - finally: - egg_link_file.close() - init_file = open(os.path.join(path, 'foo', '__init__.py'), 'rt') - try: - init = init_file.read().strip() - finally: - init_file.close() - if sys.version < "3": - self.assertEqual(init, 'print "foo"') - else: - self.assertEqual(init, 'print("foo")') - - def notest_develop_with_setup_requires(self): - - wanted = ("Could not find suitable distribution for " - "Requirement.parse('I-DONT-EXIST')") - old_dir = os.getcwd() - os.chdir(self.dir) - try: - try: - dist = Distribution({'setup_requires': ['I_DONT_EXIST']}) - except DistutilsError: - e = sys.exc_info()[1] - error = str(e) - if error == wanted: - pass - finally: - os.chdir(old_dir) diff --git a/libs/setuptools-2.2/setuptools/tests/test_dist_info.py b/libs/setuptools-2.2/setuptools/tests/test_dist_info.py deleted file mode 100644 index a8adb68..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_dist_info.py +++ /dev/null @@ -1,83 +0,0 @@ -"""Test .dist-info style distributions. 
-""" -import os -import shutil -import tempfile -import unittest -import textwrap - -try: - import ast -except: - pass - -import pkg_resources - -from setuptools.tests.py26compat import skipIf - -def DALS(s): - "dedent and left-strip" - return textwrap.dedent(s).lstrip() - -class TestDistInfo(unittest.TestCase): - - def test_distinfo(self): - dists = {} - for d in pkg_resources.find_distributions(self.tmpdir): - dists[d.project_name] = d - - assert len(dists) == 2, dists - - unversioned = dists['UnversionedDistribution'] - versioned = dists['VersionedDistribution'] - - assert versioned.version == '2.718' # from filename - assert unversioned.version == '0.3' # from METADATA - - @skipIf('ast' not in globals(), - "ast is used to test conditional dependencies (Python >= 2.6)") - def test_conditional_dependencies(self): - requires = [pkg_resources.Requirement.parse('splort==4'), - pkg_resources.Requirement.parse('quux>=1.1')] - - for d in pkg_resources.find_distributions(self.tmpdir): - self.assertEqual(d.requires(), requires[:1]) - self.assertEqual(d.requires(extras=('baz',)), requires) - self.assertEqual(d.extras, ['baz']) - - def setUp(self): - self.tmpdir = tempfile.mkdtemp() - versioned = os.path.join(self.tmpdir, - 'VersionedDistribution-2.718.dist-info') - os.mkdir(versioned) - metadata_file = open(os.path.join(versioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: VersionedDistribution - Requires-Dist: splort (4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - finally: - metadata_file.close() - unversioned = os.path.join(self.tmpdir, - 'UnversionedDistribution.dist-info') - os.mkdir(unversioned) - metadata_file = open(os.path.join(unversioned, 'METADATA'), 'w+') - try: - metadata_file.write(DALS( - """ - Metadata-Version: 1.2 - Name: UnversionedDistribution - Version: 0.3 - Requires-Dist: splort (==4) - Provides-Extra: baz - Requires-Dist: quux (>=1.1); extra == 'baz' - """)) - 
finally: - metadata_file.close() - - def tearDown(self): - shutil.rmtree(self.tmpdir) diff --git a/libs/setuptools-2.2/setuptools/tests/test_easy_install.py b/libs/setuptools-2.2/setuptools/tests/test_easy_install.py deleted file mode 100644 index d2cc7a0..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_easy_install.py +++ /dev/null @@ -1,456 +0,0 @@ -"""Easy install Tests -""" -import sys -import os -import shutil -import tempfile -import unittest -import site -import contextlib -import textwrap -import tarfile -import logging -import distutils.core - -from setuptools.compat import StringIO, BytesIO, next, urlparse -from setuptools.sandbox import run_setup, SandboxViolation -from setuptools.command.easy_install import ( - easy_install, fix_jython_executable, get_script_args, nt_quote_arg) -from setuptools.command.easy_install import PthDistributions -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution -from pkg_resources import working_set, VersionConflict -from pkg_resources import Distribution as PRDistribution -import setuptools.tests.server -import pkg_resources - -class FakeDist(object): - def get_entry_map(self, group): - if group != 'console_scripts': - return {} - return {'name': 'ep'} - - def as_requirement(self): - return 'spec' - -WANTED = """\ -#!%s -# EASY-INSTALL-ENTRY-SCRIPT: 'spec','console_scripts','name' -__requires__ = 'spec' -import sys -from pkg_resources import load_entry_point - -if __name__ == '__main__': - sys.exit( - load_entry_point('spec', 'console_scripts', 'name')() - ) -""" % nt_quote_arg(fix_jython_executable(sys.executable, "")) - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestEasyInstallTest(unittest.TestCase): - - def test_install_site_py(self): - dist = Distribution() - cmd = easy_install(dist) - cmd.sitepy_installed = False - cmd.install_dir = tempfile.mkdtemp() - try: - cmd.install_site_py() - sitepy = 
os.path.join(cmd.install_dir, 'site.py') - self.assertTrue(os.path.exists(sitepy)) - finally: - shutil.rmtree(cmd.install_dir) - - def test_get_script_args(self): - dist = FakeDist() - - old_platform = sys.platform - try: - name, script = [i for i in next(get_script_args(dist))][0:2] - finally: - sys.platform = old_platform - - self.assertEqual(script, WANTED) - - def test_no_find_links(self): - # new option '--no-find-links', that blocks find-links added at - # the project level - dist = Distribution() - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.no_find_links = True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - self.assertEqual(cmd.package_index.scanned_urls, {}) - - # let's try without it (default behavior) - cmd = easy_install(dist) - cmd.check_pth_processing = lambda: True - cmd.find_links = ['link1', 'link2'] - cmd.install_dir = os.path.join(tempfile.mkdtemp(), 'ok') - cmd.args = ['ok'] - cmd.ensure_finalized() - keys = sorted(cmd.package_index.scanned_urls.keys()) - self.assertEqual(keys, ['link1', 'link2']) - - -class TestPTHFileWriter(unittest.TestCase): - def test_add_from_cwd_site_sets_dirty(self): - '''a pth file manager should set dirty - if a distribution is in site but also the cwd - ''' - pth = PthDistributions('does-not_exist', [os.getcwd()]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(os.getcwd())) - self.assertTrue(pth.dirty) - - def test_add_from_site_is_ignored(self): - if os.name != 'nt': - location = '/test/location/does-not-have-to-exist' - else: - location = 'c:\\does_not_exist' - pth = PthDistributions('does-not_exist', [location, ]) - self.assertTrue(not pth.dirty) - pth.add(PRDistribution(location)) - self.assertTrue(not pth.dirty) - - -class TestUserInstallTest(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 
'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.old_enable_site = site.ENABLE_USER_SITE - self.old_file = easy_install_pkg.__file__ - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - easy_install_pkg.__file__ = site.USER_SITE - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - site.ENABLE_USER_SITE = self.old_enable_site - easy_install_pkg.__file__ = self.old_file - - def test_user_install_implied(self): - site.ENABLE_USER_SITE = True # disabled sometimes - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.ensure_finalized() - self.assertTrue(cmd.user, 'user should be implied') - - def test_multiproc_atexit(self): - try: - __import__('multiprocessing') - except ImportError: - # skip the test if multiprocessing is not available - return - - log = logging.getLogger('test_easy_install') - logging.basicConfig(level=logging.INFO, stream=sys.stderr) - log.info('this should not break') - - def test_user_install_not_implied_without_usersite_enabled(self): - site.ENABLE_USER_SITE = False # usually enabled - #XXX: replace with something meaningfull - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.args = ['py'] - cmd.initialize_options() - self.assertFalse(cmd.user, 'NOT user should be implied') - - def test_local_index(self): - # make sure the local index is used - # when easy_install looks for installed - # packages - new_location = tempfile.mkdtemp() - target = tempfile.mkdtemp() - egg_file = os.path.join(new_location, 'foo-1.0.egg-info') - f = open(egg_file, 'w') - try: - f.write('Name: foo\n') - finally: - f.close() - - 
sys.path.append(target) - old_ppath = os.environ.get('PYTHONPATH') - os.environ['PYTHONPATH'] = os.path.pathsep.join(sys.path) - try: - dist = Distribution() - dist.script_name = 'setup.py' - cmd = easy_install(dist) - cmd.install_dir = target - cmd.args = ['foo'] - cmd.ensure_finalized() - cmd.local_index.scan([new_location]) - res = cmd.easy_install('foo') - self.assertEqual(os.path.realpath(res.location), - os.path.realpath(new_location)) - finally: - sys.path.remove(target) - for basedir in [new_location, target, ]: - if not os.path.exists(basedir) or not os.path.isdir(basedir): - continue - try: - shutil.rmtree(basedir) - except: - pass - if old_ppath is not None: - os.environ['PYTHONPATH'] = old_ppath - else: - del os.environ['PYTHONPATH'] - - def test_setup_requires(self): - """Regression test for Distribute issue #318 - - Ensure that a package with setup_requires can be installed when - setuptools is installed in the user site-packages without causing a - SandboxViolation. - """ - - test_pkg = create_setup_requires_package(self.dir) - test_setup_py = os.path.join(test_pkg, 'setup.py') - - try: - with quiet_context(): - with reset_setup_stop_context(): - run_setup(test_setup_py, ['install']) - except SandboxViolation: - self.fail('Installation caused SandboxViolation') - - -class TestSetupRequires(unittest.TestCase): - - def test_setup_requires_honors_fetch_params(self): - """ - When easy_install installs a source distribution which specifies - setup_requires, it should honor the fetch parameters (such as - allow-hosts, index-url, and find-links). - """ - # set up a server which will simulate an alternate package index. - p_index = setuptools.tests.server.MockServer() - p_index.start() - netloc = 1 - p_index_loc = urlparse(p_index.url)[netloc] - if p_index_loc.endswith(':0'): - # Some platforms (Jython) don't find a port to which to bind, - # so skip this test for them. - return - with quiet_context(): - # create an sdist that has a build-time dependency. 
- with TestSetupRequires.create_sdist() as dist_file: - with tempdir_context() as temp_install_dir: - with environment_context(PYTHONPATH=temp_install_dir): - ei_params = ['--index-url', p_index.url, - '--allow-hosts', p_index_loc, - '--exclude-scripts', '--install-dir', temp_install_dir, - dist_file] - with reset_setup_stop_context(): - with argv_context(['easy_install']): - # attempt to install the dist. It should fail because - # it doesn't exist. - self.assertRaises(SystemExit, - easy_install_pkg.main, ei_params) - # there should have been two or three requests to the server - # (three happens on Python 3.3a) - self.assertTrue(2 <= len(p_index.requests) <= 3) - self.assertEqual(p_index.requests[0].path, '/does-not-exist/') - - @staticmethod - @contextlib.contextmanager - def create_sdist(): - """ - Return an sdist with a setup_requires dependency (of something that - doesn't exist) - """ - with tempdir_context() as dir: - dist_path = os.path.join(dir, 'setuptools-test-fetcher-1.0.tar.gz') - make_trivial_sdist( - dist_path, - textwrap.dedent(""" - import setuptools - setuptools.setup( - name="setuptools-test-fetcher", - version="1.0", - setup_requires = ['does-not-exist'], - ) - """).lstrip()) - yield dist_path - - def test_setup_requires_overrides_version_conflict(self): - """ - Regression test for issue #323. - - Ensures that a distribution's setup_requires requirements can still be - installed and used locally even if a conflicting version of that - requirement is already on the path. 
- """ - - pr_state = pkg_resources.__getstate__() - fake_dist = PRDistribution('does-not-matter', project_name='foobar', - version='0.0') - working_set.add(fake_dist) - - try: - with tempdir_context() as temp_dir: - test_pkg = create_setup_requires_package(temp_dir) - test_setup_py = os.path.join(test_pkg, 'setup.py') - with quiet_context() as (stdout, stderr): - with reset_setup_stop_context(): - try: - # Don't even need to install the package, just - # running the setup.py at all is sufficient - run_setup(test_setup_py, ['--name']) - except VersionConflict: - self.fail('Installing setup.py requirements ' - 'caused a VersionConflict') - - lines = stdout.readlines() - self.assertTrue(len(lines) > 0) - self.assertTrue(lines[-1].strip(), 'test_pkg') - finally: - pkg_resources.__setstate__(pr_state) - - -def create_setup_requires_package(path): - """Creates a source tree under path for a trivial test package that has a - single requirement in setup_requires--a tarball for that requirement is - also created and added to the dependency_links argument. - """ - - test_setup_attrs = { - 'name': 'test_pkg', 'version': '0.0', - 'setup_requires': ['foobar==0.1'], - 'dependency_links': [os.path.abspath(path)] - } - - test_pkg = os.path.join(path, 'test_pkg') - test_setup_py = os.path.join(test_pkg, 'setup.py') - os.mkdir(test_pkg) - - f = open(test_setup_py, 'w') - f.write(textwrap.dedent("""\ - import setuptools - setuptools.setup(**%r) - """ % test_setup_attrs)) - f.close() - - foobar_path = os.path.join(path, 'foobar-0.1.tar.gz') - make_trivial_sdist( - foobar_path, - textwrap.dedent("""\ - import setuptools - setuptools.setup( - name='foobar', - version='0.1' - ) - """)) - - return test_pkg - - -def make_trivial_sdist(dist_path, setup_py): - """Create a simple sdist tarball at dist_path, containing just a - setup.py, the contents of which are provided by the setup_py string. 
- """ - - setup_py_file = tarfile.TarInfo(name='setup.py') - try: - # Python 3 (StringIO gets converted to io module) - MemFile = BytesIO - except AttributeError: - MemFile = StringIO - setup_py_bytes = MemFile(setup_py.encode('utf-8')) - setup_py_file.size = len(setup_py_bytes.getvalue()) - dist = tarfile.open(dist_path, 'w:gz') - try: - dist.addfile(setup_py_file, fileobj=setup_py_bytes) - finally: - dist.close() - - -@contextlib.contextmanager -def tempdir_context(cd=lambda dir:None): - temp_dir = tempfile.mkdtemp() - orig_dir = os.getcwd() - try: - cd(temp_dir) - yield temp_dir - finally: - cd(orig_dir) - shutil.rmtree(temp_dir) - -@contextlib.contextmanager -def environment_context(**updates): - old_env = os.environ.copy() - os.environ.update(updates) - try: - yield - finally: - for key in updates: - del os.environ[key] - os.environ.update(old_env) - -@contextlib.contextmanager -def argv_context(repl): - old_argv = sys.argv[:] - sys.argv[:] = repl - yield - sys.argv[:] = old_argv - -@contextlib.contextmanager -def reset_setup_stop_context(): - """ - When the setuptools tests are run using setup.py test, and then - one wants to invoke another setup() command (such as easy_install) - within those tests, it's necessary to reset the global variable - in distutils.core so that the setup() command will run naturally. - """ - setup_stop_after = distutils.core._setup_stop_after - distutils.core._setup_stop_after = None - yield - distutils.core._setup_stop_after = setup_stop_after - - -@contextlib.contextmanager -def quiet_context(): - """ - Redirect stdout/stderr to StringIO objects to prevent console output from - distutils commands. 
- """ - - old_stdout = sys.stdout - old_stderr = sys.stderr - new_stdout = sys.stdout = StringIO() - new_stderr = sys.stderr = StringIO() - try: - yield new_stdout, new_stderr - finally: - new_stdout.seek(0) - new_stderr.seek(0) - sys.stdout = old_stdout - sys.stderr = old_stderr diff --git a/libs/setuptools-2.2/setuptools/tests/test_egg_info.py b/libs/setuptools-2.2/setuptools/tests/test_egg_info.py deleted file mode 100644 index 2785436..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_egg_info.py +++ /dev/null @@ -1,173 +0,0 @@ - -import os -import sys -import tempfile -import shutil -import unittest - -import pkg_resources -import warnings -from setuptools.command import egg_info -from setuptools import svn_utils -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -ENTRIES_V10 = pkg_resources.resource_string(__name__, 'entries-v10') -"An entries file generated with svn 1.6.17 against the legacy Setuptools repo" - - -class TestEggInfo(unittest.TestCase): - - def setUp(self): - self.test_dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.test_dir, '.svn')) - - self.old_cwd = os.getcwd() - os.chdir(self.test_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.test_dir) - - def _write_entries(self, entries): - fn = os.path.join(self.test_dir, '.svn', 'entries') - entries_f = open(fn, 'wb') - entries_f.write(entries) - entries_f.close() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_version_10_format(self): - """ - """ - #keeping this set for 1.6 is a good check on the get_svn_revision - #to ensure I return using svnversion what would had been returned - version_str = svn_utils.SvnInfo.get_svn_version() - version = [int(x) for x in version_str.split('.')[:2]] - if version != [1, 6]: - if hasattr(self, 'skipTest'): - self.skipTest('') - else: - sys.stderr.write('\n Skipping due to SVN Version\n') - return - - self._write_entries(ENTRIES_V10) - rev 
= egg_info.egg_info.get_svn_revision() - self.assertEqual(rev, '89000') - - def test_version_10_format_legacy_parser(self): - """ - """ - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable: - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - #catch_warnings not available until py26 - warning_filters = warnings.filters - warnings.filters = warning_filters[:] - try: - warnings.simplefilter("ignore", DeprecationWarning) - self._write_entries(ENTRIES_V10) - rev = egg_info.egg_info.get_svn_revision() - finally: - #restore the warning filters - warnings.filters = warning_filters - #restore the os path - if path_variable: - os.environ[path_variable] = old_path - - self.assertEqual(rev, '89000') - -DUMMY_SOURCE_TXT = """CHANGES.txt -CONTRIBUTORS.txt -HISTORY.txt -LICENSE -MANIFEST.in -README.txt -setup.py -dummy/__init__.py -dummy/test.txt -dummy.egg-info/PKG-INFO -dummy.egg-info/SOURCES.txt -dummy.egg-info/dependency_links.txt -dummy.egg-info/top_level.txt""" - - -class TestSvnDummy(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # None or Empty - return None - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummy, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=1) - if code: - raise AssertionError(data) - - sources = 
os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -class TestSvnDummyLegacy(environment.ZippedEnvironment): - - def setUp(self): - self.base_version = (1, 6) - self.dataname = "dummy%i%i" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvnDummyLegacy, self).setUp() - - def test_sources(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - path="", - data_stream=1) - if code: - raise AssertionError(data) - - sources = os.path.join('dummy.egg-info', 'SOURCES.txt') - infile = open(sources, 'r') - try: - read_contents = infile.read() - finally: - infile.close() - del infile - - self.assertEqual(DUMMY_SOURCE_TXT, read_contents) - - return data - - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/setuptools/tests/test_markerlib.py b/libs/setuptools-2.2/setuptools/tests/test_markerlib.py deleted file mode 100644 index dae71cb..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_markerlib.py +++ /dev/null @@ -1,68 +0,0 @@ -import os -import unittest -from setuptools.tests.py26compat import skipIf - -try: - import ast -except ImportError: - pass - -class TestMarkerlib(unittest.TestCase): - - @skipIf('ast' not in globals(), - "ast not available (Python < 2.6?)") - def test_markers(self): - from _markerlib import interpret, default_environment, compile - - os_name = os.name - - self.assertTrue(interpret("")) - - self.assertTrue(interpret("os.name != 'buuuu'")) - self.assertTrue(interpret("os_name != 'buuuu'")) - self.assertTrue(interpret("python_version > '1.0'")) - self.assertTrue(interpret("python_version < '5.0'")) - self.assertTrue(interpret("python_version <= '5.0'")) - self.assertTrue(interpret("python_version >= 
'1.0'")) - self.assertTrue(interpret("'%s' in os.name" % os_name)) - self.assertTrue(interpret("'%s' in os_name" % os_name)) - self.assertTrue(interpret("'buuuu' not in os.name")) - - self.assertFalse(interpret("os.name == 'buuuu'")) - self.assertFalse(interpret("os_name == 'buuuu'")) - self.assertFalse(interpret("python_version < '1.0'")) - self.assertFalse(interpret("python_version > '5.0'")) - self.assertFalse(interpret("python_version >= '5.0'")) - self.assertFalse(interpret("python_version <= '1.0'")) - self.assertFalse(interpret("'%s' not in os.name" % os_name)) - self.assertFalse(interpret("'buuuu' in os.name and python_version >= '5.0'")) - self.assertFalse(interpret("'buuuu' in os_name and python_version >= '5.0'")) - - environment = default_environment() - environment['extra'] = 'test' - self.assertTrue(interpret("extra == 'test'", environment)) - self.assertFalse(interpret("extra == 'doc'", environment)) - - def raises_nameError(): - try: - interpret("python.version == '42'") - except NameError: - pass - else: - raise Exception("Expected NameError") - - raises_nameError() - - def raises_syntaxError(): - try: - interpret("(x for x in (4,))") - except SyntaxError: - pass - else: - raise Exception("Expected SyntaxError") - - raises_syntaxError() - - statement = "python_version == '5'" - self.assertEqual(compile(statement).__doc__, statement) - diff --git a/libs/setuptools-2.2/setuptools/tests/test_packageindex.py b/libs/setuptools-2.2/setuptools/tests/test_packageindex.py deleted file mode 100644 index 664566a..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_packageindex.py +++ /dev/null @@ -1,203 +0,0 @@ -"""Package Index Tests -""" -import sys -import os -import unittest -import pkg_resources -from setuptools.compat import urllib2, httplib, HTTPError, unicode, pathname2url -import distutils.errors -import setuptools.package_index -from setuptools.tests.server import IndexServer - -class TestPackageIndex(unittest.TestCase): - - def 
test_bad_url_bad_port(self): - index = setuptools.package_index.PackageIndex() - url = 'http://127.0.0.1:0/nonesuch/test_package_index' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_typo(self): - # issue 16 - # easy_install inquant.contentmirror.plone breaks because of a typo - # in its home URL - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - url = 'url:%20https://svn.plone.org/svn/collective/inquant.contentmirror.plone/trunk' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue(url in str(v)) - else: - self.assertTrue(isinstance(v, HTTPError)) - - def test_bad_url_bad_status_line(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - def _urlopen(*args): - raise httplib.BadStatusLine('line') - - index.opener = _urlopen - url = 'http://example.com' - try: - v = index.open_url(url) - except Exception: - v = sys.exc_info()[1] - self.assertTrue('line' in str(v)) - else: - raise AssertionError('Should have raise here!') - - def test_bad_url_double_scheme(self): - """ - A bad URL with a double scheme should raise a DistutilsError. 
- """ - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue 20 - url = 'http://http://svn.pythonpaste.org/Paste/wphp/trunk' - try: - index.open_url(url) - except distutils.errors.DistutilsError: - error = sys.exc_info()[1] - msg = unicode(error) - assert 'nonnumeric port' in msg or 'getaddrinfo failed' in msg or 'Name or service not known' in msg - return - raise RuntimeError("Did not raise") - - def test_bad_url_screwy_href(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - - # issue #160 - if sys.version_info[0] == 2 and sys.version_info[1] == 7: - # this should not fail - url = 'http://example.com' - page = ('') - index.process_index(url, page) - - def test_url_ok(self): - index = setuptools.package_index.PackageIndex( - hosts=('www.example.com',) - ) - url = 'file:///tmp/test_package_index' - self.assertTrue(index.url_ok(url, True)) - - def test_links_priority(self): - """ - Download links from the pypi simple index should be used before - external download links. - https://bitbucket.org/tarek/distribute/issue/163 - - Usecase : - - someone uploads a package on pypi, a md5 is generated - - someone manually copies this link (with the md5 in the url) onto an - external page accessible from the package page. - - someone reuploads the package (with a different md5) - - while easy_installing, an MD5 error occurs because the external link - is used - -> Setuptools should use the link from pypi, not the external one. 
- """ - if sys.platform.startswith('java'): - # Skip this test on jython because binding to :0 fails - return - - # start an index server - server = IndexServer() - server.start() - index_url = server.base_url() + 'test_links_priority/simple/' - - # scan a test index - pi = setuptools.package_index.PackageIndex(index_url) - requirement = pkg_resources.Requirement.parse('foobar') - pi.find_packages(requirement) - server.stop() - - # the distribution has been found - self.assertTrue('foobar' in pi) - # we have only one link, because links are compared without md5 - self.assertTrue(len(pi['foobar'])==1) - # the link should be from the index - self.assertTrue('correct_md5' in pi['foobar'][0].location) - - def test_parse_bdist_wininst(self): - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32-py2.4.exe'), ('reportlab-2.5', '2.4', 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win32.exe'), ('reportlab-2.5', None, 'win32')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64-py2.7.exe'), ('reportlab-2.5', '2.7', 'win-amd64')) - self.assertEqual(setuptools.package_index.parse_bdist_wininst( - 'reportlab-2.5.win-amd64.exe'), ('reportlab-2.5', None, 'win-amd64')) - - def test__vcs_split_rev_from_url(self): - """ - Test the basic usage of _vcs_split_rev_from_url - """ - vsrfu = setuptools.package_index.PackageIndex._vcs_split_rev_from_url - url, rev = vsrfu('https://example.com/bar@2995') - self.assertEqual(url, 'https://example.com/bar') - self.assertEqual(rev, '2995') - - def test_local_index(self): - """ - local_open should be able to read an index from the file system. - """ - f = open('index.html', 'w') - f.write('
    content
    ') - f.close() - try: - url = 'file:' + pathname2url(os.getcwd()) + '/' - res = setuptools.package_index.local_open(url) - finally: - os.remove('index.html') - assert 'content' in res.read() - - -class TestContentCheckers(unittest.TestCase): - - def test_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - checker.feed('You should probably not be using MD5'.encode('ascii')) - self.assertEqual(checker.hash.hexdigest(), - 'f12895fdffbd45007040d2e44df98478') - self.assertTrue(checker.is_valid()) - - def test_other_fragment(self): - "Content checks should succeed silently if no hash is present" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#something%20completely%20different') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_blank_md5(self): - "Content checks should succeed if a hash is empty" - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=') - checker.feed('anything'.encode('ascii')) - self.assertTrue(checker.is_valid()) - - def test_get_hash_name_md5(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - self.assertEqual(checker.hash_name, 'md5') - - def test_report(self): - checker = setuptools.package_index.HashChecker.from_url( - 'http://foo/bar#md5=f12895fdffbd45007040d2e44df98478') - rep = checker.report(lambda x: x, 'My message about %s') - self.assertEqual(rep, 'My message about md5') diff --git a/libs/setuptools-2.2/setuptools/tests/test_resources.py b/libs/setuptools-2.2/setuptools/tests/test_resources.py deleted file mode 100644 index c9fcf76..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_resources.py +++ /dev/null @@ -1,620 +0,0 @@ -#!/usr/bin/python -# -*- coding: utf-8 -*- -# NOTE: the shebang and encoding lines are for ScriptHeaderTests do not remove - -import os -import sys -import 
tempfile -import shutil -from unittest import TestCase - -import pkg_resources -from pkg_resources import (parse_requirements, VersionConflict, parse_version, - Distribution, EntryPoint, Requirement, safe_version, safe_name, - WorkingSet) - -from setuptools.command.easy_install import (get_script_header, is_sh, - nt_quote_arg) -from setuptools.compat import StringIO, iteritems - -try: - frozenset -except NameError: - from sets import ImmutableSet as frozenset - -def safe_repr(obj, short=False): - """ copied from Python2.7""" - try: - result = repr(obj) - except Exception: - result = object.__repr__(obj) - if not short or len(result) < pkg_resources._MAX_LENGTH: - return result - return result[:pkg_resources._MAX_LENGTH] + ' [truncated]...' - -class Metadata(pkg_resources.EmptyProvider): - """Mock object to return metadata as if from an on-disk distribution""" - - def __init__(self,*pairs): - self.metadata = dict(pairs) - - def has_metadata(self,name): - return name in self.metadata - - def get_metadata(self,name): - return self.metadata[name] - - def get_metadata_lines(self,name): - return pkg_resources.yield_lines(self.get_metadata(name)) - -dist_from_fn = pkg_resources.Distribution.from_filename - -class DistroTests(TestCase): - - def testCollection(self): - # empty path should produce no distributions - ad = pkg_resources.Environment([], platform=None, python=None) - self.assertEqual(list(ad), []) - self.assertEqual(ad['FooPkg'],[]) - ad.add(dist_from_fn("FooPkg-1.3_1.egg")) - ad.add(dist_from_fn("FooPkg-1.4-py2.4-win32.egg")) - ad.add(dist_from_fn("FooPkg-1.2-py2.4.egg")) - - # Name is in there now - self.assertTrue(ad['FooPkg']) - # But only 1 package - self.assertEqual(list(ad), ['foopkg']) - - # Distributions sort by version - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2'] - ) - # Removing a distribution leaves sequence alone - ad.remove(ad['FooPkg'][1]) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], 
['1.4','1.2'] - ) - # And inserting adds them in order - ad.add(dist_from_fn("FooPkg-1.9.egg")) - self.assertEqual( - [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2'] - ) - - ws = WorkingSet([]) - foo12 = dist_from_fn("FooPkg-1.2-py2.4.egg") - foo14 = dist_from_fn("FooPkg-1.4-py2.4-win32.egg") - req, = parse_requirements("FooPkg>=1.3") - - # Nominal case: no distros on path, should yield all applicable - self.assertEqual(ad.best_match(req,ws).version, '1.9') - # If a matching distro is already installed, should return only that - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - # If the first matching distro is unsuitable, it's a version conflict - ws = WorkingSet([]) - ws.add(foo12) - ws.add(foo14) - self.assertRaises(VersionConflict, ad.best_match, req, ws) - - # If more than one match on the path, the first one takes precedence - ws = WorkingSet([]) - ws.add(foo14) - ws.add(foo12) - ws.add(foo14) - self.assertEqual(ad.best_match(req,ws).version, '1.4') - - def checkFooPkg(self,d): - self.assertEqual(d.project_name, "FooPkg") - self.assertEqual(d.key, "foopkg") - self.assertEqual(d.version, "1.3-1") - self.assertEqual(d.py_version, "2.4") - self.assertEqual(d.platform, "win32") - self.assertEqual(d.parsed_version, parse_version("1.3-1")) - - def testDistroBasics(self): - d = Distribution( - "/some/path", - project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32" - ) - self.checkFooPkg(d) - - d = Distribution("/some/path") - self.assertEqual(d.py_version, sys.version[:3]) - self.assertEqual(d.platform, None) - - def testDistroParse(self): - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg") - self.checkFooPkg(d) - d = dist_from_fn("FooPkg-1.3_1-py2.4-win32.egg-info") - self.checkFooPkg(d) - - def testDistroMetadata(self): - d = Distribution( - "/some/path", project_name="FooPkg", py_version="2.4", platform="win32", - metadata = Metadata( - ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n") - ) - ) - 
self.checkFooPkg(d) - - def distRequires(self, txt): - return Distribution("/foo", metadata=Metadata(('depends.txt', txt))) - - def checkRequires(self, dist, txt, extras=()): - self.assertEqual( - list(dist.requires(extras)), - list(parse_requirements(txt)) - ) - - def testDistroDependsSimple(self): - for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0": - self.checkRequires(self.distRequires(v), v) - - def testResolve(self): - ad = pkg_resources.Environment([]) - ws = WorkingSet([]) - # Resolving no requirements -> nothing to install - self.assertEqual(list(ws.resolve([],ad)), []) - # Request something not in the collection -> DistributionNotFound - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad - ) - Foo = Distribution.from_filename( - "/foo_dir/Foo-1.2.egg", - metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0")) - ) - ad.add(Foo) - ad.add(Distribution.from_filename("Foo-0.9.egg")) - - # Request thing(s) that are available -> list to activate - for i in range(3): - targets = list(ws.resolve(parse_requirements("Foo"), ad)) - self.assertEqual(targets, [Foo]) - list(map(ws.add,targets)) - self.assertRaises(VersionConflict, ws.resolve, - parse_requirements("Foo==0.9"), ad) - ws = WorkingSet([]) # reset - - # Request an extra that causes an unresolved dependency for "Baz" - self.assertRaises( - pkg_resources.DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad - ) - Baz = Distribution.from_filename( - "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo")) - ) - ad.add(Baz) - - # Activation list now includes resolved dependency - self.assertEqual( - list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz] - ) - # Requests for conflicting versions produce VersionConflict - self.assertRaises(VersionConflict, - ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad) - - def testDistroDependsOptions(self): - d = self.distRequires(""" - Twisted>=1.5 - [docgen] - ZConfig>=2.0 - 
docutils>=0.3 - [fastcgi] - fcgiapp>=0.1""") - self.checkRequires(d,"Twisted>=1.5") - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(), - ["docgen","fastcgi"] - ) - self.checkRequires( - d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(), - ["fastcgi", "docgen"] - ) - self.assertRaises(pkg_resources.UnknownExtra, d.requires, ["foo"]) - - -class EntryPointTests(TestCase): - - def assertfields(self, ep): - self.assertEqual(ep.name,"foo") - self.assertEqual(ep.module_name,"setuptools.tests.test_resources") - self.assertEqual(ep.attrs, ("EntryPointTests",)) - self.assertEqual(ep.extras, ("x",)) - self.assertTrue(ep.load() is EntryPointTests) - self.assertEqual( - str(ep), - "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ) - - def setUp(self): - self.dist = Distribution.from_filename( - "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]'))) - - def testBasics(self): - ep = EntryPoint( - "foo", "setuptools.tests.test_resources", ["EntryPointTests"], - ["x"], self.dist - ) - self.assertfields(ep) - - def testParse(self): - s = "foo = setuptools.tests.test_resources:EntryPointTests [x]" - ep = EntryPoint.parse(s, self.dist) - self.assertfields(ep) - - ep = EntryPoint.parse("bar baz= spammity[PING]") - self.assertEqual(ep.name,"bar baz") - self.assertEqual(ep.module_name,"spammity") - self.assertEqual(ep.attrs, ()) - self.assertEqual(ep.extras, ("ping",)) - - ep = EntryPoint.parse(" fizzly = wocka:foo") - self.assertEqual(ep.name,"fizzly") - self.assertEqual(ep.module_name,"wocka") - self.assertEqual(ep.attrs, ("foo",)) - self.assertEqual(ep.extras, ()) - - def testRejects(self): - for ep in [ - "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2", - ]: - try: EntryPoint.parse(ep) - except ValueError: pass - else: 
raise AssertionError("Should've been bad", ep) - - def checkSubMap(self, m): - self.assertEqual(len(m), len(self.submap_expect)) - for key, ep in iteritems(self.submap_expect): - self.assertEqual(repr(m.get(key)), repr(ep)) - - submap_expect = dict( - feature1=EntryPoint('feature1', 'somemodule', ['somefunction']), - feature2=EntryPoint('feature2', 'another.module', ['SomeClass'], ['extra1','extra2']), - feature3=EntryPoint('feature3', 'this.module', extras=['something']) - ) - submap_str = """ - # define features for blah blah - feature1 = somemodule:somefunction - feature2 = another.module:SomeClass [extra1,extra2] - feature3 = this.module [something] - """ - - def testParseList(self): - self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str)) - self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar") - self.assertRaises(ValueError, EntryPoint.parse_group, "x", - ["foo=baz", "foo=bar"]) - - def testParseMap(self): - m = EntryPoint.parse_map({'xyz':self.submap_str}) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - m = EntryPoint.parse_map("[xyz]\n"+self.submap_str) - self.checkSubMap(m['xyz']) - self.assertEqual(list(m.keys()),['xyz']) - self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"]) - self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str) - -class RequirementsTests(TestCase): - - def testBasics(self): - r = Requirement.parse("Twisted>=1.2") - self.assertEqual(str(r),"Twisted>=1.2") - self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')") - self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ())) - self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ())) - self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ())) - self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2")) - - def testOrdering(self): - r1 = 
Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ()) - r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ()) - self.assertEqual(r1,r2) - self.assertEqual(str(r1),str(r2)) - self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2") - - def testBasicContains(self): - r = Requirement("Twisted", [('>=','1.2')], ()) - foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg") - twist11 = Distribution.from_filename("Twisted-1.1.egg") - twist12 = Distribution.from_filename("Twisted-1.2.egg") - self.assertTrue(parse_version('1.2') in r) - self.assertTrue(parse_version('1.1') not in r) - self.assertTrue('1.2' in r) - self.assertTrue('1.1' not in r) - self.assertTrue(foo_dist not in r) - self.assertTrue(twist11 not in r) - self.assertTrue(twist12 in r) - - def testAdvancedContains(self): - r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5") - for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'): - self.assertTrue(v in r, (v,r)) - for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'): - self.assertTrue(v not in r, (v,r)) - - def testOptionsAndHashing(self): - r1 = Requirement.parse("Twisted[foo,bar]>=1.2") - r2 = Requirement.parse("Twisted[bar,FOO]>=1.2") - r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0") - self.assertEqual(r1,r2) - self.assertEqual(r1,r3) - self.assertEqual(r1.extras, ("foo","bar")) - self.assertEqual(r2.extras, ("bar","foo")) # extras are normalized - self.assertEqual(hash(r1), hash(r2)) - self.assertEqual( - hash(r1), hash(("twisted", ((">=",parse_version("1.2")),), - frozenset(["foo","bar"]))) - ) - - def testVersionEquality(self): - r1 = Requirement.parse("foo==0.3a2") - r2 = Requirement.parse("foo!=0.3a4") - d = Distribution.from_filename - - self.assertTrue(d("foo-0.3a4.egg") not in r1) - self.assertTrue(d("foo-0.3a1.egg") not in r1) - self.assertTrue(d("foo-0.3a4.egg") not in r2) - - self.assertTrue(d("foo-0.3a2.egg") in r1) - self.assertTrue(d("foo-0.3a2.egg") in r2) - 
self.assertTrue(d("foo-0.3a3.egg") in r2) - self.assertTrue(d("foo-0.3a5.egg") in r2) - - def testSetuptoolsProjectName(self): - """ - The setuptools project should implement the setuptools package. - """ - - self.assertEqual( - Requirement.parse('setuptools').project_name, 'setuptools') - # setuptools 0.7 and higher means setuptools. - self.assertEqual( - Requirement.parse('setuptools == 0.7').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools == 0.7a1').project_name, 'setuptools') - self.assertEqual( - Requirement.parse('setuptools >= 0.7').project_name, 'setuptools') - - -class ParseTests(TestCase): - - def testEmptyParse(self): - self.assertEqual(list(parse_requirements('')), []) - - def testYielding(self): - for inp,out in [ - ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']), - (['x\n\n','y'], ['x','y']), - ]: - self.assertEqual(list(pkg_resources.yield_lines(inp)),out) - - def testSplitting(self): - sample = """ - x - [Y] - z - - a - [b ] - # foo - c - [ d] - [q] - v - """ - self.assertEqual(list(pkg_resources.split_sections(sample)), - [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])] - ) - self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo")) - - def testSafeName(self): - self.assertEqual(safe_name("adns-python"), "adns-python") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("WSGI Utils"), "WSGI-Utils") - self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker") - self.assertNotEqual(safe_name("peak.web"), "peak-web") - - def testSafeVersion(self): - self.assertEqual(safe_version("1.2-1"), "1.2-1") - self.assertEqual(safe_version("1.2 alpha"), "1.2.alpha") - self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521") - self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker") - self.assertEqual(safe_version("peak.web"), "peak.web") - - def testSimpleRequirements(self): - self.assertEqual( - 
list(parse_requirements('Twis-Ted>=1.2-1')), - [Requirement('Twis-Ted',[('>=','1.2-1')], ())] - ) - self.assertEqual( - list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')), - [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())] - ) - self.assertEqual( - Requirement.parse("FooBar==1.99a3"), - Requirement("FooBar", [('==','1.99a3')], ()) - ) - self.assertRaises(ValueError,Requirement.parse,">=2.3") - self.assertRaises(ValueError,Requirement.parse,"x\\") - self.assertRaises(ValueError,Requirement.parse,"x==2 q") - self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2") - self.assertRaises(ValueError,Requirement.parse,"#") - - def testVersionEquality(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertEqual(p1,p2, (s1,s2,p1,p2)) - - c('1.2-rc1', '1.2rc1') - c('0.4', '0.4.0') - c('0.4.0.0', '0.4.0') - c('0.4.0-0', '0.4-0') - c('0pl1', '0.0pl1') - c('0pre1', '0.0c1') - c('0.0.0preview1', '0c1') - c('0.0c1', '0-rc1') - c('1.2a1', '1.2.a.1') - c('1.2...a', '1.2a') - - def testVersionOrdering(self): - def c(s1,s2): - p1, p2 = parse_version(s1),parse_version(s2) - self.assertTrue(p1= (3,) and os.environ.get("LC_CTYPE") - in (None, "C", "POSIX")): - return - - class java: - class lang: - class System: - @staticmethod - def getProperty(property): - return "" - sys.modules["java"] = java - - platform = sys.platform - sys.platform = 'java1.5.0_13' - stdout, stderr = sys.stdout, sys.stderr - try: - # A mock sys.executable that uses a shebang line (this file) - exe = os.path.normpath(os.path.splitext(__file__)[0] + '.py') - self.assertEqual( - get_script_header('#!/usr/local/bin/python', executable=exe), - '#!/usr/bin/env %s\n' % exe) - - # Ensure we generate what is basically a broken shebang line - # when there's options, with a warning emitted - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python -x', - executable=exe), - '#!%s -x\n' % exe) - self.assertTrue('Unable to adapt shebang line' in 
sys.stdout.getvalue()) - sys.stdout = sys.stderr = StringIO() - self.assertEqual(get_script_header('#!/usr/bin/python', - executable=self.non_ascii_exe), - '#!%s -x\n' % self.non_ascii_exe) - self.assertTrue('Unable to adapt shebang line' in sys.stdout.getvalue()) - finally: - del sys.modules["java"] - sys.platform = platform - sys.stdout, sys.stderr = stdout, stderr - - -class NamespaceTests(TestCase): - - def setUp(self): - self._ns_pkgs = pkg_resources._namespace_packages.copy() - self._tmpdir = tempfile.mkdtemp(prefix="tests-setuptools-") - os.makedirs(os.path.join(self._tmpdir, "site-pkgs")) - self._prev_sys_path = sys.path[:] - sys.path.append(os.path.join(self._tmpdir, "site-pkgs")) - - def tearDown(self): - shutil.rmtree(self._tmpdir) - pkg_resources._namespace_packages = self._ns_pkgs.copy() - sys.path = self._prev_sys_path[:] - - def _assertIn(self, member, container): - """ assertIn and assertTrue does not exist in Python2.3""" - if member not in container: - standardMsg = '%s not found in %s' % (safe_repr(member), - safe_repr(container)) - self.fail(self._formatMessage(msg, standardMsg)) - - def test_two_levels_deep(self): - """ - Test nested namespace packages - Create namespace packages in the following tree : - site-packages-1/pkg1/pkg2 - site-packages-2/pkg1/pkg2 - Check both are in the _namespace_packages dict and that their __path__ - is correct - """ - sys.path.append(os.path.join(self._tmpdir, "site-pkgs2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2")) - os.makedirs(os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")) - ns_str = "__import__('pkg_resources').declare_namespace(__name__)\n" - for site in ["site-pkgs", "site-pkgs2"]: - pkg1_init = open(os.path.join(self._tmpdir, site, - "pkg1", "__init__.py"), "w") - pkg1_init.write(ns_str) - pkg1_init.close() - pkg2_init = open(os.path.join(self._tmpdir, site, - "pkg1", "pkg2", "__init__.py"), "w") - pkg2_init.write(ns_str) - pkg2_init.close() - import pkg1 - 
self._assertIn("pkg1", pkg_resources._namespace_packages.keys()) - try: - import pkg1.pkg2 - except ImportError: - self.fail("Setuptools tried to import the parent namespace package") - # check the _namespace_packages dict - self._assertIn("pkg1.pkg2", pkg_resources._namespace_packages.keys()) - self.assertEqual(pkg_resources._namespace_packages["pkg1"], ["pkg1.pkg2"]) - # check the __path__ attribute contains both paths - self.assertEqual(pkg1.pkg2.__path__, [ - os.path.join(self._tmpdir, "site-pkgs", "pkg1", "pkg2"), - os.path.join(self._tmpdir, "site-pkgs2", "pkg1", "pkg2")]) diff --git a/libs/setuptools-2.2/setuptools/tests/test_sandbox.py b/libs/setuptools-2.2/setuptools/tests/test_sandbox.py deleted file mode 100644 index 3dad137..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_sandbox.py +++ /dev/null @@ -1,79 +0,0 @@ -"""develop tests -""" -import sys -import os -import shutil -import unittest -import tempfile -import types - -import pkg_resources -import setuptools.sandbox -from setuptools.sandbox import DirectorySandbox, SandboxViolation - -def has_win32com(): - """ - Run this to determine if the local machine has win32com, and if it - does, include additional tests. - """ - if not sys.platform.startswith('win32'): - return False - try: - mod = __import__('win32com') - except ImportError: - return False - return True - -class TestSandbox(unittest.TestCase): - - def setUp(self): - self.dir = tempfile.mkdtemp() - - def tearDown(self): - shutil.rmtree(self.dir) - - def test_devnull(self): - if sys.version < '2.4': - return - sandbox = DirectorySandbox(self.dir) - sandbox.run(self._file_writer(os.devnull)) - - def _file_writer(path): - def do_write(): - f = open(path, 'w') - f.write('xxx') - f.close() - return do_write - - _file_writer = staticmethod(_file_writer) - - if has_win32com(): - def test_win32com(self): - """ - win32com should not be prevented from caching COM interfaces - in gen_py. 
- """ - import win32com - gen_py = win32com.__gen_path__ - target = os.path.join(gen_py, 'test_write') - sandbox = DirectorySandbox(self.dir) - try: - try: - sandbox.run(self._file_writer(target)) - except SandboxViolation: - self.fail("Could not create gen_py file due to SandboxViolation") - finally: - if os.path.exists(target): os.remove(target) - - def test_setup_py_with_BOM(self): - """ - It should be possible to execute a setup.py with a Byte Order Mark - """ - target = pkg_resources.resource_filename(__name__, - 'script-with-bom.py') - namespace = types.ModuleType('namespace') - setuptools.sandbox.execfile(target, vars(namespace)) - assert namespace.result == 'passed' - -if __name__ == '__main__': - unittest.main() diff --git a/libs/setuptools-2.2/setuptools/tests/test_sdist.py b/libs/setuptools-2.2/setuptools/tests/test_sdist.py deleted file mode 100644 index 71d1075..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_sdist.py +++ /dev/null @@ -1,535 +0,0 @@ -# -*- coding: utf-8 -*- -"""sdist tests""" - -import locale -import os -import shutil -import sys -import tempfile -import unittest -import unicodedata -import re -from setuptools.tests import environment, test_svn -from setuptools.tests.py26compat import skipIf - -from setuptools.compat import StringIO, unicode -from setuptools.tests.py26compat import skipIf -from setuptools.command.sdist import sdist, walk_revctrl -from setuptools.command.egg_info import manifest_maker -from setuptools.dist import Distribution -from setuptools import svn_utils - -SETUP_ATTRS = { - 'name': 'sdist_test', - 'version': '0.0', - 'packages': ['sdist_test'], - 'package_data': {'sdist_test': ['*.txt']} -} - - -SETUP_PY = """\ -from setuptools import setup - -setup(**%r) -""" % SETUP_ATTRS - - -if sys.version_info >= (3,): - LATIN1_FILENAME = 'smörbröd.py'.encode('latin-1') -else: - LATIN1_FILENAME = 'sm\xf6rbr\xf6d.py' - - -# Cannot use context manager because of Python 2.4 -def quiet(): - global old_stdout, old_stderr - 
old_stdout, old_stderr = sys.stdout, sys.stderr - sys.stdout, sys.stderr = StringIO(), StringIO() - -def unquiet(): - sys.stdout, sys.stderr = old_stdout, old_stderr - - -# Fake byte literals for Python <= 2.5 -def b(s, encoding='utf-8'): - if sys.version_info >= (3,): - return s.encode(encoding) - return s - - -# Convert to POSIX path -def posix(path): - if sys.version_info >= (3,) and not isinstance(path, str): - return path.replace(os.sep.encode('ascii'), b('/')) - else: - return path.replace(os.sep, '/') - - -# HFS Plus uses decomposed UTF-8 -def decompose(path): - if isinstance(path, unicode): - return unicodedata.normalize('NFD', path) - try: - path = path.decode('utf-8') - path = unicodedata.normalize('NFD', path) - path = path.encode('utf-8') - except UnicodeError: - pass # Not UTF-8 - return path - - -class TestSdistTest(unittest.TestCase): - - def setUp(self): - self.temp_dir = tempfile.mkdtemp() - f = open(os.path.join(self.temp_dir, 'setup.py'), 'w') - f.write(SETUP_PY) - f.close() - # Set up the rest of the test package - test_pkg = os.path.join(self.temp_dir, 'sdist_test') - os.mkdir(test_pkg) - # *.rst was not included in package_data, so c.rst should not be - # automatically added to the manifest when not under version control - for fname in ['__init__.py', 'a.txt', 'b.txt', 'c.rst']: - # Just touch the files; their contents are irrelevant - open(os.path.join(test_pkg, fname), 'w').close() - - self.old_cwd = os.getcwd() - os.chdir(self.temp_dir) - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.temp_dir) - - def test_package_data_in_sdist(self): - """Regression test for pull request #4: ensures that files listed in - package_data are included in the manifest even if they're not added to - version control. 
- """ - - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # squelch output - quiet() - try: - cmd.run() - finally: - unquiet() - - manifest = cmd.filelist.files - self.assertTrue(os.path.join('sdist_test', 'a.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'b.txt') in manifest) - self.assertTrue(os.path.join('sdist_test', 'c.rst') not in manifest) - - def test_manifest_is_written_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join('sdist_test', 'smörbröd.py') - - # Add UTF-8 filename and write manifest - quiet() - try: - mm.run() - mm.filelist.files.append(filename) - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - u_contents = contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - if sys.version_info >= (3,): - self.assertTrue(posix(filename) in u_contents) - else: - self.assertTrue(posix(filename) in contents) - - # Python 3 only - if sys.version_info >= (3,): - - def test_write_manifest_allows_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - - # Add filename and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The manifest should contain the UTF-8 filename - self.assertTrue(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertTrue(u_filename in mm.filelist.files) - - def test_write_manifest_skips_non_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - mm = manifest_maker(dist) - mm.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - os.mkdir('sdist_test.egg-info') - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - - # Add filename with surrogates and write manifest - quiet() - try: - mm.run() - u_filename = filename.decode('utf-8', 'surrogateescape') - mm.filelist.files.append(u_filename) - # Re-write manifest - mm.write_manifest() - finally: - unquiet() - - manifest = open(mm.manifest, 'rbU') - contents = manifest.read() - manifest.close() - - # The manifest should be UTF-8 encoded - try: - contents.decode('UTF-8') - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - - # The Latin-1 filename should have been skipped - self.assertFalse(posix(filename) in contents) - - # The filelist should have been updated as well - self.assertFalse(u_filename in mm.filelist.files) - - def test_manifest_is_read_with_utf8_encoding(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add UTF-8 filename to manifest - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - cmd.read_manifest() - finally: - unquiet() - - # The filelist should contain the UTF-8 filename - if sys.version_info >= (3,): - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - - # Python 3 only - if sys.version_info >= (3,): - - def test_read_manifest_skips_non_utf8_filenames(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Create manifest - quiet() - try: - cmd.run() - finally: - unquiet() - - # Add Latin-1 filename to manifest - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - cmd.manifest = os.path.join('sdist_test.egg-info', 'SOURCES.txt') - manifest = open(cmd.manifest, 'ab') - manifest.write(b('\n')+filename) - manifest.close() - - # The file must exist to be included in the filelist - open(filename, 'w').close() - - # Re-read manifest - cmd.filelist.files = [] - quiet() - try: - try: - cmd.read_manifest() - except UnicodeDecodeError: - e = sys.exc_info()[1] - self.fail(e) - finally: - unquiet() - - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - - @skipIf(sys.version_info >= (3,) and locale.getpreferredencoding() != 'UTF-8', - 'Unittest fails if locale is not utf-8 but the manifests is recorded correctly') - def test_sdist_with_utf8_encoded_filename(self): - # Test for #303. 
- dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # UTF-8 filename - filename = os.path.join(b('sdist_test'), b('smörbröd.py')) - open(filename, 'w').close() - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.platform == 'darwin': - filename = decompose(filename) - - if sys.version_info >= (3,): - fs_enc = sys.getfilesystemencoding() - - if sys.platform == 'win32': - if fs_enc == 'cp1252': - # Python 3 mangles the UTF-8 filename - filename = filename.decode('cp1252') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('mbcs') - self.assertTrue(filename in cmd.filelist.files) - else: - filename = filename.decode('utf-8') - self.assertTrue(filename in cmd.filelist.files) - else: - self.assertTrue(filename in cmd.filelist.files) - - def test_sdist_with_latin1_encoded_filename(self): - # Test for #303. - dist = Distribution(SETUP_ATTRS) - dist.script_name = 'setup.py' - cmd = sdist(dist) - cmd.ensure_finalized() - - # Latin-1 filename - filename = os.path.join(b('sdist_test'), LATIN1_FILENAME) - open(filename, 'w').close() - self.assertTrue(os.path.isfile(filename)) - - quiet() - try: - cmd.run() - finally: - unquiet() - - if sys.version_info >= (3,): - #not all windows systems have a default FS encoding of cp1252 - if sys.platform == 'win32': - # Latin-1 is similar to Windows-1252 however - # on mbcs filesys it is not in latin-1 encoding - fs_enc = sys.getfilesystemencoding() - if fs_enc == 'mbcs': - filename = filename.decode('mbcs') - else: - filename = filename.decode('latin-1') - - self.assertTrue(filename in cmd.filelist.files) - else: - # The Latin-1 filename should have been skipped - filename = filename.decode('latin-1') - self.assertFalse(filename in cmd.filelist.files) - else: - # No conversion takes place under Python 2 and the file - # is included. We shall keep it that way for BBB. 
- self.assertTrue(filename in cmd.filelist.files) - - -class TestDummyOutput(environment.ZippedEnvironment): - - def setUp(self): - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', "dummy.zip") - self.dataname = "dummy" - super(TestDummyOutput, self).setUp() - - def _run(self): - code, data = environment.run_setup_py(["sdist"], - pypath=self.old_cwd, - data_stream=0) - if code: - info = "DIR: " + os.path.abspath('.') - info += "\n SDIST RETURNED: %i\n\n" % code - info += data - raise AssertionError(info) - - datalines = data.splitlines() - - possible = ( - "running sdist", - "running egg_info", - "creating dummy\.egg-info", - "writing dummy\.egg-info", - "writing top-level names to dummy\.egg-info", - "writing dependency_links to dummy\.egg-info", - "writing manifest file 'dummy\.egg-info", - "reading manifest file 'dummy\.egg-info", - "reading manifest template 'MANIFEST\.in'", - "writing manifest file 'dummy\.egg-info", - "creating dummy-0.1.1", - "making hard links in dummy-0\.1\.1", - "copying files to dummy-0\.1\.1", - "copying \S+ -> dummy-0\.1\.1", - "copying dummy", - "copying dummy\.egg-info", - "hard linking \S+ -> dummy-0\.1\.1", - "hard linking dummy", - "hard linking dummy\.egg-info", - "Writing dummy-0\.1\.1", - "creating dist", - "creating 'dist", - "Creating tar archive", - "running check", - "adding 'dummy-0\.1\.1", - "tar .+ dist/dummy-0\.1\.1\.tar dummy-0\.1\.1", - "gzip .+ dist/dummy-0\.1\.1\.tar", - "removing 'dummy-0\.1\.1' \\(and everything under it\\)", - ) - - print(" DIR: " + os.path.abspath('.')) - for line in datalines: - found = False - for pattern in possible: - if re.match(pattern, line): - print(" READ: " + line) - found = True - break - if not found: - raise AssertionError("Unexpexected: %s\n-in-\n%s" - % (line, data)) - - return data - - def test_sources(self): - self._run() - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # 
None or Empty - return - - self.base_version = tuple([int(x) for x in version.split('.')][:2]) - - if not self.base_version: - raise ValueError('No SVN tools installed') - elif self.base_version < (1, 3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1, 9): - #trying the latest version - self.base_version = (1, 8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not test_svn._svn_check, "No SVN to text, in the first place") - def test_walksvn(self): - if self.base_version >= (1, 6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - #TODO is this right - expected = set([ - os.path.join('a file'), - os.path.join(folder2, 'Changes.txt'), - os.path.join(folder2, 'MD5SUMS'), - os.path.join(folder2, 'README.txt'), - os.path.join(folder3, 'Changes.txt'), - os.path.join(folder3, 'MD5SUMS'), - os.path.join(folder3, 'README.txt'), - os.path.join(folder3, 'TODO.txt'), - os.path.join(folder3, 'fin'), - os.path.join('third_party', 'README.txt'), - os.path.join('folder', folder2, 'Changes.txt'), - os.path.join('folder', folder2, 'MD5SUMS'), - os.path.join('folder', folder2, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'Changes.txt'), - os.path.join('folder', folder3, 'fin'), - os.path.join('folder', folder3, 'MD5SUMS'), - os.path.join('folder', folder3, 'oops'), - os.path.join('folder', folder3, 'WatashiNiYomimasu.txt'), - os.path.join('folder', folder3, 'ZuMachen.txt'), - os.path.join('folder', 'third_party', 'WatashiNiYomimasu.txt'), - os.path.join('folder', 'lalala.txt'), - os.path.join('folder', 'quest.txt'), - # The example will have a deleted file - # (or should) but shouldn't return it - ]) - self.assertEqual(set(x for x in walk_revctrl()), expected) - - -def test_suite(): - return 
unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/setuptools/tests/test_svn.py b/libs/setuptools-2.2/setuptools/tests/test_svn.py deleted file mode 100644 index 3340036..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_svn.py +++ /dev/null @@ -1,245 +0,0 @@ -# -*- coding: utf-8 -*- -"""svn tests""" - -import io -import os -import subprocess -import sys -import unittest -from setuptools.tests import environment -from setuptools.compat import unicode, unichr - -from setuptools import svn_utils -from setuptools.tests.py26compat import skipIf - - -def _do_svn_check(): - try: - subprocess.check_call(["svn", "--version"], - shell=(sys.platform == 'win32')) - return True - except (OSError, subprocess.CalledProcessError): - return False -_svn_check = _do_svn_check() - - -class TestSvnVersion(unittest.TestCase): - - def test_no_svn_found(self): - path_variable = None - for env in os.environ: - if env.lower() == 'path': - path_variable = env - - if path_variable is None: - try: - self.skipTest('Cannot figure out how to modify path') - except AttributeError: # PY26 doesn't have this - return - - old_path = os.environ[path_variable] - os.environ[path_variable] = '' - try: - version = svn_utils.SvnInfo.get_svn_version() - self.assertEqual(version, '') - finally: - os.environ[path_variable] = old_path - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_svn_should_exist(self): - version = svn_utils.SvnInfo.get_svn_version() - self.assertNotEqual(version, '') - -def _read_utf8_file(path): - fileobj = None - try: - fileobj = io.open(path, 'r', encoding='utf-8') - data = fileobj.read() - return data - finally: - if fileobj: - fileobj.close() - - -class ParserInfoXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_info.xml') - #Remember these are pre-generated to test XML parsing - # so these paths might not valid on your 
system - example_base = "%s_example" % svn_name - - data = _read_utf8_file(path) - - expected = set([ - ("\\".join((example_base, 'a file')), 'file'), - ("\\".join((example_base, 'folder')), 'dir'), - ("\\".join((example_base, 'folder', 'lalala.txt')), 'file'), - ("\\".join((example_base, 'folder', 'quest.txt')), 'file'), - ]) - self.assertEqual(set(x for x in svn_utils.parse_dir_entries(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - -class ParserExternalXML(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.xml') - example_base = svn_name + '_example' - data = _read_utf8_file(path) - - if ext_spaces: - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.sep.join((example_base, folder2)), - os.sep.join((example_base, folder3)), - # folder is third_party大介 - os.sep.join((example_base, - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - os.sep.join((example_base, 'folder', folder2)), - os.sep.join((example_base, 'folder', folder3)), - os.sep.join((example_base, 'folder', - unicode('third_party') + - unichr(0x5927) + unichr(0x4ecb))), - ]) - - expected = set(os.path.normpath(x) for x in expected) - dir_base = os.sep.join(('C:', 'development', 'svn_example')) - self.assertEqual(set(x for x - in svn_utils.parse_externals_xml(data, dir_base)), expected) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - 
- def test_svn18(self): - self.parse_tester('svn18', True) - - -class ParseExternal(unittest.TestCase): - - def parse_tester(self, svn_name, ext_spaces): - path = os.path.join('setuptools', 'tests', - 'svn_data', svn_name + '_ext_list.txt') - data = _read_utf8_file(path) - - if ext_spaces: - expected = set(['third party2', 'third party3', - 'third party3b', 'third_party']) - else: - expected = set(['third_party2', 'third_party3', 'third_party']) - - self.assertEqual(set(x for x in svn_utils.parse_external_prop(data)), - expected) - - def test_svn13(self): - self.parse_tester('svn13', False) - - def test_svn14(self): - self.parse_tester('svn14', False) - - def test_svn15(self): - self.parse_tester('svn15', False) - - def test_svn16(self): - self.parse_tester('svn16', True) - - def test_svn17(self): - self.parse_tester('svn17', True) - - def test_svn18(self): - self.parse_tester('svn18', True) - - -class TestSvn(environment.ZippedEnvironment): - - def setUp(self): - version = svn_utils.SvnInfo.get_svn_version() - if not version: # empty or null - self.dataname = None - self.datafile = None - return - - self.base_version = tuple([int(x) for x in version.split('.')[:2]]) - - if self.base_version < (1,3): - raise ValueError('Insufficient SVN Version %s' % version) - elif self.base_version >= (1,9): - #trying the latest version - self.base_version = (1,8) - - self.dataname = "svn%i%i_example" % self.base_version - self.datafile = os.path.join('setuptools', 'tests', - 'svn_data', self.dataname + ".zip") - super(TestSvn, self).setUp() - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_revision(self): - rev = svn_utils.SvnInfo.load('.').get_revision() - self.assertEqual(rev, 6) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_entries(self): - expected = set([ - (os.path.join('a file'), 'file'), - (os.path.join('folder'), 'dir'), - (os.path.join('folder', 'lalala.txt'), 'file'), - (os.path.join('folder', 'quest.txt'), 
'file'), - #The example will have a deleted file (or should) - #but shouldn't return it - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set(x for x in info.entries), expected) - - @skipIf(not _svn_check, "No SVN to text, in the first place") - def test_externals(self): - if self.base_version >= (1,6): - folder2 = 'third party2' - folder3 = 'third party3' - else: - folder2 = 'third_party2' - folder3 = 'third_party3' - - expected = set([ - os.path.join(folder2), - os.path.join(folder3), - os.path.join('third_party'), - os.path.join('folder', folder2), - os.path.join('folder', folder3), - os.path.join('folder', 'third_party'), - ]) - info = svn_utils.SvnInfo.load('.') - self.assertEqual(set([x for x in info.externals]), expected) - -def test_suite(): - return unittest.defaultTestLoader.loadTestsFromName(__name__) diff --git a/libs/setuptools-2.2/setuptools/tests/test_test.py b/libs/setuptools-2.2/setuptools/tests/test_test.py deleted file mode 100644 index 7a06a40..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_test.py +++ /dev/null @@ -1,124 +0,0 @@ -# -*- coding: UTF-8 -*- - -"""develop tests -""" -import sys -import os, shutil, tempfile, unittest -import tempfile -import site - -from distutils.errors import DistutilsError -from setuptools.compat import StringIO -from setuptools.command.test import test -from setuptools.command import easy_install as easy_install_pkg -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', -) -""" - -NS_INIT = """# -*- coding: Latin-1 -*- -# Söme Arbiträry Ünicode to test Issüé 310 -try: - __import__('pkg_resources').declare_namespace(__name__) -except ImportError: - from pkgutil import extend_path - __path__ = extend_path(__path__, __name__) -""" -# Make sure this is Latin-1 binary, before writing: -if sys.version_info < (3,): - 
NS_INIT = NS_INIT.decode('UTF-8') -NS_INIT = NS_INIT.encode('Latin-1') - -TEST_PY = """import unittest - -class TestTest(unittest.TestCase): - def test_test(self): - print "Foo" # Should fail under Python 3 unless 2to3 is used - -test_suite = unittest.makeSuite(TestTest) -""" - -class TestTestTest(unittest.TestCase): - - def setUp(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - # Directory structure - self.dir = tempfile.mkdtemp() - os.mkdir(os.path.join(self.dir, 'name')) - os.mkdir(os.path.join(self.dir, 'name', 'space')) - os.mkdir(os.path.join(self.dir, 'name', 'space', 'tests')) - # setup.py - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'wt') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - # name/__init__.py - init = os.path.join(self.dir, 'name', '__init__.py') - f = open(init, 'wb') - f.write(NS_INIT) - f.close() - # name/space/__init__.py - init = os.path.join(self.dir, 'name', 'space', '__init__.py') - f = open(init, 'wt') - f.write('#empty\n') - f.close() - # name/space/tests/__init__.py - init = os.path.join(self.dir, 'name', 'space', 'tests', '__init__.py') - f = open(init, 'wt') - f.write(TEST_PY) - f.close() - - os.chdir(self.dir) - self.old_base = site.USER_BASE - site.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_test(self): - if sys.version < "2.6" or hasattr(sys, 'real_prefix'): - return - - dist = Distribution(dict( - name='foo', - packages=['name', 'name.space', 'name.space.tests'], - namespace_packages=['name'], - test_suite='name.space.tests.test_suite', - use_2to3=True, - )) - dist.script_name = 'setup.py' - cmd = test(dist) - cmd.user = 1 - 
cmd.ensure_finalized() - cmd.install_dir = site.USER_SITE - cmd.user = 1 - old_stdout = sys.stdout - sys.stdout = StringIO() - try: - try: # try/except/finally doesn't work in Python 2.4, so we need nested try-statements. - cmd.run() - except SystemExit: # The test runner calls sys.exit, stop that making an error. - pass - finally: - sys.stdout = old_stdout - diff --git a/libs/setuptools-2.2/setuptools/tests/test_upload_docs.py b/libs/setuptools-2.2/setuptools/tests/test_upload_docs.py deleted file mode 100644 index 769f16c..0000000 --- a/libs/setuptools-2.2/setuptools/tests/test_upload_docs.py +++ /dev/null @@ -1,72 +0,0 @@ -"""build_ext tests -""" -import sys, os, shutil, tempfile, unittest, site, zipfile -from setuptools.command.upload_docs import upload_docs -from setuptools.dist import Distribution - -SETUP_PY = """\ -from setuptools import setup - -setup(name='foo') -""" - -class TestUploadDocsTest(unittest.TestCase): - def setUp(self): - self.dir = tempfile.mkdtemp() - setup = os.path.join(self.dir, 'setup.py') - f = open(setup, 'w') - f.write(SETUP_PY) - f.close() - self.old_cwd = os.getcwd() - os.chdir(self.dir) - - self.upload_dir = os.path.join(self.dir, 'build') - os.mkdir(self.upload_dir) - - # A test document. - f = open(os.path.join(self.upload_dir, 'index.html'), 'w') - f.write("Hello world.") - f.close() - - # An empty folder. - os.mkdir(os.path.join(self.upload_dir, 'empty')) - - if sys.version >= "2.6": - self.old_base = site.USER_BASE - site.USER_BASE = upload_docs.USER_BASE = tempfile.mkdtemp() - self.old_site = site.USER_SITE - site.USER_SITE = upload_docs.USER_SITE = tempfile.mkdtemp() - - def tearDown(self): - os.chdir(self.old_cwd) - shutil.rmtree(self.dir) - if sys.version >= "2.6": - shutil.rmtree(site.USER_BASE) - shutil.rmtree(site.USER_SITE) - site.USER_BASE = self.old_base - site.USER_SITE = self.old_site - - def test_create_zipfile(self): - # Test to make sure zipfile creation handles common cases. 
- # This explicitly includes a folder containing an empty folder. - - dist = Distribution() - - cmd = upload_docs(dist) - cmd.upload_dir = self.upload_dir - cmd.target_dir = self.upload_dir - tmp_dir = tempfile.mkdtemp() - tmp_file = os.path.join(tmp_dir, 'foo.zip') - try: - zip_file = cmd.create_zipfile(tmp_file) - - assert zipfile.is_zipfile(tmp_file) - - zip_file = zipfile.ZipFile(tmp_file) # woh... - - assert zip_file.namelist() == ['index.html'] - - zip_file.close() - finally: - shutil.rmtree(tmp_dir) - diff --git a/libs/setuptools-2.2/setuptools/tests/win_script_wrapper.txt b/libs/setuptools-2.2/setuptools/tests/win_script_wrapper.txt deleted file mode 100644 index 731243d..0000000 --- a/libs/setuptools-2.2/setuptools/tests/win_script_wrapper.txt +++ /dev/null @@ -1,154 +0,0 @@ -Python Script Wrapper for Windows -================================= - -setuptools includes wrappers for Python scripts that allows them to be -executed like regular windows programs. There are 2 wrappers, once -for command-line programs, cli.exe, and one for graphica programs, -gui.exe. These programs are almost identical, function pretty much -the same way, and are generated from the same source file. The -wrapper programs are used by copying them to the directory containing -the script they are to wrap and with the same name as the script they -are to wrap. In the rest of this document, we'll give an example that -will illustrate this. - -Let's create a simple script, foo-script.py: - - >>> import os, sys, tempfile - >>> from setuptools.command.easy_install import nt_quote_arg - >>> sample_directory = tempfile.mkdtemp() - >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w') - >>> bytes_written = f.write( - ... """#!%(python_exe)s - ... import sys - ... input = repr(sys.stdin.read()) - ... print(sys.argv[0][-14:]) - ... print(sys.argv[1:]) - ... print(input) - ... if __debug__: - ... print('non-optimized') - ... 
""" % dict(python_exe=nt_quote_arg(sys.executable))) - >>> f.close() - -Note that the script starts with a Unix-style '#!' line saying which -Python executable to run. The wrapper will use this to find the -correct Python executable. - -We'll also copy cli.exe to the sample-directory with the name foo.exe: - - >>> import pkg_resources - >>> f = open(os.path.join(sample_directory, 'foo.exe'), 'wb') - >>> bytes_written = f.write( - ... pkg_resources.resource_string('setuptools', 'cli-32.exe') - ... ) - >>> f.close() - -When the copy of cli.exe, foo.exe in this example, runs, it examines -the path name it was run with and computes a Python script path name -by removing the '.exe' suffic and adding the '-script.py' suffix. (For -GUI programs, the suffix '-script-pyw' is added.) This is why we -named out script the way we did. Now we can run out script by running -the wrapper: - - >>> import subprocess - >>> cmd = [os.path.join(sample_directory, 'foo.exe'), 'arg1', 'arg 2', - ... 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b'] - >>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE) - >>> stdout, stderr = proc.communicate('hello\nworld\n'.encode('ascii')) - >>> bytes = sys.stdout.write(stdout.decode('ascii').replace('\r\n', '\n')) - \foo-script.py - ['arg1', 'arg 2', 'arg "2\\"', 'arg 4\\', 'arg5 a\\\\b'] - 'hello\nworld\n' - non-optimized - -This example was a little pathological in that it exercised windows -(MS C runtime) quoting rules: - -- Strings containing spaces are surrounded by double quotes. - -- Double quotes in strings need to be escaped by preceding them with - back slashes. - -- One or more backslashes preceding double quotes quotes need to be - escaped by preceding each of them them with back slashes. - - -Specifying Python Command-line Options --------------------------------------- - -You can specify a single argument on the '#!' line. 
This can be used -to specify Python options like -O, to run in optimized mode or -i -to start the interactive interpreter. You can combine multiple -options as usual. For example, to run in optimized mode and -enter the interpreter after running the script, you could use -Oi: - - >>> f = open(os.path.join(sample_directory, 'foo-script.py'), 'w') - >>> bytes_written = f.write( - ... """#!%(python_exe)s -Oi - ... import sys - ... input = repr(sys.stdin.read()) - ... print(sys.argv[0][-14:]) - ... print(sys.argv[1:]) - ... print(input) - ... if __debug__: - ... print('non-optimized') - ... sys.ps1 = '---' - ... """ % dict(python_exe=nt_quote_arg(sys.executable))) - >>> f.close() - >>> cmd = [os.path.join(sample_directory, 'foo.exe')] - >>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) - >>> stdout, stderr = proc.communicate() - >>> bytes = sys.stdout.write(stdout.decode('ascii').replace('\r\n', '\n')) - \foo-script.py - [] - '' - --- - -Testing the GUI Version ------------------------ - -Now let's test the GUI version with the simple scipt, bar-script.py: - - >>> import os, sys, tempfile - >>> from setuptools.command.easy_install import nt_quote_arg - >>> sample_directory = tempfile.mkdtemp() - >>> f = open(os.path.join(sample_directory, 'bar-script.pyw'), 'w') - >>> bytes_written = f.write( - ... """#!%(python_exe)s - ... import sys - ... f = open(sys.argv[1], 'wb') - ... bytes_written = f.write(repr(sys.argv[2]).encode('utf-8')) - ... f.close() - ... """ % dict(python_exe=nt_quote_arg(sys.executable))) - >>> f.close() - -We'll also copy gui.exe to the sample-directory with the name bar.exe: - - >>> import pkg_resources - >>> f = open(os.path.join(sample_directory, 'bar.exe'), 'wb') - >>> bytes_written = f.write( - ... pkg_resources.resource_string('setuptools', 'gui-32.exe') - ... ) - >>> f.close() - -Finally, we'll run the script and check the result: - - >>> cmd = [ - ... 
os.path.join(sample_directory, 'bar.exe'), - ... os.path.join(sample_directory, 'test_output.txt'), - ... 'Test Argument', - ... ] - >>> proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.STDOUT) - >>> stdout, stderr = proc.communicate() - >>> print(stdout.decode('ascii')) - - >>> f_out = open(os.path.join(sample_directory, 'test_output.txt'), 'rb') - >>> print(f_out.read().decode('ascii')) - 'Test Argument' - >>> f_out.close() - - -We're done with the sample_directory: - - >>> import shutil - >>> shutil.rmtree(sample_directory) - diff --git a/libs/setuptools-2.2/setuptools/version.py b/libs/setuptools-2.2/setuptools/version.py deleted file mode 100644 index 2b9ccf1..0000000 --- a/libs/setuptools-2.2/setuptools/version.py +++ /dev/null @@ -1 +0,0 @@ -__version__ = '2.2' diff --git a/libs/setuptools-2.2/tests/api_tests.txt b/libs/setuptools-2.2/tests/api_tests.txt deleted file mode 100644 index d34f231..0000000 --- a/libs/setuptools-2.2/tests/api_tests.txt +++ /dev/null @@ -1,424 +0,0 @@ -Pluggable Distributions of Python Software -========================================== - -Distributions -------------- - -A "Distribution" is a collection of files that represent a "Release" of a -"Project" as of a particular point in time, denoted by a -"Version":: - - >>> import sys, pkg_resources - >>> from pkg_resources import Distribution - >>> Distribution(project_name="Foo", version="1.2") - Foo 1.2 - -Distributions have a location, which can be a filename, URL, or really anything -else you care to use:: - - >>> dist = Distribution( - ... location="http://example.com/something", - ... project_name="Bar", version="0.9" - ... 
) - - >>> dist - Bar 0.9 (http://example.com/something) - - -Distributions have various introspectable attributes:: - - >>> dist.location - 'http://example.com/something' - - >>> dist.project_name - 'Bar' - - >>> dist.version - '0.9' - - >>> dist.py_version == sys.version[:3] - True - - >>> print(dist.platform) - None - -Including various computed attributes:: - - >>> from pkg_resources import parse_version - >>> dist.parsed_version == parse_version(dist.version) - True - - >>> dist.key # case-insensitive form of the project name - 'bar' - -Distributions are compared (and hashed) by version first:: - - >>> Distribution(version='1.0') == Distribution(version='1.0') - True - >>> Distribution(version='1.0') == Distribution(version='1.1') - False - >>> Distribution(version='1.0') < Distribution(version='1.1') - True - -but also by project name (case-insensitive), platform, Python version, -location, etc.:: - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="Foo",version="1.0") - True - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="foo",version="1.0") - True - - >>> Distribution(project_name="Foo",version="1.0") == \ - ... Distribution(project_name="Foo",version="1.1") - False - - >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \ - ... Distribution(project_name="Foo",py_version="2.4",version="1.0") - False - - >>> Distribution(location="spam",version="1.0") == \ - ... Distribution(location="spam",version="1.0") - True - - >>> Distribution(location="spam",version="1.0") == \ - ... Distribution(location="baz",version="1.0") - False - - - -Hash and compare distribution by prio/plat - -Get version from metadata -provider capabilities -egg_name() -as_requirement() -from_location, from_filename (w/path normalization) - -Releases may have zero or more "Requirements", which indicate -what releases of another project the release requires in order to -function. 
A Requirement names the other project, expresses some criteria -as to what releases of that project are acceptable, and lists any "Extras" -that the requiring release may need from that project. (An Extra is an -optional feature of a Release, that can only be used if its additional -Requirements are satisfied.) - - - -The Working Set ---------------- - -A collection of active distributions is called a Working Set. Note that a -Working Set can contain any importable distribution, not just pluggable ones. -For example, the Python standard library is an importable distribution that -will usually be part of the Working Set, even though it is not pluggable. -Similarly, when you are doing development work on a project, the files you are -editing are also a Distribution. (And, with a little attention to the -directory names used, and including some additional metadata, such a -"development distribution" can be made pluggable as well.) - - >>> from pkg_resources import WorkingSet - -A working set's entries are the sys.path entries that correspond to the active -distributions. By default, the working set's entries are the items on -``sys.path``:: - - >>> ws = WorkingSet() - >>> ws.entries == sys.path - True - -But you can also create an empty working set explicitly, and add distributions -to it:: - - >>> ws = WorkingSet([]) - >>> ws.add(dist) - >>> ws.entries - ['http://example.com/something'] - >>> dist in ws - True - >>> Distribution('foo',version="") in ws - False - -And you can iterate over its distributions:: - - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -Adding the same distribution more than once is a no-op:: - - >>> ws.add(dist) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -For that matter, adding multiple distributions for the same project also does -nothing, because a working set can only hold one active distribution per -project -- the first one added to it:: - - >>> ws.add( - ... Distribution( - ... 
'http://example.com/something', project_name="Bar", - ... version="7.2" - ... ) - ... ) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -You can append a path entry to a working set using ``add_entry()``:: - - >>> ws.entries - ['http://example.com/something'] - >>> ws.add_entry(pkg_resources.__file__) - >>> ws.entries - ['http://example.com/something', '...pkg_resources.py...'] - -Multiple additions result in multiple entries, even if the entry is already in -the working set (because ``sys.path`` can contain the same entry more than -once):: - - >>> ws.add_entry(pkg_resources.__file__) - >>> ws.entries - ['...example.com...', '...pkg_resources...', '...pkg_resources...'] - -And you can specify the path entry a distribution was found under, using the -optional second parameter to ``add()``:: - - >>> ws = WorkingSet([]) - >>> ws.add(dist,"foo") - >>> ws.entries - ['foo'] - -But even if a distribution is found under multiple path entries, it still only -shows up once when iterating the working set: - - >>> ws.add_entry(ws.entries[0]) - >>> list(ws) - [Bar 0.9 (http://example.com/something)] - -You can ask a WorkingSet to ``find()`` a distribution matching a requirement:: - - >>> from pkg_resources import Requirement - >>> print(ws.find(Requirement.parse("Foo==1.0"))) # no match, return None - None - - >>> ws.find(Requirement.parse("Bar==0.9")) # match, return distribution - Bar 0.9 (http://example.com/something) - -Note that asking for a conflicting version of a distribution already in a -working set triggers a ``pkg_resources.VersionConflict`` error: - - >>> try: - ... ws.find(Requirement.parse("Bar==1.0")) - ... except pkg_resources.VersionConflict: - ... exc = sys.exc_info()[1] - ... print(str(exc)) - ... else: - ... 
raise AssertionError("VersionConflict was not raised") - (Bar 0.9 (http://example.com/something), Requirement.parse('Bar==1.0')) - -You can subscribe a callback function to receive notifications whenever a new -distribution is added to a working set. The callback is immediately invoked -once for each existing distribution in the working set, and then is called -again for new distributions added thereafter:: - - >>> def added(dist): print("Added %s" % dist) - >>> ws.subscribe(added) - Added Bar 0.9 - >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") - >>> ws.add(foo12) - Added Foo 1.2 - -Note, however, that only the first distribution added for a given project name -will trigger a callback, even during the initial ``subscribe()`` callback:: - - >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") - >>> ws.add(foo14) # no callback, because Foo 1.2 is already active - - >>> ws = WorkingSet([]) - >>> ws.add(foo12) - >>> ws.add(foo14) - >>> ws.subscribe(added) - Added Foo 1.2 - -And adding a callback more than once has no effect, either:: - - >>> ws.subscribe(added) # no callbacks - - # and no double-callbacks on subsequent additions, either - >>> just_a_test = Distribution(project_name="JustATest", version="0.99") - >>> ws.add(just_a_test) - Added JustATest 0.99 - - -Finding Plugins ---------------- - -``WorkingSet`` objects can be used to figure out what plugins in an -``Environment`` can be loaded without any resolution errors:: - - >>> from pkg_resources import Environment - - >>> plugins = Environment([]) # normally, a list of plugin directories - >>> plugins.add(foo12) - >>> plugins.add(foo14) - >>> plugins.add(just_a_test) - -In the simplest case, we just get the newest version of each distribution in -the plugin environment:: - - >>> ws = WorkingSet([]) - >>> ws.find_plugins(plugins) - ([JustATest 0.99, Foo 1.4 (f14)], {}) - -But if there's a problem with a version conflict or missing requirements, the -method 
falls back to older versions, and the error info dict will contain an -exception instance for each unloadable plugin:: - - >>> ws.add(foo12) # this will conflict with Foo 1.4 - >>> ws.find_plugins(plugins) - ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)}) - -But if you disallow fallbacks, the failed plugin will be skipped instead of -trying older versions:: - - >>> ws.find_plugins(plugins, fallback=False) - ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)}) - - - -Platform Compatibility Rules ----------------------------- - -On the Mac, there are potential compatibility issues for modules compiled -on newer versions of Mac OS X than what the user is running. Additionally, -Mac OS X will soon have two platforms to contend with: Intel and PowerPC. - -Basic equality works as on other platforms:: - - >>> from pkg_resources import compatible_platforms as cp - >>> reqd = 'macosx-10.4-ppc' - >>> cp(reqd, reqd) - True - >>> cp("win32", reqd) - False - -Distributions made on other machine types are not compatible:: - - >>> cp("macosx-10.4-i386", reqd) - False - -Distributions made on earlier versions of the OS are compatible, as -long as they are from the same top-level version. 
The patchlevel version -number does not matter:: - - >>> cp("macosx-10.4-ppc", reqd) - True - >>> cp("macosx-10.3-ppc", reqd) - True - >>> cp("macosx-10.5-ppc", reqd) - False - >>> cp("macosx-9.5-ppc", reqd) - False - -Backwards compatibility for packages made via earlier versions of -setuptools is provided as well:: - - >>> cp("darwin-8.2.0-Power_Macintosh", reqd) - True - >>> cp("darwin-7.2.0-Power_Macintosh", reqd) - True - >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc") - False - - -Environment Markers -------------------- - - >>> from pkg_resources import invalid_marker as im, evaluate_marker as em - >>> import os - - >>> print(im("sys_platform")) - Comparison or logical expression expected - - >>> print(im("sys_platform==")) - invalid syntax - - >>> print(im("sys_platform=='win32'")) - False - - >>> print(im("sys=='x'")) - Unknown name 'sys' - - >>> print(im("(extra)")) - Comparison or logical expression expected - - >>> print(im("(extra")) - invalid syntax - - >>> print(im("os.open('foo')=='y'")) - Language feature not supported in environment markers - - >>> print(im("'x'=='y' and os.open('foo')=='y'")) # no short-circuit! - Language feature not supported in environment markers - - >>> print(im("'x'=='x' or os.open('foo')=='y'")) # no short-circuit! 
- Language feature not supported in environment markers - - >>> print(im("'x' < 'y'")) - '<' operator not allowed in environment markers - - >>> print(im("'x' < 'y' < 'z'")) - Chained comparison not allowed in environment markers - - >>> print(im("r'x'=='x'")) - Only plain strings allowed in environment markers - - >>> print(im("'''x'''=='x'")) - Only plain strings allowed in environment markers - - >>> print(im('"""x"""=="x"')) - Only plain strings allowed in environment markers - - >>> print(im(r"'x\n'=='x'")) - Only plain strings allowed in environment markers - - >>> print(im("os.open=='y'")) - Language feature not supported in environment markers - - >>> em('"x"=="x"') - True - - >>> em('"x"=="y"') - False - - >>> em('"x"=="y" and "x"=="x"') - False - - >>> em('"x"=="y" or "x"=="x"') - True - - >>> em('"x"=="y" and "x"=="q" or "z"=="z"') - True - - >>> em('"x"=="y" and ("x"=="q" or "z"=="z")') - False - - >>> em('"x"=="y" and "z"=="z" or "x"=="q"') - False - - >>> em('"x"=="x" and "z"=="z" or "x"=="q"') - True - - >>> em("sys_platform=='win32'") == (sys.platform=='win32') - True - - >>> em("'x' in 'yx'") - True - - >>> em("'yx' in 'x'") - False - - - - diff --git a/libs/setuptools-2.2/tests/manual_test.py b/libs/setuptools-2.2/tests/manual_test.py deleted file mode 100644 index e6489b1..0000000 --- a/libs/setuptools-2.2/tests/manual_test.py +++ /dev/null @@ -1,93 +0,0 @@ -#!/usr/bin/env python - -import sys -import os -import shutil -import tempfile -import subprocess -from distutils.command.install import INSTALL_SCHEMES -from string import Template -from setuptools.compat import urlopen - -def _system_call(*args): - assert subprocess.call(args) == 0 - -def tempdir(func): - def _tempdir(*args, **kwargs): - test_dir = tempfile.mkdtemp() - old_dir = os.getcwd() - os.chdir(test_dir) - try: - return func(*args, **kwargs) - finally: - os.chdir(old_dir) - shutil.rmtree(test_dir) - return _tempdir - -SIMPLE_BUILDOUT = """\ -[buildout] - -parts = eggs - -[eggs] 
-recipe = zc.recipe.egg - -eggs = - extensions -""" - -BOOTSTRAP = 'http://downloads.buildout.org/1/bootstrap.py' -PYVER = sys.version.split()[0][:3] - -_VARS = {'base': '.', - 'py_version_short': PYVER} - -if sys.platform == 'win32': - PURELIB = INSTALL_SCHEMES['nt']['purelib'] -else: - PURELIB = INSTALL_SCHEMES['unix_prefix']['purelib'] - - -@tempdir -def test_virtualenv(): - """virtualenv with setuptools""" - purelib = os.path.abspath(Template(PURELIB).substitute(**_VARS)) - _system_call('virtualenv', '--no-site-packages', '.') - _system_call('bin/easy_install', 'setuptools==dev') - # linux specific - site_pkg = os.listdir(purelib) - site_pkg.sort() - assert 'setuptools' in site_pkg[0] - easy_install = os.path.join(purelib, 'easy-install.pth') - with open(easy_install) as f: - res = f.read() - assert 'setuptools' in res - -@tempdir -def test_full(): - """virtualenv + pip + buildout""" - _system_call('virtualenv', '--no-site-packages', '.') - _system_call('bin/easy_install', '-q', 'setuptools==dev') - _system_call('bin/easy_install', '-qU', 'setuptools==dev') - _system_call('bin/easy_install', '-q', 'pip') - _system_call('bin/pip', 'install', '-q', 'zc.buildout') - - with open('buildout.cfg', 'w') as f: - f.write(SIMPLE_BUILDOUT) - - with open('bootstrap.py', 'w') as f: - f.write(urlopen(BOOTSTRAP).read()) - - _system_call('bin/python', 'bootstrap.py') - _system_call('bin/buildout', '-q') - eggs = os.listdir('eggs') - eggs.sort() - assert len(eggs) == 3 - assert eggs[1].startswith('setuptools') - del eggs[1] - assert eggs == ['extensions-0.3-py2.6.egg', - 'zc.recipe.egg-1.2.2-py2.6.egg'] - -if __name__ == '__main__': - test_virtualenv() - test_full() diff --git a/libs/setuptools-2.2/tests/shlib_test/hello.c b/libs/setuptools-2.2/tests/shlib_test/hello.c deleted file mode 100644 index 9998372..0000000 --- a/libs/setuptools-2.2/tests/shlib_test/hello.c +++ /dev/null @@ -1,168 +0,0 @@ -/* Generated by Pyrex 0.9.3 on Thu Jan 05 17:47:12 2006 */ - -#include "Python.h" 
-#include "structmember.h" -#ifndef PY_LONG_LONG - #define PY_LONG_LONG LONG_LONG -#endif - - -typedef struct {PyObject **p; char *s;} __Pyx_InternTabEntry; /*proto*/ -typedef struct {PyObject **p; char *s; long n;} __Pyx_StringTabEntry; /*proto*/ -static PyObject *__Pyx_UnpackItem(PyObject *, int); /*proto*/ -static int __Pyx_EndUnpack(PyObject *, int); /*proto*/ -static int __Pyx_PrintItem(PyObject *); /*proto*/ -static int __Pyx_PrintNewline(void); /*proto*/ -static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/ -static void __Pyx_ReRaise(void); /*proto*/ -static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list); /*proto*/ -static PyObject *__Pyx_GetExcValue(void); /*proto*/ -static int __Pyx_ArgTypeTest(PyObject *obj, PyTypeObject *type, int none_allowed, char *name); /*proto*/ -static int __Pyx_TypeTest(PyObject *obj, PyTypeObject *type); /*proto*/ -static int __Pyx_GetStarArgs(PyObject **args, PyObject **kwds, char *kwd_list[], int nargs, PyObject **args2, PyObject **kwds2); /*proto*/ -static void __Pyx_WriteUnraisable(char *name); /*proto*/ -static void __Pyx_AddTraceback(char *funcname); /*proto*/ -static PyTypeObject *__Pyx_ImportType(char *module_name, char *class_name, long size); /*proto*/ -static int __Pyx_SetVtable(PyObject *dict, void *vtable); /*proto*/ -static int __Pyx_GetVtable(PyObject *dict, void *vtabptr); /*proto*/ -static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name, char *modname); /*proto*/ -static int __Pyx_InternStrings(__Pyx_InternTabEntry *t); /*proto*/ -static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/ -static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/ - -static PyObject *__pyx_m; -static PyObject *__pyx_b; -static int __pyx_lineno; -static char *__pyx_filename; -staticforward char **__pyx_f; - -/* Declarations from hello */ - -char (*(get_hello_msg(void))); /*proto*/ - -/* Implementation of hello */ - -static PyObject 
*__pyx_n_hello; - -static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/ -static PyObject *__pyx_f_5hello_hello(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) { - PyObject *__pyx_r; - PyObject *__pyx_1 = 0; - static char *__pyx_argnames[] = {0}; - if (!PyArg_ParseTupleAndKeywords(__pyx_args, __pyx_kwds, "", __pyx_argnames)) return 0; - - /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":4 */ - __pyx_1 = PyString_FromString(get_hello_msg()); if (!__pyx_1) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 4; goto __pyx_L1;} - __pyx_r = __pyx_1; - __pyx_1 = 0; - goto __pyx_L0; - - __pyx_r = Py_None; Py_INCREF(__pyx_r); - goto __pyx_L0; - __pyx_L1:; - Py_XDECREF(__pyx_1); - __Pyx_AddTraceback("hello.hello"); - __pyx_r = 0; - __pyx_L0:; - return __pyx_r; -} - -static __Pyx_InternTabEntry __pyx_intern_tab[] = { - {&__pyx_n_hello, "hello"}, - {0, 0} -}; - -static struct PyMethodDef __pyx_methods[] = { - {"hello", (PyCFunction)__pyx_f_5hello_hello, METH_VARARGS|METH_KEYWORDS, 0}, - {0, 0, 0, 0} -}; - -DL_EXPORT(void) inithello(void); /*proto*/ -DL_EXPORT(void) inithello(void) { - __pyx_m = Py_InitModule4("hello", __pyx_methods, 0, 0, PYTHON_API_VERSION); - if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;}; - __pyx_b = PyImport_AddModule("__builtin__"); - if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;}; - if (PyObject_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;}; - if (__Pyx_InternStrings(__pyx_intern_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; goto __pyx_L1;}; - - /* "C:\cygwin\home\pje\setuptools\tests\shlib_test\hello.pyx":3 */ - return; - __pyx_L1:; - __Pyx_AddTraceback("hello"); -} - -static char *__pyx_filenames[] = { - "hello.pyx", -}; -statichere char **__pyx_f = __pyx_filenames; - -/* Runtime support code */ - -static int 
__Pyx_InternStrings(__Pyx_InternTabEntry *t) { - while (t->p) { - *t->p = PyString_InternFromString(t->s); - if (!*t->p) - return -1; - ++t; - } - return 0; -} - -#include "compile.h" -#include "frameobject.h" -#include "traceback.h" - -static void __Pyx_AddTraceback(char *funcname) { - PyObject *py_srcfile = 0; - PyObject *py_funcname = 0; - PyObject *py_globals = 0; - PyObject *empty_tuple = 0; - PyObject *empty_string = 0; - PyCodeObject *py_code = 0; - PyFrameObject *py_frame = 0; - - py_srcfile = PyString_FromString(__pyx_filename); - if (!py_srcfile) goto bad; - py_funcname = PyString_FromString(funcname); - if (!py_funcname) goto bad; - py_globals = PyModule_GetDict(__pyx_m); - if (!py_globals) goto bad; - empty_tuple = PyTuple_New(0); - if (!empty_tuple) goto bad; - empty_string = PyString_FromString(""); - if (!empty_string) goto bad; - py_code = PyCode_New( - 0, /*int argcount,*/ - 0, /*int nlocals,*/ - 0, /*int stacksize,*/ - 0, /*int flags,*/ - empty_string, /*PyObject *code,*/ - empty_tuple, /*PyObject *consts,*/ - empty_tuple, /*PyObject *names,*/ - empty_tuple, /*PyObject *varnames,*/ - empty_tuple, /*PyObject *freevars,*/ - empty_tuple, /*PyObject *cellvars,*/ - py_srcfile, /*PyObject *filename,*/ - py_funcname, /*PyObject *name,*/ - __pyx_lineno, /*int firstlineno,*/ - empty_string /*PyObject *lnotab*/ - ); - if (!py_code) goto bad; - py_frame = PyFrame_New( - PyThreadState_Get(), /*PyThreadState *tstate,*/ - py_code, /*PyCodeObject *code,*/ - py_globals, /*PyObject *globals,*/ - 0 /*PyObject *locals*/ - ); - if (!py_frame) goto bad; - py_frame->f_lineno = __pyx_lineno; - PyTraceBack_Here(py_frame); -bad: - Py_XDECREF(py_srcfile); - Py_XDECREF(py_funcname); - Py_XDECREF(empty_tuple); - Py_XDECREF(empty_string); - Py_XDECREF(py_code); - Py_XDECREF(py_frame); -} diff --git a/libs/setuptools-2.2/tests/shlib_test/hello.pyx b/libs/setuptools-2.2/tests/shlib_test/hello.pyx deleted file mode 100644 index 58ce691..0000000 --- 
a/libs/setuptools-2.2/tests/shlib_test/hello.pyx +++ /dev/null @@ -1,4 +0,0 @@ -cdef extern char *get_hello_msg() - -def hello(): - return get_hello_msg() diff --git a/libs/setuptools-2.2/tests/shlib_test/hellolib.c b/libs/setuptools-2.2/tests/shlib_test/hellolib.c deleted file mode 100644 index 88d65ce..0000000 --- a/libs/setuptools-2.2/tests/shlib_test/hellolib.c +++ /dev/null @@ -1,3 +0,0 @@ -extern char* get_hello_msg() { - return "Hello, world!"; -} diff --git a/libs/setuptools-2.2/tests/shlib_test/setup.py b/libs/setuptools-2.2/tests/shlib_test/setup.py deleted file mode 100644 index b0c9399..0000000 --- a/libs/setuptools-2.2/tests/shlib_test/setup.py +++ /dev/null @@ -1,10 +0,0 @@ -from setuptools import setup, Extension, Library - -setup( - name="shlib_test", - ext_modules = [ - Library("hellolib", ["hellolib.c"]), - Extension("hello", ["hello.pyx"], libraries=["hellolib"]) - ], - test_suite="test_hello.HelloWorldTest", -) diff --git a/libs/setuptools-2.2/tests/shlib_test/test_hello.py b/libs/setuptools-2.2/tests/shlib_test/test_hello.py deleted file mode 100644 index 6da02e3..0000000 --- a/libs/setuptools-2.2/tests/shlib_test/test_hello.py +++ /dev/null @@ -1,7 +0,0 @@ -from unittest import TestCase - -class HelloWorldTest(TestCase): - def testHelloMsg(self): - from hello import hello - self.assertEqual(hello(), "Hello, world!") - diff --git a/libs/setuptools-2.2/tests/test_ez_setup.py b/libs/setuptools-2.2/tests/test_ez_setup.py deleted file mode 100644 index 26881f5..0000000 --- a/libs/setuptools-2.2/tests/test_ez_setup.py +++ /dev/null @@ -1,63 +0,0 @@ -import sys -import os -import tempfile -import unittest -import shutil -import copy - -CURDIR = os.path.abspath(os.path.dirname(__file__)) -TOPDIR = os.path.split(CURDIR)[0] -sys.path.insert(0, TOPDIR) - -from ez_setup import (use_setuptools, _build_egg, _python_cmd, _do_download, - _install, DEFAULT_URL, DEFAULT_VERSION) -import ez_setup - -class TestSetup(unittest.TestCase): - - def urlopen(self, url): 
- return open(self.tarball, 'rb') - - def setUp(self): - self.old_sys_path = copy.copy(sys.path) - self.cwd = os.getcwd() - self.tmpdir = tempfile.mkdtemp() - os.chdir(TOPDIR) - _python_cmd("setup.py", "-q", "egg_info", "-RDb", "''", "sdist", - "--dist-dir", "%s" % self.tmpdir) - tarball = os.listdir(self.tmpdir)[0] - self.tarball = os.path.join(self.tmpdir, tarball) - from setuptools.compat import urllib2 - urllib2.urlopen = self.urlopen - - def tearDown(self): - shutil.rmtree(self.tmpdir) - os.chdir(self.cwd) - sys.path = copy.copy(self.old_sys_path) - - def test_build_egg(self): - # making it an egg - egg = _build_egg('Egg to be built', self.tarball, self.tmpdir) - - # now trying to import it - sys.path[0] = egg - import setuptools - self.assertTrue(setuptools.__file__.startswith(egg)) - - def test_do_download(self): - tmpdir = tempfile.mkdtemp() - _do_download(DEFAULT_VERSION, DEFAULT_URL, tmpdir, 1) - import setuptools - self.assertTrue(setuptools.bootstrap_install_from.startswith(tmpdir)) - - def test_install(self): - def _faked(*args): - return True - ez_setup.python_cmd = _faked - _install(self.tarball) - - def test_use_setuptools(self): - self.assertEqual(use_setuptools(), None) - -if __name__ == '__main__': - unittest.main() diff --git a/libs/setuptools-2.2/tests/test_pkg_resources.py b/libs/setuptools-2.2/tests/test_pkg_resources.py deleted file mode 100644 index dfa2712..0000000 --- a/libs/setuptools-2.2/tests/test_pkg_resources.py +++ /dev/null @@ -1,74 +0,0 @@ -import sys -import tempfile -import os -import zipfile - -import pkg_resources - -try: - unicode -except NameError: - unicode = str - -class EggRemover(unicode): - def __call__(self): - if self in sys.path: - sys.path.remove(self) - if os.path.exists(self): - os.remove(self) - -class TestZipProvider(object): - finalizers = [] - - @classmethod - def setup_class(cls): - "create a zip egg and add it to sys.path" - egg = tempfile.NamedTemporaryFile(suffix='.egg', delete=False) - zip_egg = 
zipfile.ZipFile(egg, 'w') - zip_info = zipfile.ZipInfo() - zip_info.filename = 'mod.py' - zip_info.date_time = 2013, 5, 12, 13, 25, 0 - zip_egg.writestr(zip_info, 'x = 3\n') - zip_info = zipfile.ZipInfo() - zip_info.filename = 'data.dat' - zip_info.date_time = 2013, 5, 12, 13, 25, 0 - zip_egg.writestr(zip_info, 'hello, world!') - zip_egg.close() - egg.close() - - sys.path.append(egg.name) - cls.finalizers.append(EggRemover(egg.name)) - - @classmethod - def teardown_class(cls): - for finalizer in cls.finalizers: - finalizer() - - def test_resource_filename_rewrites_on_change(self): - """ - If a previous call to get_resource_filename has saved the file, but - the file has been subsequently mutated with different file of the - same size and modification time, it should not be overwritten on a - subsequent call to get_resource_filename. - """ - import mod - manager = pkg_resources.ResourceManager() - zp = pkg_resources.ZipProvider(mod) - filename = zp.get_resource_filename(manager, 'data.dat') - assert os.stat(filename).st_mtime == 1368379500 - f = open(filename, 'w') - f.write('hello, world?') - f.close() - os.utime(filename, (1368379500, 1368379500)) - filename = zp.get_resource_filename(manager, 'data.dat') - f = open(filename) - assert f.read() == 'hello, world!' 
- manager.cleanup_resources() - -class TestResourceManager(object): - def test_get_cache_path(self): - mgr = pkg_resources.ResourceManager() - path = mgr.get_cache_path('foo') - type_ = str(type(path)) - message = "Unexpected type from get_cache_path: " + type_ - assert isinstance(path, (unicode, str)), message diff --git a/meetings/nova-bug-scrub-meeting.yaml b/meetings/nova-bug-scrub-meeting.yaml index 1539757..1d770e0 100644 --- a/meetings/nova-bug-scrub-meeting.yaml +++ b/meetings/nova-bug-scrub-meeting.yaml @@ -1,13 +1,17 @@ -project: 'Nova Bug Scrub Meeting' -uuid: +project: Nova Bug Scrub Meeting schedule: - - time: '1630 UTC' - day: 'Wednesday' - irc: 'openstack-meeting-3' - period: 'weekly' -chair: 'Tracy Jones' -description: 'This is a weekly meeting review Nova Bugs. The initial focus will be on triaging bugs in the New state and ensuring that critical bugs are making progress. Once the New queue is under control we will switch our focus to reviewing incomplete bugs, removing assignees from stale bugs so others can pick them up (especially low hanging fruit)' + - time: '1630' + day: Wednesday + irc: openstack-meeting-3 + frequency: weekly +chair: Tracy Jones +description: > + This is a weekly meeting to review Nova Bugs. The initial focus will be on + triaging bugs in the New state and ensuring that critical bugs are making + progress. 
Once the New queue is under control we will switch our focus to + reviewing incomplete bugs, removing assignees from stale bugs so others can + pick them up (especially low hanging fruit) agenda: - - 'triage new' - - 'review critical' - - 'open discussion' \ No newline at end of file + - triage new + - review critical + - open discussion diff --git a/meetings/nova-team-meeting.yaml b/meetings/nova-team-meeting.yaml index d6762d2..9259a42 100644 --- a/meetings/nova-team-meeting.yaml +++ b/meetings/nova-team-meeting.yaml @@ -1,20 +1,22 @@ -project: N'ova Team Meeting' -uuid: +project: Nova Team Meeting schedule: - - time: '1400 UTC' - day: 'Thursday' - irc: 'openstack-meeting-alt' - period: 'weekly' + - time: '1400' + day: Thursday + irc: openstack-meeting-alt + frequency: weekly - - time: '2100 UTC' - day: 'Thursday' - irc: 'openstack-meeting' - period: 'weekly' -chair: 'Russell Bryant' -description: 'This meeting is a weekly gathering of developers working on OpenStack Compute (Nova). We cover topics such as release planning and status, bugs, reviews, and other current topics worthy of real-time discussion.' + - time: '2100' + day: Thursday + irc: openstack-meeting + frequency: weekly +chair: Russell Bryant +description: > + This meeting is a weekly gathering of developers working on OpenStack Compute + (Nova). We cover topics such as release planning and status, bugs, reviews, + and other current topics worthy of real-time discussion. 
agenda: - - 'general annoucement' - - 'sub-teams' - - 'bugs' - - 'blueprints' - - 'open discussion' \ No newline at end of file + - general announcement + - sub-teams + - bugs + - blueprints + - open discussion diff --git a/meetings/openstack-project-and-release-status-meeting.yaml b/meetings/openstack-project-and-release-status-meeting.yaml index 622367f..c660e81 100644 --- a/meetings/openstack-project-and-release-status-meeting.yaml +++ b/meetings/openstack-project-and-release-status-meeting.yaml @@ -1,14 +1,15 @@ -project: 'OpenStack Project and Release Status Meeting' -uuid: +project: OpenStack Project and Release Status Meeting schedule: - - time: '2100 UTC' - day: 'Tuesday' - irc: 'openstack-meeting' - period: 'weekly' -chair: 'Thierry Carrez' -description: 'The whole OpenStack Team holds a public weekly Project and Release Status meeting. Everyome is encouraged to attend.' + - time: '2100' + day: Tuesday + irc: openstack-meeting + frequency: weekly +chair: Thierry Carrez +description: > + The whole OpenStack Team holds a public weekly Project and Release Status + meeting. Everyone is encouraged to attend. agenda: - - 'icehouse-3 progress' - - 'red flag district (or blocked blueprints)' - - 'incubated projects' - - 'open discussion' \ No newline at end of file + - icehouse-3 progress + - red flag district (or blocked blueprints) + - incubated projects + - open discussion diff --git a/meetings/technical-committee-meeting.yaml b/meetings/technical-committee-meeting.yaml index 940f2e8..c32058e 100644 --- a/meetings/technical-committee-meeting.yaml +++ b/meetings/technical-committee-meeting.yaml @@ -1,18 +1,22 @@ -project: 'Technical Committee Meeting' -uuid: +project: Technical Committee Meeting schedule: - - time: '2000 UTC' - day: 'Tuesday' - irc: 'openstack-meeting' - period: 'weekly' -chair: 'Thierry Carrez' -description: 'The OpenStack Technical Committee is one of the governing bodies of the OpenStack project. 
It is an elected group that represents the contributors to the project, and has oversight on all technical matters.' + - time: '2000' + day: Tuesday + irc: openstack-meeting + frequency: weekly +chair: Thierry Carrez +description: > + The OpenStack Technical Committee is one of the governing bodies of the + OpenStack project. It is an elected group that represents the contributors + to the project, and has oversight on all technical matters. agenda: - - 'progress on DefCore feedback' - - 'creating key distribution service (KDS) under identity program' - - 'integrated projects and new requirements: Neutron' - - minor-govermance-changes: - - 'add oslo.test to the Oslo program' - - 'add Infrastructure Program mission' - - 'Oslo program changes (oslo.vmware addition)' - - 'open discussion' \ No newline at end of file + - progress on DefCore feedback + - creating key distribution service (KDS) under identity program + - 'integrated projects and new requirements: Neutron' + - + - minor governance changes + - + - add oslo.test to the Oslo program + - add Infrastructure Program mission + - Oslo program changes (oslo.vmware addition) + - open discussion diff --git a/meetings/xenapi-team-meeting.yaml b/meetings/xenapi-team-meeting.yaml index 5e5de9d..f0c27ed 100644 --- a/meetings/xenapi-team-meeting.yaml +++ b/meetings/xenapi-team-meeting.yaml @@ -1,17 +1,22 @@ -project: 'XenAPI Team Meeting' -uuid: +project: XenAPI Team Meeting schedule: - - time: '1500 UTC' - day: 'Wednesday' - irc: 'openstack-meeting' - period: 'weekly' -chair: 'John Garbutt' -description: 'Meeting to discuss the status of Blueprints and bugs relating to the XenAPI layer.' + - time: '1500' + day: Wednesday + irc: openstack-meeting + frequency: weekly +chair: John Garbutt +description: > + Meeting to discuss the status of Blueprints and bugs relating to the XenAPI + layer. 
agenda: - - 'actions from last meeting' - - blueprints: - - 'discuss Icehouse-2 progress' - - bugs-and-QA: - - 'https://bugs.launchpad.net/nova/+bugs?field.tag=xenserver' - - 'gating progress' - - 'open discussion' \ No newline at end of file + - actions from last meeting + - + - blueprints + - + - discuss Icehouse-2 progress + - + - bugs and QA + - + - 'https://bugs.launchpad.net/nova/+bugs?field.tag=xenserver' + - gating progress + - open discussion diff --git a/src/jobs.py b/src/jobs.py index 3f6f06c..2107379 100644 --- a/src/jobs.py +++ b/src/jobs.py @@ -3,19 +3,30 @@ import icalendar import pprint import sys import os +import uuid from meeting import Meeting +import logging +# logging settings +logging.basicConfig(format='%(asctime)s - %(levelname)s - %(message)s', level=logging.DEBUG) + +yaml_dir = '../meetings' +ical_dir = '../icals' +publish_url = '127.0.0.1' + +# should we make execute_gate(), etc. static methods instead? class MeetingJobs: """Executes post, gate, and check jobs.""" - yaml_dir = '../meetings' - publish_url = '127.0.0.1' - def execute_check(self): - meetings = self.create_meetings(self.yaml_dir) - meetings_display = "\n".join([m.display() for m in meetings]) - print(meetings_display) # testing purpose + logging.info('Check job initiated.') + meetings = self.load_meetings(yaml_dir) + # now convert meetings to a list of ical + for m in meetings: + m.write_ical() + logging.info('Wrote %d meetings to iCal' % (len(meetings))) + logging.info('Check job finished.') def execute_gate(self): pass @@ -23,17 +34,18 @@ class MeetingJobs: def execute_post(self): pass - def create_meetings(self, yaml_dir): + def load_meetings(self, yaml_dir): os.chdir(yaml_dir) - meetings_yaml = [yaml.load(open(f, 'r')) for f in os.listdir() if os.path.isfile(f) and ".yaml" in f] - meetings = [Meeting(y) for y in meetings_yaml] + meetings_yaml = [f for f in os.listdir() if os.path.isfile(f) and f.endswith('yaml')] + meetings = [Meeting(yaml.load(open(f, 'r')), f) for f in 
meetings_yaml] + logging.info('Loaded %d meetings from YAML' % (len(meetings))) return meetings def pprint_yaml(): """For now, this is a simple script to import all the yaml files and pretty print it.""" # change the current directory to the meetings directory where all the yaml files are located - os.chdir('../meetings/') + os.chdir(yaml_dir) # get a list of all the yaml files meetings = [yaml.load(open(f, 'r')) for f in os.listdir() if os.path.isfile(f) and ".yaml" in f] @@ -42,6 +54,5 @@ def pprint_yaml(): print(yaml.dump(m)) # entry point -#pprint_yaml() jobs = MeetingJobs() jobs.execute_check() diff --git a/src/meeting.py b/src/meeting.py index e9e162b..d953436 100644 --- a/src/meeting.py +++ b/src/meeting.py @@ -1,31 +1,99 @@ import pprint +import pytz +import icalendar +import datetime +import time +import os +import yaml +import logging + +weekdays = { + 'Monday' : 0 + , 'Tuesday' : 1 + , 'Wednesday' : 2 + , 'Thursday' : 3 + , 'Friday' : 4 + , 'Saturday' : 5 + , 'Sunday' : 6 + } class Meeting: """An OpenStack meeting.""" - def __init__(self, yaml): + def __init__(self, yaml, filename): + + self.filename = filename # create yaml object from yaml file. use it initialize following fields. 
self.project = yaml['project'] self.chair = yaml['chair'] self.description = yaml['description'] - self.agenda = pprint.pformat(yaml['agenda']) # this is a list of topics + self.agenda = yaml['agenda'] # this is a list of list of topics - # create schedule object - schedule = yaml['schedule'][0] - self.schedule = Schedule(schedule['time'], schedule['day'], schedule['irc'], schedule['period']) + # create schedule objects + self.schedules = [Schedule(s) for s in yaml['schedule']] - def display(self): - return "project:\t%s\nchair:\t%s\ndescription:\t%s\nagenda:\t%s\nschedule:\t%s" % (self.project, self.chair, self.description, self.agenda, self.schedule.display()) + def write_ical(self): + cal = icalendar.Calendar() + + # add properties to ensure compliance + cal.add('prodid', '-//OpenStack//Gerrit-Powered Meeting Agendas//EN') + cal.add('version', '2.0') + + i = 1 + for s in self.schedules: + # one Event per iCal file + event = icalendar.Event() + # I think the summary field needs to be unique per event in an ical file (at least, for it to work with Google Calendar) + event.add('summary', self.project + ' ' + str(i)) + + # add ical description (meeting description, irc, agenda, chair, etc.) + ical_descript = "Project: %s\nChair: %s\nIRC: %s\nAgenda:\n%s\n\nDescription: %s" % (self.project, self.chair, s.irc, yaml.dump(self.agenda, default_flow_style=False), self.description) + event.add('description', ical_descript) + + # get starting date + d = datetime.datetime.utcnow() + next_meeting = next_weekday(d, weekdays[s.day]) # 0 = Monday, 1=Tuesday, 2=Wednesday... + + next_meeting_dt = datetime.datetime(next_meeting.year, next_meeting.month, next_meeting.day, s.time.hour, s.time.minute, tzinfo=pytz.utc) + event.add('dtstart', next_meeting_dt) + + # add recurrence rule + event.add('rrule', {'freq': s.freq}) + + # add meeting length + # TODO: figure out what to do for meeting length. 
doesn't seem to be specified for any of the openstack meetings + event.add('duration', datetime.timedelta(hours=1)) + + # add event to calendar + cal.add_component(event) + i += 1 + + # write ical files to disk + ical_dir = '../icals' + ical_filename = self.filename[:-4] + 'ics' + + if not os.path.exists(ical_dir): + os.makedirs(ical_dir) + os.chdir(ical_dir) + + with open(ical_filename, 'wb') as ics: + ics.write(cal.to_ical()) + + logging.info('\'%s\' processed. Contains %d events.' % (ical_filename, len(cal.subcomponents))) class Schedule: """A meeting schedule.""" - - def __init__(self, time, day, irc, period): - self.time = time - self.day = day - self.irc = irc - self.period = period - def display(self): - return "Schedule:\n\ttime:\t%s\n\tday:\t%s\n\tirc:\t%s\n\tperiod:\t%s\n" % (self.time, self.day, self.irc, self.period) + def __init__(self, sched_yaml): + self.time = datetime.datetime.strptime(sched_yaml['time'], '%H%M') + self.day = sched_yaml['day'] + self.irc = sched_yaml['irc'] + self.freq = sched_yaml['frequency'] + +# https://stackoverflow.com/questions/6558535/python-find-the-date-for-the-first-monday-after-a-given-a-date +def next_weekday(d, weekday): + days_ahead = weekday - d.weekday() + if days_ahead <= 0: # Target day already happened this week + days_ahead += 7 + return d + datetime.timedelta(days_ahead)