Browse source

added new virtualenv

Nanak Tattyrek 10 years ago
parent
commit
96dbdb4d96
100 changed files with 10677 additions and 0 deletions
  1. + 11 - 0    venv/bin/wheel
  2. + 40 - 0    venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/DESCRIPTION.rst
  3. + 62 - 0    venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/METADATA
  4. + 147 - 0   venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/RECORD
  5. + 5 - 0     venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/WHEEL
  6. + 0 - 0     venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/metadata.json
  7. + 1 - 0     venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/top_level.txt
  8. + 36 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/DESCRIPTION.rst
  9. + 63 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/METADATA
  10. + 53 - 0   venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/RECORD
  11. + 6 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/WHEEL
  12. + 4 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/entry_points.txt
  13. + 0 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/metadata.json
  14. + 1 - 0    venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/top_level.txt
  15. + 101 - 0  venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/DESCRIPTION.rst
  16. + 121 - 0  venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/METADATA
  17. + 18 - 0   venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/RECORD
  18. + 5 - 0    venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/WHEEL
  19. + 0 - 0    venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/metadata.json
  20. + 1 - 0    venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/top_level.txt
  21. + 54 - 0   venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/DESCRIPTION.rst
  22. + 79 - 0   venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/METADATA
  23. + 93 - 0   venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/RECORD
  24. + 6 - 0    venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/WHEEL
  25. + 0 - 0    venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/metadata.json
  26. + 1 - 0    venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/top_level.txt
  27. + 3 - 0    venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/DESCRIPTION.rst
  28. + 16 - 0   venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/METADATA
  29. + 8 - 0    venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/RECORD
  30. + 5 - 0    venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/WHEEL
  31. + 0 - 0    venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/metadata.json
  32. + 1 - 0    venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/top_level.txt
  33. + 25 - 0   venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/DESCRIPTION.rst
  34. + 53 - 0   venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/METADATA
  35. + 461 - 0  venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/RECORD
  36. + 6 - 0    venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/WHEEL
  37. + 5 - 0    venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/entry_points.txt
  38. + 0 - 0    venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/metadata.json
  39. + 1 - 0    venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/pbr.json
  40. + 1 - 0    venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/top_level.txt
  41. + 11 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.py
  42. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyc
  43. + 117 - 0  venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py
  44. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyc
  45. + 39 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.py
  46. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyc
  47. + 18 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
  48. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyc
  49. + 116 - 0  venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
  50. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyc
  51. + 41 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
  52. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyc
  53. + 14 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.py
  54. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyc
  55. + 299 - 0  venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.py
  56. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyc
  57. + 63 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py
  58. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyc
  59. + 134 - 0  venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.py
  60. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyc
  61. + 184 - 0  venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.py
  62. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyc
  63. + 21 - 0   venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.py
  64. BIN       venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyc
  65. + 2411 - 0 venv/lib/python2.7/site-packages/pip/_vendor/ipaddress.py
  66. BIN       venv/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyc
  67. + 326 - 0  venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py
  68. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyc
  69. + 73 - 0   venv/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py
  70. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyc
  71. + 83 - 0   venv/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py
  72. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyc
  73. + 193 - 0  venv/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py
  74. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyc
  75. + 155 - 0  venv/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py
  76. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyc
  77. + 69 - 0   venv/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py
  78. BIN       venv/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyc
  79. + 31 - 0   venv/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py
  80. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.pyc
  81. + 24 - 0   venv/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py
  82. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.pyc
  83. + 40 - 0   venv/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py
  84. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.pyc
  85. + 78 - 0   venv/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py
  86. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.pyc
  87. + 784 - 0  venv/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py
  88. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.pyc
  89. + 403 - 0  venv/lib/python2.7/site-packages/pip/_vendor/packaging/version.py
  90. BIN       venv/lib/python2.7/site-packages/pip/_vendor/packaging/version.pyc
  91. + 3107 - 0 venv/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py
  92. BIN       venv/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyc
  93. + 123 - 0  venv/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py
  94. BIN       venv/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyc
  95. + 86 - 0   venv/lib/python2.7/site-packages/pip/_vendor/progress/bar.py
  96. BIN       venv/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyc
  97. + 49 - 0   venv/lib/python2.7/site-packages/pip/_vendor/progress/counter.py
  98. BIN       venv/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyc
  99. + 92 - 0   venv/lib/python2.7/site-packages/pip/_vendor/progress/helpers.py
  100. BIN      venv/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyc

+ 11 - 0
venv/bin/wheel

@@ -0,0 +1,11 @@
+#!/Users/nanak/dev/dptapedbgui/venv/bin/python
+
+# -*- coding: utf-8 -*-
+import re
+import sys
+
+from wheel.tool import main
+
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(main())

+ 40 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/DESCRIPTION.rst

@@ -0,0 +1,40 @@
+Flask
+-----
+
+Flask is a microframework for Python based on Werkzeug, Jinja 2 and good
+intentions. And before you ask: It's BSD licensed!
+
+Flask is Fun
+````````````
+
+.. code:: python
+
+    from flask import Flask
+    app = Flask(__name__)
+
+    @app.route("/")
+    def hello():
+        return "Hello World!"
+
+    if __name__ == "__main__":
+        app.run()
+
+And Easy to Setup
+`````````````````
+
+.. code:: bash
+
+    $ pip install Flask
+    $ python hello.py
+     * Running on http://localhost:5000/
+
+Links
+`````
+
+* `website <http://flask.pocoo.org/>`_
+* `documentation <http://flask.pocoo.org/docs/>`_
+* `development version
+  <http://github.com/mitsuhiko/flask/zipball/master#egg=Flask-dev>`_
+
+
+

+ 62 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/METADATA

@@ -0,0 +1,62 @@
+Metadata-Version: 2.0
+Name: Flask
+Version: 0.10.1
+Summary: A microframework based on Werkzeug, Jinja2 and good intentions
+Home-page: http://github.com/mitsuhiko/flask/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+License: BSD
+Platform: any
+Classifier: Development Status :: 4 - Beta
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Requires-Dist: Werkzeug (>=0.7)
+Requires-Dist: Jinja2 (>=2.4)
+Requires-Dist: itsdangerous (>=0.21)
+
+Flask
+-----
+
+Flask is a microframework for Python based on Werkzeug, Jinja 2 and good
+intentions. And before you ask: It's BSD licensed!
+
+Flask is Fun
+````````````
+
+.. code:: python
+
+    from flask import Flask
+    app = Flask(__name__)
+
+    @app.route("/")
+    def hello():
+        return "Hello World!"
+
+    if __name__ == "__main__":
+        app.run()
+
+And Easy to Setup
+`````````````````
+
+.. code:: bash
+
+    $ pip install Flask
+    $ python hello.py
+     * Running on http://localhost:5000/
+
+Links
+`````
+
+* `website <http://flask.pocoo.org/>`_
+* `documentation <http://flask.pocoo.org/docs/>`_
+* `development version
+  <http://github.com/mitsuhiko/flask/zipball/master#egg=Flask-dev>`_
+
+
+

+ 147 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/RECORD

@@ -0,0 +1,147 @@
+flask/__init__.py,sha256=f2QCGRK-AATcoOyDoSGPJJgnSHvNr-qdII-aqJpicYY,1674
+flask/_compat.py,sha256=ug9fCmzhMFE1ecCpZLvbHSTXluYCcLXt3_8SNwGNZyg,2164
+flask/app.py,sha256=Tr9IRAwdYmUQc6vMNE3ccpavtxQx2cmFXhzU7KSba7I,76782
+flask/blueprints.py,sha256=OBo5Mkl_y54l0xPuLptXz54CDvvRX5tApMKIXyvykTk,16320
+flask/config.py,sha256=NhTkQX2yFgPDZD-t3phxcS2kw6i2kA7RHgX0fDoHXbQ,6234
+flask/ctx.py,sha256=Ka_Ql-ZgRU7z9aVkET0_WwwTKguaUozg3ezDcm-tUKY,14266
+flask/debughelpers.py,sha256=iyJC--eX8ANvci4MsFbtwc_PkwVlbCrZGHCOnQsOHFs,3508
+flask/exthook.py,sha256=dfW3DBBI8LyPrwacwZN8q0u0Q8gBBCXtocJ7wBnvgAs,5087
+flask/globals.py,sha256=kNGf5b05C8AjTzZJhBUbbtgufgumOnAPk9g_LYh0Rv0,1137
+flask/helpers.py,sha256=roTv_nwOjhAhUWvW-Uz_Ex9SLA32mEx_7vfZagZjizA,33793
+flask/json.py,sha256=5d-FWvZfzun2JKpapE_fwk_-db0cYvrc5OO-MJsAdgk,8113
+flask/logging.py,sha256=EkiNIJbQyeUb8nipAURO8CQJwC4BESjkM_snQIRCeIc,1398
+flask/module.py,sha256=2J_pmW7jaFye1FtNTn5jfX-HxBqMc9EZzSLUXxiV-7c,1363
+flask/sessions.py,sha256=JU7E5KolMOyItlNNREwGv3RKBnmQW1vd4gHghrQwULI,13107
+flask/signals.py,sha256=DmcQfKzlPdLoY00fxf2BsH3SsDFiFSppCbX9Yfdv_ng,2140
+flask/templating.py,sha256=joMsvkTasZUJxdipA0BnrbfIMidzzBzPug-dIaW0Tzo,4707
+flask/testing.py,sha256=C8b-44Ro9hsFH5-eBksgjBWLrPznESmX9rdMFBkIbp4,5003
+flask/views.py,sha256=S2fRXcRsyy6FUKJ9tZh39LLH22vOeZvYMLZQKT1pc6Y,5642
+flask/wrappers.py,sha256=67pssdNN3I4dKSVUVru5HNUteAeF2V6_7qpLZoVwmiI,6709
+flask/ext/__init__.py,sha256=M97BrvCtJFgcWpt8Jo1qg2nLkQaGd-4whLkdHfi-rE8,842
+flask/testsuite/__init__.py,sha256=zCtem4AVtj6_GSTxCFUHElbH3g6LYY9ApIr7NVUWmGU,7022
+flask/testsuite/appctx.py,sha256=vt0c0gfqcUYXfJ7t2IRd3Gg0BzSaLKO2bx9d5Id7m6M,3116
+flask/testsuite/basic.py,sha256=zw2tDp2g3ptgUWfjwISAcnp5hBXGCj3Wu3jwAgMBRbI,43777
+flask/testsuite/blueprints.py,sha256=uLsqDu1AyTPeQJ6Ab1RHKPcYbo5s-7adSW5XSE1Fa0o,28089
+flask/testsuite/config.py,sha256=h64-JdPdlU6a7O8YfwAEriYZ30yAD1c_vjjYgyJ-JzA,11820
+flask/testsuite/deprecations.py,sha256=tsvmOrjcG1tV6vV4ySL5a7V4o77Tnf4wd_-yqVhUXIc,511
+flask/testsuite/examples.py,sha256=KyCnXuKNFfBEAyPl12a-etPF_PRufrJGwP09zTvl714,942
+flask/testsuite/ext.py,sha256=5jOofZijgqi5OmRV7YxB7RS-RY7HuSj3L90316yH_IY,5156
+flask/testsuite/helpers.py,sha256=O_pZrLMqnltQczhsvr2zf9doCz9Nc9Fp-H51Ktx7-hY,21973
+flask/testsuite/regression.py,sha256=QDMe2hkgu3lV8RaUhHsICciKWhSC6L2TaYe9qITlyL8,3198
+flask/testsuite/reqctx.py,sha256=EriP7GP23zt3zWKabTvEzldvp0EqRe--v8W-ZKwCz9k,5960
+flask/testsuite/signals.py,sha256=jclEzgHqRhPgM3uoxRbgmXIiFm_HWlrkqT5M0hBv5xw,4807
+flask/testsuite/subclassing.py,sha256=TT-nltJBy1Xi1BxcdztGZ3hmMq9h_nk1CVeSy9qBrXQ,1214
+flask/testsuite/templating.py,sha256=2lg2-MPMGkmlKap9M711uVXC08iZU8w3EtmTI2la_N8,11237
+flask/testsuite/testing.py,sha256=ihNMpcAxZ3wArSjUvFuCaHSREgjQNV5UAS2W1vYSNt4,7411
+flask/testsuite/views.py,sha256=2ThDMtk1zoMvuXzRuP1Wh3e_hgDwPtyu76TXycVyhqc,5068
+flask/testsuite/static/index.html,sha256=f28C_muYEAWoHyfaubxti-TLB0wMjFZHII3e9TRsmsI,22
+flask/testsuite/templates/_macro.html,sha256=XA2qnwpJpO3auSjZlKXODzeU4Fcnim12ie5lMXeL8ms,55
+flask/testsuite/templates/context_template.html,sha256=hXvHzdggsqqO2eXRlb6EhGDTTzPIQicaMk2EibzagDQ,36
+flask/testsuite/templates/escaping_template.html,sha256=ORPM3QIOIu_lWv9M101bPHZt-bmy8Biu2663aAoO50g,147
+flask/testsuite/templates/mail.txt,sha256=8f9j1PxUo_deGbrpJuDdrii16qE-xtV9RU6mYPVE_64,14
+flask/testsuite/templates/simple_template.html,sha256=d0akkxWQ1UQkDiL45b8nUvCE0KpL579DhioYlv713bE,23
+flask/testsuite/templates/template_filter.html,sha256=ZIk3H4Oa8fDn62Ij6jDzdHukLj3RXGtmBnkEtgTUmQ0,25
+flask/testsuite/templates/template_test.html,sha256=g3ZgUQN0PaOiz289_AxNeP3ICfR29ogzbQl3fCvxK_8,51
+flask/testsuite/templates/nested/nested.txt,sha256=76PlNDtwDVWjZMPHRlBqSX95jY52e_RWZCJKZfRWkAU,11
+flask/testsuite/test_apps/config_module_app.py,sha256=BaBJCCI4TcOc9Kc5OvlgmaCtfQH8D31xVZMLZ3FmyRE,101
+flask/testsuite/test_apps/flask_newext_simple.py,sha256=IIsV29nQ37pV2Z3O8GQ5a33YkIlII8B0rbOqz5k4ZxQ,25
+flask/testsuite/test_apps/importerror.py,sha256=gA8IjK673B4Rit7pYYnkspF2k9_zpztPmiMYgrwgzdk,46
+flask/testsuite/test_apps/main_app.py,sha256=mah5bd5ObZV6bl4YbnJEEdQcr5NqgMFmZqcaAITIReE,90
+flask/testsuite/test_apps/blueprintapp/__init__.py,sha256=OeOuAvS6IghuIekcXqfgPuoACL6JSxKw_xOpvgTx7lE,200
+flask/testsuite/test_apps/blueprintapp/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.py,sha256=L5kSKkfOewREXLveDLogxtGyF2UrkIczxo4PZrjYCl8,362
+flask/testsuite/test_apps/blueprintapp/apps/admin/static/test.txt,sha256=lcPiuni6LR_rw5mxyjRmz8iMcifiSSNqPFlIrmLKzjY,11
+flask/testsuite/test_apps/blueprintapp/apps/admin/static/css/test.css,sha256=F1DG-ABW3o6yAD9Nc6IzV9iYefZeOHhn59009LMJboY,18
+flask/testsuite/test_apps/blueprintapp/apps/admin/templates/admin/index.html,sha256=2OWS6QYqxaarnZAWkLvam0P2QSodRNkA6M05eEOVLw4,21
+flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.py,sha256=djItvU0fqQg-MOlVoJR3--6cYf4VT1SI0WFsQyjQk4o,204
+flask/testsuite/test_apps/blueprintapp/apps/frontend/templates/frontend/index.html,sha256=hcC7DORSYxGNK5DAICVx1BeFVlzaanxcIhODQwm9rwg,24
+flask/testsuite/test_apps/config_package_app/__init__.py,sha256=BaBJCCI4TcOc9Kc5OvlgmaCtfQH8D31xVZMLZ3FmyRE,101
+flask/testsuite/test_apps/flask_broken/__init__.py,sha256=CDC0TTkjrjeLs7ZXzLwdjS0yFiE2DM9uZ65q927Xx-c,48
+flask/testsuite/test_apps/flask_broken/b.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+flask/testsuite/test_apps/flask_newext_package/__init__.py,sha256=UX_8L5rL7scxeUhXbUklOXDzE16eQ0eSdykHJvCe9O4,26
+flask/testsuite/test_apps/flask_newext_package/submodule.py,sha256=VBKM2Msdeq6F7ziaPiwTbcu9c4wAH4kKZH6FDUPi6zg,35
+flask/testsuite/test_apps/flaskext/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+flask/testsuite/test_apps/flaskext/oldext_simple.py,sha256=fuHRIiStUQIS-JoiE_Vwkc_nRX3qkMETN1yBtEDQyck,25
+flask/testsuite/test_apps/flaskext/oldext_package/__init__.py,sha256=u6W0k3--q15e2nTlDWvTA0gvu1EuvaJQGIb-y9gPe8A,26
+flask/testsuite/test_apps/flaskext/oldext_package/submodule.py,sha256=VBKM2Msdeq6F7ziaPiwTbcu9c4wAH4kKZH6FDUPi6zg,35
+flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.py,sha256=yObUTJgzGrSHwp-eLsE-bMbGmKLogUdE6bCn7jQPR6A,42
+flask/testsuite/test_apps/lib/python2.5/site-packages/SiteEgg.egg,sha256=fVBqIK3Yab7f_mM6B5dZO2zN1N9abqQKS1xRIbKNsgc,1218
+flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.py,sha256=yObUTJgzGrSHwp-eLsE-bMbGmKLogUdE6bCn7jQPR6A,42
+flask/testsuite/test_apps/moduleapp/__init__.py,sha256=x5mMfvASdMFFNFp0tq2Fwpp_-9DxNlRwHIFxj_bhbnQ,188
+flask/testsuite/test_apps/moduleapp/apps/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+flask/testsuite/test_apps/moduleapp/apps/admin/__init__.py,sha256=ESnyU_sz54kX22RJUOyciuHlGz0_c9d1qGxrzke9V7E,259
+flask/testsuite/test_apps/moduleapp/apps/admin/static/test.txt,sha256=lcPiuni6LR_rw5mxyjRmz8iMcifiSSNqPFlIrmLKzjY,11
+flask/testsuite/test_apps/moduleapp/apps/admin/static/css/test.css,sha256=F1DG-ABW3o6yAD9Nc6IzV9iYefZeOHhn59009LMJboY,18
+flask/testsuite/test_apps/moduleapp/apps/admin/templates/index.html,sha256=2OWS6QYqxaarnZAWkLvam0P2QSodRNkA6M05eEOVLw4,21
+flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.py,sha256=qtVGS_XzTw3uxAyEcOi49uXB4o-u4ZEZLQrhwIUkEME,158
+flask/testsuite/test_apps/moduleapp/apps/frontend/templates/index.html,sha256=hcC7DORSYxGNK5DAICVx1BeFVlzaanxcIhODQwm9rwg,24
+flask/testsuite/test_apps/path/installed_package/__init__.py,sha256=yObUTJgzGrSHwp-eLsE-bMbGmKLogUdE6bCn7jQPR6A,42
+flask/testsuite/test_apps/subdomaintestmodule/__init__.py,sha256=o7W5jKRXGVqfjwYKWjI3QX8WUztGxuVnGiZ-wSGd1Fk,74
+flask/testsuite/test_apps/subdomaintestmodule/static/hello.txt,sha256=5PvE5WQoKT9dXkOTZ63vfG9F0iAysyTmSTX0dA4uiQI,16
+Flask-0.10.1.dist-info/DESCRIPTION.rst,sha256=YsWE_jM8p5TWeiY2cuesAHH5JUW95FFaHKvruzGCg5o,694
+Flask-0.10.1.dist-info/METADATA,sha256=R0Epx982RlWRzCkkSHWbqD7n9XaqCt-etgGGrwAixNQ,1507
+Flask-0.10.1.dist-info/metadata.json,sha256=Dl1YMaSt0mHld2qGo3MdtpC3GS0lo0AYvSi5Ga0GoXI,970
+Flask-0.10.1.dist-info/RECORD,,
+Flask-0.10.1.dist-info/top_level.txt,sha256=dvi65F6AeGWVU0TBpYiC04yM60-FX1gJFkK31IKQr5c,6
+Flask-0.10.1.dist-info/WHEEL,sha256=54bVun1KfEBTJ68SHUmbxNPj80VxlQ0sHi4gZdGZXEY,92
+flask/config.pyc,,
+flask/testsuite/deprecations.pyc,,
+flask/testsuite/test_apps/flaskext/oldext_package/__init__.pyc,,
+flask/testsuite/test_apps/blueprintapp/__init__.pyc,,
+flask/testsuite/regression.pyc,,
+flask/module.pyc,,
+flask/debughelpers.pyc,,
+flask/testsuite/signals.pyc,,
+flask/testsuite/examples.pyc,,
+flask/testsuite/test_apps/blueprintapp/apps/frontend/__init__.pyc,,
+flask/testsuite/test_apps/moduleapp/apps/__init__.pyc,,
+flask/sessions.pyc,,
+flask/logging.pyc,,
+flask/testsuite/test_apps/path/installed_package/__init__.pyc,,
+flask/testsuite/test_apps/flask_broken/__init__.pyc,,
+flask/__init__.pyc,,
+flask/testsuite/config.pyc,,
+flask/testsuite/test_apps/importerror.pyc,,
+flask/testsuite/test_apps/flaskext/oldext_simple.pyc,,
+flask/testsuite/subclassing.pyc,,
+flask/testsuite/__init__.pyc,,
+flask/testsuite/test_apps/lib/python2.5/site-packages/site_package/__init__.pyc,,
+flask/globals.pyc,,
+flask/testsuite/test_apps/lib/python2.5/site-packages/site_app.pyc,,
+flask/testsuite/test_apps/subdomaintestmodule/__init__.pyc,,
+flask/views.pyc,,
+flask/json.pyc,,
+flask/testsuite/test_apps/flask_newext_simple.pyc,,
+flask/testsuite/test_apps/flask_broken/b.pyc,,
+flask/testsuite/test_apps/flask_newext_package/__init__.pyc,,
+flask/testsuite/test_apps/config_module_app.pyc,,
+flask/testsuite/blueprints.pyc,,
+flask/testsuite/test_apps/flaskext/oldext_package/submodule.pyc,,
+flask/blueprints.pyc,,
+flask/testsuite/test_apps/flaskext/__init__.pyc,,
+flask/exthook.pyc,,
+flask/testsuite/test_apps/blueprintapp/apps/__init__.pyc,,
+flask/testsuite/templating.pyc,,
+flask/testsuite/test_apps/main_app.pyc,,
+flask/testsuite/testing.pyc,,
+flask/_compat.pyc,,
+flask/ctx.pyc,,
+flask/testsuite/test_apps/moduleapp/apps/admin/__init__.pyc,,
+flask/testsuite/test_apps/moduleapp/__init__.pyc,,
+flask/testsuite/appctx.pyc,,
+flask/app.pyc,,
+flask/ext/__init__.pyc,,
+flask/testing.pyc,,
+flask/wrappers.pyc,,
+flask/testsuite/helpers.pyc,,
+flask/helpers.pyc,,
+flask/testsuite/test_apps/moduleapp/apps/frontend/__init__.pyc,,
+flask/testsuite/views.pyc,,
+flask/signals.pyc,,
+flask/testsuite/test_apps/flask_newext_package/submodule.pyc,,
+flask/testsuite/basic.pyc,,
+flask/templating.pyc,,
+flask/testsuite/test_apps/blueprintapp/apps/admin/__init__.pyc,,
+flask/testsuite/reqctx.pyc,,
+flask/testsuite/ext.pyc,,
+flask/testsuite/test_apps/config_package_app/__init__.pyc,,

+ 5 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+

File diff suppressed because it is too large
+ 0 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/Flask-0.10.1.dist-info/top_level.txt

@@ -0,0 +1 @@
+flask

+ 36 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/DESCRIPTION.rst

@@ -0,0 +1,36 @@
+Jinja2
+~~~~~~
+
+Jinja2 is a template engine written in pure Python.  It provides a
+`Django`_ inspired non-XML syntax but supports inline expressions and
+an optional `sandboxed`_ environment.
+
+Nutshell
+--------
+
+Here is a small example of a Jinja template::
+
+    {% extends 'base.html' %}
+    {% block title %}Memberlist{% endblock %}
+    {% block content %}
+      <ul>
+      {% for user in users %}
+        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+      {% endfor %}
+      </ul>
+    {% endblock %}
+
+Philosophy
+----------
+
+Application logic is for the controller, but don't make life too hard
+for the template designer by giving them too little functionality.
+
+For more information, visit the `Jinja2 webpage`_ and `documentation`_.
+
+.. _sandboxed: http://en.wikipedia.org/wiki/Sandbox_(computer_security)
+.. _Django: http://www.djangoproject.com/
+.. _Jinja2 webpage: http://jinja.pocoo.org/
+.. _documentation: http://jinja.pocoo.org/2/documentation/
+
+

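As a companion to the vendored description above, here is a minimal sketch of rendering such a template through the Jinja2 API this wheel installs; the template string and variable names are illustrative and not part of the commit:

    # Illustrative only: render an inline-expression/for-loop template
    # like the Nutshell example, using the public jinja2.Template class.
    from jinja2 import Template

    tmpl = Template(u"{% for user in users %}* {{ user }}\n{% endfor %}")
    print(tmpl.render(users=[u"alice", u"bob"]))  # -> "* alice\n* bob\n"
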
+ 63 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/METADATA

@@ -0,0 +1,63 @@
+Metadata-Version: 2.0
+Name: Jinja2
+Version: 2.8
+Summary: A small but fast and easy to use stand-alone template engine written in pure python.
+Home-page: http://jinja.pocoo.org/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+License: BSD
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+Requires-Dist: MarkupSafe
+Provides-Extra: i18n
+Requires-Dist: Babel (>=0.8); extra == 'i18n'
+
+Jinja2
+~~~~~~
+
+Jinja2 is a template engine written in pure Python.  It provides a
+`Django`_ inspired non-XML syntax but supports inline expressions and
+an optional `sandboxed`_ environment.
+
+Nutshell
+--------
+
+Here is a small example of a Jinja template::
+
+    {% extends 'base.html' %}
+    {% block title %}Memberlist{% endblock %}
+    {% block content %}
+      <ul>
+      {% for user in users %}
+        <li><a href="{{ user.url }}">{{ user.username }}</a></li>
+      {% endfor %}
+      </ul>
+    {% endblock %}
+
+Philosophy
+----------
+
+Application logic is for the controller, but don't make life too hard
+for the template designer by giving them too little functionality.
+
+For more information, visit the `Jinja2 webpage`_ and `documentation`_.
+
+.. _sandboxed: http://en.wikipedia.org/wiki/Sandbox_(computer_security)
+.. _Django: http://www.djangoproject.com/
+.. _Jinja2 webpage: http://jinja.pocoo.org/
+.. _documentation: http://jinja.pocoo.org/2/documentation/
+
+

+ 53 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/RECORD

@@ -0,0 +1,53 @@
+jinja2/__init__.py,sha256=c59bnaAFo63I7lYUZlO2UKHj8LPG3JACKnCrwWgvjGY,2326
+jinja2/_compat.py,sha256=O4FnYOMi4HRBfoCKkX137tt3sR6HvpnQNcwqg8ARYog,3109
+jinja2/_stringdefs.py,sha256=SFObWX5vSMeGNc_aSO3_B2EEmScCstFWtjS4K0YFBXk,404291
+jinja2/bccache.py,sha256=EMN9fsvOpwK3DfxQ9F1lmWoxU2Qlo6AnNhPXTsMrw84,12793
+jinja2/compiler.py,sha256=nQmoS6HpGwgDIC8UXkSdjPYiAjbVqZ-Gf4odO-SAR6E,63846
+jinja2/constants.py,sha256=DCr-oKC2xQO-fkOQO3kXRJW7rEYgmcsMRNpPnM66YSU,1626
+jinja2/debug.py,sha256=GEGHM8vFsNFF-kGc0_fwyj1ftMtuyaH4r0nyG-XA9Z8,11553
+jinja2/defaults.py,sha256=eLMOE7JC52QwZBu5Gz4TPZqzoJy9IgV5EynL_pW7MUw,1057
+jinja2/environment.py,sha256=jzJmujSFtxb1HvITO4TdUCOOA-hSZx0gHrxeDZ2VE-M,48120
+jinja2/exceptions.py,sha256=Q9yZOUif-lhVj5BRw0ELjfBvEdBsB7xZobgOvC2qGy4,4428
+jinja2/ext.py,sha256=X-1zCiut1cuxIteKPkJr3jb6odlVE1jciO8RnrMniPE,25072
+jinja2/filters.py,sha256=R4x2flPfyzIjrtItzpGpK4LzBvx-NOlEXH9wD-ZBWtU,30115
+jinja2/lexer.py,sha256=QyiQwAQVEE2YREZJLcA04F3yqv0XOwBbSlWaFW4xJ20,28425
+jinja2/loaders.py,sha256=BgDCvmiB0gH_zPMf-6TMemqtJdrck3IyJ8g0kWUvFa0,17380
+jinja2/meta.py,sha256=cxAOtMuSWWSQX2H8zhYsAtjNwRcNB8Zvs06Y-JlWnbk,4198
+jinja2/nodes.py,sha256=YN6hfFa0WlfToG2r-Q-yhUkAUp0O9l8KulK53mOAVUo,28954
+jinja2/optimizer.py,sha256=bNNKbo5SC5FBUm9dvP-I3GkiXZYBYIER7_g9hK77ZVI,2302
+jinja2/parser.py,sha256=pjLfkZDg2IKJKt_ixNosV-RzwAja5GWYuVeBQumIRns,35442
+jinja2/runtime.py,sha256=Ct36Q9-gVmKer45syS4j3thQ15T_DnLDh6CqvTcnPwQ,22530
+jinja2/sandbox.py,sha256=qgH4CoBsF5NwGj0krqsCOw8sg2mXmfpZKnvmZEE-da4,13327
+jinja2/tests.py,sha256=znB0L_k6wdKp_lQJvxboXwUXDy1HhFe5SSA888tHt_w,4131
+jinja2/utils.py,sha256=pjbOhQJ5NYexu2MbjA66nBibudUkYcQRZbxvbYE0tFk,16560
+jinja2/visitor.py,sha256=3hEAYD26xS_JiJBf4RfcqYPpiuR6efOH8Hh6om59eU8,3316
+Jinja2-2.8.dist-info/DESCRIPTION.rst,sha256=CXIS1UnPSk5_lZBS6Lb8ko-3lqGfjsiUwNBLXCTj2lc,975
+Jinja2-2.8.dist-info/entry_points.txt,sha256=NdzVcOrqyNyKDxD09aERj__3bFx2paZhizFDsKmVhiA,72
+Jinja2-2.8.dist-info/METADATA,sha256=Vio5F8qaEVcGzaCV1rl8tIWEKsHUFSSSAfL0u9oMmGk,2061
+Jinja2-2.8.dist-info/metadata.json,sha256=4TsqsSBwGwy0C2xF_uRZHYsRn2W5Lv4NUMBjTnXPldM,1275
+Jinja2-2.8.dist-info/RECORD,,
+Jinja2-2.8.dist-info/top_level.txt,sha256=PkeVWtLb3-CqjWi1fO29OCbj55EhX_chhKrCdrVe_zs,7
+Jinja2-2.8.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+jinja2/_compat.pyc,,
+jinja2/sandbox.pyc,,
+jinja2/_stringdefs.pyc,,
+jinja2/bccache.pyc,,
+jinja2/runtime.pyc,,
+jinja2/utils.pyc,,
+jinja2/debug.pyc,,
+jinja2/parser.pyc,,
+jinja2/defaults.pyc,,
+jinja2/visitor.pyc,,
+jinja2/ext.pyc,,
+jinja2/lexer.pyc,,
+jinja2/nodes.pyc,,
+jinja2/environment.pyc,,
+jinja2/compiler.pyc,,
+jinja2/exceptions.pyc,,
+jinja2/__init__.pyc,,
+jinja2/meta.pyc,,
+jinja2/loaders.pyc,,
+jinja2/optimizer.pyc,,
+jinja2/filters.pyc,,
+jinja2/tests.pyc,,
+jinja2/constants.pyc,,

+ 6 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/WHEEL

@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+

+ 4 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/entry_points.txt

@@ -0,0 +1,4 @@
+
+    [babel.extractors]
+    jinja2 = jinja2.ext:babel_extract[i18n]
+    

File diff suppressed because it is too large
+ 0 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/Jinja2-2.8.dist-info/top_level.txt

@@ -0,0 +1 @@
+jinja2

+ 101 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/DESCRIPTION.rst

@@ -0,0 +1,101 @@
+MarkupSafe
+==========
+
+Implements a unicode subclass that supports HTML strings:
+
+>>> from markupsafe import Markup, escape
+>>> escape("<script>alert(document.cookie);</script>")
+Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+>>> tmpl = Markup("<em>%s</em>")
+>>> tmpl % "Peter > Lustig"
+Markup(u'<em>Peter &gt; Lustig</em>')
+
+If you want to coerce an object to unicode without losing the taint
+information, you can use the `soft_unicode` function.  (On Python 3
+you can also use `soft_str`, which is a different name for the same
+function.)
+
+>>> from markupsafe import soft_unicode
+>>> soft_unicode(42)
+u'42'
+>>> soft_unicode(Markup('foo'))
+Markup(u'foo')
+
+HTML Representations
+--------------------
+
+Objects can customize their HTML markup equivalent by overriding
+the `__html__` function:
+
+>>> class Foo(object):
+...  def __html__(self):
+...   return '<strong>Nice</strong>'
+...
+>>> escape(Foo())
+Markup(u'<strong>Nice</strong>')
+>>> Markup(Foo())
+Markup(u'<strong>Nice</strong>')
+
+Silent Escapes
+--------------
+
+Since MarkupSafe 0.10 there is now also a separate escape function
+called `escape_silent` that returns an empty string for `None` for
+consistency with other systems that return empty strings for `None`
+when escaping (for instance Pylons' webhelpers).
+
+If you also want to use this for the escape method of the Markup
+object, you can create your own subclass that does that::
+
+    from markupsafe import Markup, escape_silent as escape
+
+    class SilentMarkup(Markup):
+        __slots__ = ()
+
+        @classmethod
+        def escape(cls, s):
+            return cls(escape(s))
+
+New-Style String Formatting
+---------------------------
+
+Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and
+3.x are now fully supported.  Previously the escape behavior of those
+functions was spotty at best.  The new implementation operates under the
+following algorithm:
+
+1.  if an object has an ``__html_format__`` method it is called as
+    replacement for ``__format__`` with the format specifier.  It either
+    has to return a string or markup object.
+2.  if an object has an ``__html__`` method it is called.
+3.  otherwise the default format system of Python kicks in and the result
+    is HTML escaped.
+
+Here is how you can implement your own formatting::
+
+    class User(object):
+
+        def __init__(self, id, username):
+            self.id = id
+            self.username = username
+
+        def __html_format__(self, format_spec):
+            if format_spec == 'link':
+                return Markup('<a href="/user/{0}">{1}</a>').format(
+                    self.id,
+                    self.__html__(),
+                )
+            elif format_spec:
+                raise ValueError('Invalid format spec')
+            return self.__html__()
+
+        def __html__(self):
+            return Markup('<span class=user>{0}</span>').format(self.username)
+
+And to format that user:
+
+>>> user = User(1, 'foo')
+>>> Markup('<p>User: {0:link}').format(user)
+Markup(u'<p>User: <a href="/user/1"><span class=user>foo</span></a>')
+
+

+ 121 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/METADATA

@@ -0,0 +1,121 @@
+Metadata-Version: 2.0
+Name: MarkupSafe
+Version: 0.23
+Summary: Implements an XML/HTML/XHTML Markup safe string for Python
+Home-page: http://github.com/mitsuhiko/markupsafe
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+License: BSD
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: Text Processing :: Markup :: HTML
+
+MarkupSafe
+==========
+
+Implements a unicode subclass that supports HTML strings:
+
+>>> from markupsafe import Markup, escape
+>>> escape("<script>alert(document.cookie);</script>")
+Markup(u'&lt;script&gt;alert(document.cookie);&lt;/script&gt;')
+>>> tmpl = Markup("<em>%s</em>")
+>>> tmpl % "Peter > Lustig"
+Markup(u'<em>Peter &gt; Lustig</em>')
+
+If you want to coerce an object to unicode without losing the taint
+information, you can use the `soft_unicode` function.  (On Python 3
+you can also use `soft_str`, which is a different name for the same
+function.)
+
+>>> from markupsafe import soft_unicode
+>>> soft_unicode(42)
+u'42'
+>>> soft_unicode(Markup('foo'))
+Markup(u'foo')
+
+HTML Representations
+--------------------
+
+Objects can customize their HTML markup equivalent by overriding
+the `__html__` function:
+
+>>> class Foo(object):
+...  def __html__(self):
+...   return '<strong>Nice</strong>'
+...
+>>> escape(Foo())
+Markup(u'<strong>Nice</strong>')
+>>> Markup(Foo())
+Markup(u'<strong>Nice</strong>')
+
+Silent Escapes
+--------------
+
+Since MarkupSafe 0.10 there is now also a separate escape function
+called `escape_silent` that returns an empty string for `None` for
+consistency with other systems that return empty strings for `None`
+when escaping (for instance Pylons' webhelpers).
+
+If you also want to use this for the escape method of the Markup
+object, you can create your own subclass that does that::
+
+    from markupsafe import Markup, escape_silent as escape
+
+    class SilentMarkup(Markup):
+        __slots__ = ()
+
+        @classmethod
+        def escape(cls, s):
+            return cls(escape(s))
+
+New-Style String Formatting
+---------------------------
+
+Starting with MarkupSafe 0.21 new style string formats from Python 2.6 and
+3.x are now fully supported.  Previously the escape behavior of those
+functions was spotty at best.  The new implementation operates under the
+following algorithm:
+
+1.  if an object has an ``__html_format__`` method it is called as
+    replacement for ``__format__`` with the format specifier.  It either
+    has to return a string or markup object.
+2.  if an object has an ``__html__`` method it is called.
+3.  otherwise the default format system of Python kicks in and the result
+    is HTML escaped.
+
+Here is how you can implement your own formatting::
+
+    class User(object):
+
+        def __init__(self, id, username):
+            self.id = id
+            self.username = username
+
+        def __html_format__(self, format_spec):
+            if format_spec == 'link':
+                return Markup('<a href="/user/{0}">{1}</a>').format(
+                    self.id,
+                    self.__html__(),
+                )
+            elif format_spec:
+                raise ValueError('Invalid format spec')
+            return self.__html__()
+
+        def __html__(self):
+            return Markup('<span class=user>{0}</span>').format(self.username)
+
+And to format that user:
+
+>>> user = User(1, 'foo')
+>>> Markup('<p>User: {0:link}').format(user)
+Markup(u'<p>User: <a href="/user/1"><span class=user>foo</span></a>')
+
+

+ 18 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/RECORD

@@ -0,0 +1,18 @@
+markupsafe/__init__.py,sha256=zFQpANILi3mCCALiPd6ZJdlW6ibu_hTKzikMXKXVtaM,10338
+markupsafe/_compat.py,sha256=r1HE0CpcAZeb-AiTV9wITR91PeLHn0CzZ_XHkYoozpI,565
+markupsafe/_constants.py,sha256=U_xybFQsyXKCgHSfranJnFzo-z9nn9fuBeSk243sE5Q,4795
+markupsafe/_native.py,sha256=E2Un1ysOf-w45d18YCj8UelT5UP7Vt__IuFPYJ7YRIs,1187
+markupsafe/_speedups.c,sha256=gZwPEM_0zcbAzJjPuPYXk97R67QR1uUGtDvOPsvirCA,5939
+markupsafe/_speedups.so,sha256=wCX_WrXWXYnBY6ZJyIwIfLfp21LZmtr6y8ntNrNonWc,26892
+markupsafe/tests.py,sha256=RLI4eYI0ICNZwkoN638VHXf_fDu4d_jnvbGr22j58Ng,6107
+MarkupSafe-0.23.dist-info/DESCRIPTION.rst,sha256=VnEbwPneiOkqh-nzxb0DUiGlcVGHuaDQjsNBLi-yNYw,3091
+MarkupSafe-0.23.dist-info/METADATA,sha256=g-KikeSr9J7vagkJoCt0ViT2ORy9O4NYV7XtRu1Pni8,3879
+MarkupSafe-0.23.dist-info/metadata.json,sha256=pbwe44bNun_O3P76VOAq-IkPgHJekqyiyIjo68QrkEc,901
+MarkupSafe-0.23.dist-info/RECORD,,
+MarkupSafe-0.23.dist-info/top_level.txt,sha256=qy0Plje5IJuvsCBjejJyhDCjEAdcDLK_2agVcex8Z6U,11
+MarkupSafe-0.23.dist-info/WHEEL,sha256=z3IOIAk4e1vMDuU6DA5QbANK9Dd8Ln1lTP-1Xi7RlK8,109
+markupsafe/_native.pyc,,
+markupsafe/_constants.pyc,,
+markupsafe/_compat.pyc,,
+markupsafe/tests.pyc,,
+markupsafe/__init__.pyc,,

+ 5 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: false
+Tag: cp27-none-macosx_10_11_intel
+

File diff suppressed because it is too large
+ 0 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/MarkupSafe-0.23.dist-info/top_level.txt

@@ -0,0 +1 @@
+markupsafe

+ 54 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/DESCRIPTION.rst

@@ -0,0 +1,54 @@
+Werkzeug
+========
+
+Werkzeug started as a simple collection of various utilities for WSGI
+applications and has become one of the most advanced WSGI utility
+modules.  It includes a powerful debugger, full featured request and
+response objects, HTTP utilities to handle entity tags, cache control
+headers, HTTP dates, cookie handling, file uploads, a powerful URL
+routing system and a bunch of community contributed addon modules.
+
+Werkzeug is unicode aware and doesn't enforce a specific template
+engine, database adapter or anything else.  It doesn't even enforce
+a specific way of handling requests and leaves all that up to the
+developer. It's most useful for end user applications which should work
+on as many server environments as possible (such as blogs, wikis,
+bulletin boards, etc.).
+
+Details and example applications are available on the
+`Werkzeug website <http://werkzeug.pocoo.org/>`_.
+
+
+Features
+--------
+
+-   unicode awareness
+
+-   request and response objects
+
+-   various utility functions for dealing with HTTP headers such as
+    `Accept` and `Cache-Control` headers.
+
+-   thread local objects with proper cleanup at request end
+
+-   an interactive debugger
+
+-   A simple WSGI server with support for threading and forking
+    with an automatic reloader.
+
+-   a flexible URL routing system with REST support.
+
+-   fully WSGI compatible
+
+
+Development Version
+-------------------
+
+The Werkzeug development version can be installed by cloning the git
+repository from `github`_::
+
+    git clone git@github.com:mitsuhiko/werkzeug.git
+
+.. _github: http://github.com/mitsuhiko/werkzeug
+
+

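To make the feature list above concrete, here is a minimal sketch of a WSGI application built on the request/response objects and the development server from this Werkzeug release; the host, port, and greeting are placeholders:

    # Illustrative only: the smallest useful Werkzeug WSGI app.
    from werkzeug.wrappers import Request, Response

    @Request.application            # wraps a plain function into a WSGI app
    def application(request):
        # request wraps the WSGI environ; Response is itself WSGI-callable.
        return Response('Hello from Werkzeug!')

    if __name__ == '__main__':
        from werkzeug.serving import run_simple
        # The dev server with the automatic reloader mentioned above.
        run_simple('127.0.0.1', 5000, application, use_reloader=True)
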
+ 79 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/METADATA

@@ -0,0 +1,79 @@
+Metadata-Version: 2.0
+Name: Werkzeug
+Version: 0.11.2
+Summary: The Swiss Army knife of Python web development
+Home-page: http://werkzeug.pocoo.org/
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+License: BSD
+Platform: any
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Web Environment
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+Classifier: Topic :: Internet :: WWW/HTTP :: Dynamic Content
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+
+Werkzeug
+========
+
+Werkzeug started as a simple collection of various utilities for WSGI
+applications and has become one of the most advanced WSGI utility
+modules.  It includes a powerful debugger, full featured request and
+response objects, HTTP utilities to handle entity tags, cache control
+headers, HTTP dates, cookie handling, file uploads, a powerful URL
+routing system and a bunch of community contributed addon modules.
+
+Werkzeug is unicode aware and doesn't enforce a specific template
+engine, database adapter or anything else.  It doesn't even enforce
+a specific way of handling requests and leaves all that up to the
+developer. It's most useful for end user applications which should work
+on as many server environments as possible (such as blogs, wikis,
+bulletin boards, etc.).
+
+Details and example applications are available on the
+`Werkzeug website <http://werkzeug.pocoo.org/>`_.
+
+
+Features
+--------
+
+-   unicode awareness
+
+-   request and response objects
+
+-   various utility functions for dealing with HTTP headers such as
+    `Accept` and `Cache-Control` headers.
+
+-   thread local objects with proper cleanup at request end
+
+-   an interactive debugger
+
+-   A simple WSGI server with support for threading and forking
+    with an automatic reloader.
+
+-   a flexible URL routing system with REST support.
+
+-   fully WSGI compatible
+
+
+Development Version
+-------------------
+
+The Werkzeug development version can be installed by cloning the git
+repository from `github`_::
+
+    git clone git@github.com:mitsuhiko/werkzeug.git
+
+.. _github: http://github.com/mitsuhiko/werkzeug
+
+

+ 93 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/RECORD

@@ -0,0 +1,93 @@
+Werkzeug-0.11.2.dist-info/DESCRIPTION.rst,sha256=5sTwZ_Sj5aeEN8mlcOdNJ_ng40HiGazGmILLyTMX8o0,1595
+Werkzeug-0.11.2.dist-info/METADATA,sha256=Gzt-y2NPdJkUY9SFc4g0kHtS1mikdG-uwhWzL747kKg,2599
+Werkzeug-0.11.2.dist-info/RECORD,,
+Werkzeug-0.11.2.dist-info/WHEEL,sha256=GrqQvamwgBV4nLoJe0vhYRSWzWsx7xjlt74FT0SWYfE,110
+Werkzeug-0.11.2.dist-info/metadata.json,sha256=Y0hE0Kgbe7uzjxbKMd9_NZj0sKuxiMSYABJqPu86VHo,1095
+Werkzeug-0.11.2.dist-info/top_level.txt,sha256=QRyj2VjwJoQkrwjwFIOlB8Xg3r9un0NtqVHQF-15xaw,9
+werkzeug/__init__.py,sha256=ejcgDnbN8NLHeEZIe70kgK8Km7KW6vC6Rt5E0NVVir8,6919
+werkzeug/_compat.py,sha256=8c4U9o6A_TR9nKCcTbpZNxpqCXcXDVIbFawwKM2s92c,6311
+werkzeug/_internal.py,sha256=IEScSoFtQ8KqFH_2ubdfscNAdQ2RIysyVupI5BR9W2U,13709
+werkzeug/_reloader.py,sha256=YQykMSQW7AlojJQ7qOlgNaXw5_CNjf9yzxplwzVdL7Q,8336
+werkzeug/datastructures.py,sha256=iQ4TqWXdHSXtL5srpP8t-6bEJEiKDkfiI8lHOl_A_kU,87447
+werkzeug/exceptions.py,sha256=c-3fKHItsPvC52X_NwBNLcmGXR30h0WP5ynPSwCqPiw,18733
+werkzeug/filesystem.py,sha256=0_gjAftvnRBxoD6ZCssJDJztUjprsLC97eC_k4YRdXs,2174
+werkzeug/formparser.py,sha256=ndLQxfmq-IeNUlee30WHfxq1YggzSO1l7QGeeFVr99M,21207
+werkzeug/http.py,sha256=I4yzJHGnsaMW7kZQcWzNW9H7ijXT_iPR16jbN3jejVY,35031
+werkzeug/local.py,sha256=vWF4ECvkbXoOWM4BThsU5qEzC2LwdiEn7WA-qEt8pvg,14123
+werkzeug/posixemulation.py,sha256=xEF2Bxc-vUCPkiu4IbfWVd3LW7DROYAT-ExW6THqyzw,3519
+werkzeug/routing.py,sha256=TqiZD5HkwdLBnKBUjC5PlytzXmpczQC5dz54VfQzMOw,66350
+werkzeug/script.py,sha256=DwaVDcXdaOTffdNvlBdLitxWXjKaRVT32VbhDtljFPY,11365
+werkzeug/security.py,sha256=tuVc22OqoHV5K-TrYJmynCJJa12aUt9BQ3wR_vEPQ34,8971
+werkzeug/serving.py,sha256=5P-CbqJ0o6Z_ETLRlm8dL30e3Je18HJB-yD-n0EgIc8,27559
+werkzeug/test.py,sha256=nan0aDi3g5hyUzWCtaN3XL9HrbIsNNNgMNjwpfM6qMc,34152
+werkzeug/testapp.py,sha256=3HQRW1sHZKXuAjCvFMet4KXtQG3loYTFnvn6LWt-4zI,9396
+werkzeug/urls.py,sha256=DHtRDF5rPSzxDY8D1FW9ODxpN1uAPDGfyTlWcvInNBc,36704
+werkzeug/useragents.py,sha256=uqpgPcJ5BfcCVh9nPIIl2r3duIrIuENmrbRqbAMmPDk,5418
+werkzeug/utils.py,sha256=lkybtv_mq35zV1qhelvEcILTzrMUwZ9yon6E8XwapJE,22972
+werkzeug/wrappers.py,sha256=lKYevpKD1-quk9Cop7bsFxt1eWJxU3h33HCnOI_YzSU,77011
+werkzeug/wsgi.py,sha256=SzSjiVVGzjD5F1yKIbuZVINNdS5T_sD2ryHD6Dg9t5I,38011
+werkzeug/contrib/__init__.py,sha256=f7PfttZhbrImqpr5Ezre8CXgwvcGUJK7zWNpO34WWrw,623
+werkzeug/contrib/atom.py,sha256=rvijBrphjMzVObfuCR6ddu6aLwI_SiNiudu64OSTh4Q,15588
+werkzeug/contrib/cache.py,sha256=4W2WCT9Hw6HEU8yME9GuU4Xf8e50r2K84ASMxhLb6tY,27983
+werkzeug/contrib/fixers.py,sha256=MtN_YmENxoTsGvXGGERmtbQ62LaeFc5I2d1YifXNENA,10183
+werkzeug/contrib/iterio.py,sha256=pTX36rYCKO_9IEoB5sIN5cFSYszI9zdx6YhquWovcPY,10814
+werkzeug/contrib/jsrouting.py,sha256=QTmgeDoKXvNK02KzXgx9lr3cAH6fAzpwF5bBdPNvJPs,8564
+werkzeug/contrib/limiter.py,sha256=iS8-ahPZ-JLRnmfIBzxpm7O_s3lPsiDMVWv7llAIDCI,1334
+werkzeug/contrib/lint.py,sha256=UxVVvYwnKa5BqYJdgfAzNc2w834OKgguvN-wg8E1Dhc,12322
+werkzeug/contrib/profiler.py,sha256=ISwCWvwVyGpDLRBRpLjo_qUWma6GXYBrTAco4PEQSHY,5151
+werkzeug/contrib/securecookie.py,sha256=X-Ao_0NRDveW6K1Fhe4U42hHWBW8esCpA3VcBDpzWIk,12206
+werkzeug/contrib/sessions.py,sha256=uAPcnyxaxEla-bUA13gKc3KK4mwSagdzbCZzyKl3PeE,12577
+werkzeug/contrib/testtools.py,sha256=G9xN-qeihJlhExrIZMCahvQOIDxdL9NiX874jiiHFMs,2453
+werkzeug/contrib/wrappers.py,sha256=Uv5FRO5OqKwOsNgkW2-FRcw0vUDe3uiaivjPNYWNfAk,10337
+werkzeug/debug/__init__.py,sha256=auYT2FW4gHDOgH1JAAsg8sU4nskWq24nVQjO6DQFD3k,13129
+werkzeug/debug/console.py,sha256=B7uAu9Rk60siDnGlEt-A_q1ZR4zCtmxx5itg3X-BOxo,5599
+werkzeug/debug/repr.py,sha256=NaoB89aHb0vuvdSWels-GWdeGDZp76uE4uSNZPX1jAM,9354
+werkzeug/debug/tbtools.py,sha256=L5P5TkGEHc_Bc5duNosP6D4CNe7ieTo1oiPX8nKQdek,18402
+werkzeug/debug/shared/FONT_LICENSE,sha256=LwAVEI1oYnvXiNMT9SnCH_TaLCxCpeHziDrMg0gPkAI,4673
+werkzeug/debug/shared/console.png,sha256=bxax6RXXlvOij_KeqvSNX0ojJf83YbnZ7my-3Gx9w2A,507
+werkzeug/debug/shared/debugger.js,sha256=PEMBoNuD6fUaNou8Km_ZvVmFcIA3z3k3jSEMWLW-cA0,6187
+werkzeug/debug/shared/jquery.js,sha256=7LkWEzqTdpEfELxcZZlS6wAx5Ff13zZ83lYO2_ujj7g,95957
+werkzeug/debug/shared/less.png,sha256=-4-kNRaXJSONVLahrQKUxMwXGm9R4OnZ9SxDGpHlIR4,191
+werkzeug/debug/shared/more.png,sha256=GngN7CioHQoV58rH6ojnkYi8c_qED2Aka5FO5UXrReY,200
+werkzeug/debug/shared/source.png,sha256=RoGcBTE4CyCB85GBuDGTFlAnUqxwTBiIfDqW15EpnUQ,818
+werkzeug/debug/shared/style.css,sha256=7x1s8olZO1XHalqD4M9MWn9vRqQkA635S9_6zRoe220,6231
+werkzeug/debug/shared/ubuntu.ttf,sha256=1eaHFyepmy4FyDvjLVzpITrGEBu_CZYY94jE0nED1c0,70220
+werkzeug/_reloader.pyc,,
+werkzeug/filesystem.pyc,,
+werkzeug/contrib/testtools.pyc,,
+werkzeug/formparser.pyc,,
+werkzeug/_compat.pyc,,
+werkzeug/posixemulation.pyc,,
+werkzeug/serving.pyc,,
+werkzeug/contrib/__init__.pyc,,
+werkzeug/contrib/iterio.pyc,,
+werkzeug/datastructures.pyc,,
+werkzeug/__init__.pyc,,
+werkzeug/contrib/limiter.pyc,,
+werkzeug/debug/tbtools.pyc,,
+werkzeug/contrib/sessions.pyc,,
+werkzeug/local.pyc,,
+werkzeug/utils.pyc,,
+werkzeug/contrib/lint.pyc,,
+werkzeug/security.pyc,,
+werkzeug/contrib/cache.pyc,,
+werkzeug/contrib/securecookie.pyc,,
+werkzeug/script.pyc,,
+werkzeug/routing.pyc,,
+werkzeug/wrappers.pyc,,
+werkzeug/contrib/jsrouting.pyc,,
+werkzeug/contrib/fixers.pyc,,
+werkzeug/contrib/profiler.pyc,,
+werkzeug/debug/console.pyc,,
+werkzeug/debug/__init__.pyc,,
+werkzeug/wsgi.pyc,,
+werkzeug/test.pyc,,
+werkzeug/http.pyc,,
+werkzeug/urls.pyc,,
+werkzeug/useragents.pyc,,
+werkzeug/_internal.pyc,,
+werkzeug/contrib/wrappers.pyc,,
+werkzeug/exceptions.pyc,,
+werkzeug/contrib/atom.pyc,,
+werkzeug/testapp.pyc,,
+werkzeug/debug/repr.pyc,,

+ 6 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/WHEEL

@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.26.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+

File diff suppressed because it is too large
+ 0 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/Werkzeug-0.11.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+werkzeug

+ 3 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/DESCRIPTION.rst

@@ -0,0 +1,3 @@
+UNKNOWN
+
+

+ 16 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/METADATA

@@ -0,0 +1,16 @@
+Metadata-Version: 2.0
+Name: itsdangerous
+Version: 0.24
+Summary: Various helpers to pass trusted data to untrusted environments and back.
+Home-page: http://github.com/mitsuhiko/itsdangerous
+Author: Armin Ronacher
+Author-email: armin.ronacher@active-4.com
+License: UNKNOWN
+Platform: UNKNOWN
+Classifier: License :: OSI Approved :: BSD License
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3
+
+UNKNOWN
+
+

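Since this package ships an empty description, here is a minimal sketch of its core use case per the summary above, signing data so it can round-trip through an untrusted environment; the secret key and payload are placeholders:

    # Illustrative only: sign a payload, verify it, and detect tampering.
    from itsdangerous import URLSafeSerializer, BadSignature

    s = URLSafeSerializer('secret-key')   # placeholder key
    token = s.dumps({'user_id': 42})      # compact, URL-safe signed string
    assert s.loads(token) == {'user_id': 42}

    try:
        s.loads('x' + token)              # corrupted payload
    except BadSignature:
        pass                              # tampering is detected
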
+ 8 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/RECORD

@@ -0,0 +1,8 @@
+itsdangerous.py,sha256=l7u6oqIepEllYDR7hRmoE7z47Ry6vtFCF6EAbJt-ilI,31840
+itsdangerous-0.24.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10
+itsdangerous-0.24.dist-info/METADATA,sha256=9b-dx0zycw57kVzRJ7mF7j_shyBFpAZOyfxuwPsYYcQ,442
+itsdangerous-0.24.dist-info/metadata.json,sha256=SAA1cUh5o6IMRFu_riun4N9D77uixmRFd1vq_tJ0KqA,586
+itsdangerous-0.24.dist-info/RECORD,,
+itsdangerous-0.24.dist-info/top_level.txt,sha256=gKN1OKLk81i7fbWWildJA88EQ9NhnGMSvZqhfz9ICjk,13
+itsdangerous-0.24.dist-info/WHEEL,sha256=54bVun1KfEBTJ68SHUmbxNPj80VxlQ0sHi4gZdGZXEY,92
+itsdangerous.pyc,,

+ 5 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/WHEEL

@@ -0,0 +1,5 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+

File diff suppressed because it is too large
+ 0 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/itsdangerous-0.24.dist-info/top_level.txt

@@ -0,0 +1 @@
+itsdangerous

+ 25 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/DESCRIPTION.rst

@@ -0,0 +1,25 @@
+pip
+===
+
+The `PyPA recommended
+<https://python-packaging-user-guide.readthedocs.org/en/latest/current.html>`_
+tool for installing Python packages.
+
+* `Installation <https://pip.pypa.io/en/stable/installing.html>`_
+* `Documentation <https://pip.pypa.io/>`_
+* `Changelog <https://pip.pypa.io/en/stable/news.html>`_
+* `Github Page <https://github.com/pypa/pip>`_
+* `Issue Tracking <https://github.com/pypa/pip/issues>`_
+* `User mailing list <http://groups.google.com/group/python-virtualenv>`_
+* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_
+* User IRC: #pypa on Freenode.
+* Dev IRC: #pypa-dev on Freenode.
+
+
+.. image:: https://pypip.in/v/pip/badge.png
+        :target: https://pypi.python.org/pypi/pip
+
+.. image:: https://secure.travis-ci.org/pypa/pip.png?branch=develop
+   :target: http://travis-ci.org/pypa/pip
+
+

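One practical note: because this vendored pip ships a `pip/__main__.py` (visible in its RECORD below), it can be invoked as `python -m pip`, which guarantees the install targets this interpreter's venv. A minimal sketch, with `Flask` as a stand-in package name:

    # Illustrative only: drive pip through the current interpreter so the
    # package lands in this venv's site-packages.
    import subprocess
    import sys

    subprocess.check_call([sys.executable, '-m', 'pip', 'install', 'Flask'])
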
+ 53 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/METADATA

@@ -0,0 +1,53 @@
+Metadata-Version: 2.0
+Name: pip
+Version: 7.1.2
+Summary: The PyPA recommended tool for installing Python packages.
+Home-page: https://pip.pypa.io/
+Author: The pip developers
+Author-email: python-virtualenv@groups.google.com
+License: MIT
+Keywords: easy_install distutils setuptools egg virtualenv
+Platform: UNKNOWN
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Topic :: Software Development :: Build Tools
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.2
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: Implementation :: PyPy
+Provides-Extra: testing
+Requires-Dist: pytest; extra == 'testing'
+Requires-Dist: virtualenv (>=1.10); extra == 'testing'
+Requires-Dist: scripttest (>=1.3); extra == 'testing'
+Requires-Dist: mock; extra == 'testing'
+
+pip
+===
+
+The `PyPA recommended
+<https://python-packaging-user-guide.readthedocs.org/en/latest/current.html>`_
+tool for installing Python packages.
+
+* `Installation <https://pip.pypa.io/en/stable/installing.html>`_
+* `Documentation <https://pip.pypa.io/>`_
+* `Changelog <https://pip.pypa.io/en/stable/news.html>`_
+* `Github Page <https://github.com/pypa/pip>`_
+* `Issue Tracking <https://github.com/pypa/pip/issues>`_
+* `User mailing list <http://groups.google.com/group/python-virtualenv>`_
+* `Dev mailing list <http://groups.google.com/group/pypa-dev>`_
+* User IRC: #pypa on Freenode.
+* Dev IRC: #pypa-dev on Freenode.
+
+
+.. image:: https://pypip.in/v/pip/badge.png
+        :target: https://pypi.python.org/pypi/pip
+
+.. image:: https://secure.travis-ci.org/pypa/pip.png?branch=develop
+   :target: http://travis-ci.org/pypa/pip
+
+

+ 461 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/RECORD

@@ -0,0 +1,461 @@
+pip/__init__.py,sha256=m7PqiHbqrEDh1FABYrQPXy1nGzBgsw1NxFmXff46ERI,10414
+pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584
+pip/basecommand.py,sha256=ogTu4U-jtdAeOjtwaMpAlEdW_EBSUUtLG9WD8DmVatI,10617
+pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465
+pip/cmdoptions.py,sha256=KsnFWOkho2FORJxYw9TDV9Ys1q7BFyuepRJSo07JMM8,14701
+pip/download.py,sha256=N_YJKby1svBZyEHt3d3IYGhQW0oH6V8rGEnQ5rrdqmk,31936
+pip/exceptions.py,sha256=8hfyL6jntWp7aejEkDFMe4idF1IIFYiUHDBuVRRBOv0,1257
+pip/index.py,sha256=f6zG23oVm4c5gCh2xCf1rUzv22kJxWXDfj-Hd8MbhA4,47847
+pip/locations.py,sha256=6PSdEha93gk2WKjGz-AxBdxNncA1DVQL6TLDeJoTUuQ,6644
+pip/pep425tags.py,sha256=jdIwmMwubI6wT5pw7znJPMtdh9WkNUQiORw6nz987XM,4427
+pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
+pip/wheel.py,sha256=f49Yp-dJHqSNRfWEmdzXwPcDKsFGoTbzyS9ALFgy6Q4,30186
+pip/_vendor/__init__.py,sha256=FyU1LxurgMgzgJiAvsaflSMMOgldJWKP-Pmh9jfSOJA,2508
+pip/_vendor/ipaddress.py,sha256=1atIh2cVrwtjr9_2rgSwZTQdLRMcdpYqwwZE1wQcQoI,79659
+pip/_vendor/re-vendor.py,sha256=PcdZ40d0ohMsdJmA4t0AeAWbPXi1tFsvAwA5KE5FGeY,773
+pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972
+pip/_vendor/six.py,sha256=zKxWCKje5Gpr06IIWNgQL8-8GJ9rwAulE0DnWBKzmhE,29664
+pip/_vendor/_markerlib/__init__.py,sha256=2hgtRuYDOrimZF9-ENCkrP8gnJ59HZEtlk-zoTEvn1Y,564
+pip/_vendor/_markerlib/markers.py,sha256=YuFp0-osufFIoqnzG3L0Z2fDCx4Vln3VUDeXJ2DA_1I,3979
+pip/_vendor/cachecontrol/__init__.py,sha256=D6mWmLQUEUGCs0pWDk7GtNmBHr_syFuPfijpWL84gN4,302
+pip/_vendor/cachecontrol/adapter.py,sha256=eizWWJwOnG5TToxL-XiEywPEf2k20--e-5C6u6wAEts,4196
+pip/_vendor/cachecontrol/cache.py,sha256=xtl-V-pr9KSt9VvFDRCB9yrHPEvqvbk-5M1vAInZb5k,790
+pip/_vendor/cachecontrol/compat.py,sha256=kjqVbQGXwDm8xsBFtnLtJ50VQoOAUzgLvv5bjxrzguM,311
+pip/_vendor/cachecontrol/controller.py,sha256=NyC69aG2oMqlex7zH_tihj0rLmWzSngj3vw7GzMs-tU,10124
+pip/_vendor/cachecontrol/filewrapper.py,sha256=jkC0GOorbWIpy9CzP3PwxSHx5J2CXBAxNbGH68HmP1M,2168
+pip/_vendor/cachecontrol/heuristics.py,sha256=2P6eQo5sn4v6Eu3VzF3k8SflNUuaTSiY4q_j2q3jPQc,4053
+pip/_vendor/cachecontrol/serialize.py,sha256=5EBnH_7HrVzBH80yDqcv3s2Y6ccvxasODW2ka3sBq2E,6189
+pip/_vendor/cachecontrol/wrapper.py,sha256=Kqyu_3TW_54XDudha4-HF21vyEOAJ4ZnRXFysTiLmXA,498
+pip/_vendor/cachecontrol/caches/__init__.py,sha256=uWnUtyMvHY_LULaL_4_IR1F_xPgK5zHfJyRnBq4DnPE,369
+pip/_vendor/cachecontrol/caches/file_cache.py,sha256=FsDug3bwUAQ3okjjfGzxlDaBf2fwVSn1iBKMTL6SyGU,3532
+pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=XywqxkS9MkCaflTOY_wjrE02neKdywB9YwlOBbP7Ywc,973
+pip/_vendor/colorama/__init__.py,sha256=MaGNdH3aaGqXbOrn-V50ojZcxPx_Lak3WkD943_gG3o,225
+pip/_vendor/colorama/ansi.py,sha256=QqrSoEXqVOpqsz0ChbSqxQ3pkRNbrmdpuiFRTX7bceY,2304
+pip/_vendor/colorama/ansitowin32.py,sha256=BfJ_7-Ya6odmjvOlpI32KUTY2K2QWIeFmhg5oNymVsM,9262
+pip/_vendor/colorama/initialise.py,sha256=CRwgjKdSGhivhfBITpB9vyQsVgc6TyCGvFqVymqxK90,1597
+pip/_vendor/colorama/win32.py,sha256=pm8dhPtaCDr8W2V7b0bVJUHuz0rVM896z6OfA-iRV24,5121
+pip/_vendor/colorama/winterm.py,sha256=6KM_v7qYo-_GM7ZECbPV8ZY8iLwP-5mFqJHCGGOp11g,5732
+pip/_vendor/distlib/__init__.py,sha256=iQfvezhw6UAHvpHnVjHTpgihkVdORxsaplTI85GdPgQ,581
+pip/_vendor/distlib/compat.py,sha256=cQpdOl3c94jOx7KQXvDzDjdDaqxzs0kdnGYe5lMspAc,40660
+pip/_vendor/distlib/database.py,sha256=ox41J2cwXW2J0bZvgaKf8dhm3DfuiCR4o1icl5qr9c4,49268
+pip/_vendor/distlib/index.py,sha256=qU38JCRI5F_1Z-QmydiocDE58d4KvHzKjS_T0dBsPlg,20976
+pip/_vendor/distlib/locators.py,sha256=J720MSOYW3l35af3f5IpsUYiz1EgmUxTtMZ0y1Bm3Nk,50347
+pip/_vendor/distlib/manifest.py,sha256=JF5EstaCOPnsW2tUdXCdjIFn-Zkf48Dqw0TcxKjuni0,13598
+pip/_vendor/distlib/markers.py,sha256=iRrVWwpyVwjkKJSX8NEQ92_MRMwpROcfNGKCD-Ch1QM,6282
+pip/_vendor/distlib/metadata.py,sha256=HvsxgUBUdrJe3pnRQCyCVtKrX5GIw0vjPEzvnmC5by0,38314
+pip/_vendor/distlib/resources.py,sha256=oQ58hJM6egiGve0P6UzdSTGvV5Bt8raW_Dla2LCrOWs,10615
+pip/_vendor/distlib/scripts.py,sha256=y5v9QEHw_Z1fLU-ETjU9z_-nQZlKBu49r0meUlj1qp4,14183
+pip/_vendor/distlib/t32.exe,sha256=rOJD6eDYk88TJ5lJtyt58El-nYNip4UvnYIDJ2y6QNs,89088
+pip/_vendor/distlib/t64.exe,sha256=qDBQu9uPHCVdBFM6ANg-Xp9nc5Wz_iFnSmsCTvdEQec,97792
+pip/_vendor/distlib/util.py,sha256=1t4_TO0LcXzgWUQHv4OZrRmD2P-JrxX0TutY08L_11w,51518
+pip/_vendor/distlib/version.py,sha256=GeYZxzA0k6zytPBOC5R6RQiUWRoIR2arUrpwUejRUWo,23711
+pip/_vendor/distlib/w32.exe,sha256=LrnXXqK-Yb1tzS0lxymvQPiMlkQZWAB0eHM5jnL0mAk,85504
+pip/_vendor/distlib/w64.exe,sha256=GbKq4oBmzHZXdcpaLupKLenmQD7_DXsYX8PDPWo_U3M,94208
+pip/_vendor/distlib/wheel.py,sha256=xvaNt5tYH1qI--jo7mG7EQgHM8Gb3CxdWD3bpksy0QM,39043
+pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
+pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
+pip/_vendor/distlib/_backport/shutil.py,sha256=AUi8718iRoJ9K26mRi-rywtt8Gx7ykvrvbUbZszjfYE,25650
+pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
+pip/_vendor/distlib/_backport/sysconfig.py,sha256=7WdYP0wbw8izH1eAEGNA-HXUyJrhzIAGK_LniUs4UNI,26958
+pip/_vendor/distlib/_backport/tarfile.py,sha256=bjyTNONZb-YEXrHFLExOSuagtSOoPaONP2UUoxwkAqE,92627
+pip/_vendor/html5lib/__init__.py,sha256=s_L7sZU8SZtyY1cYNdHuF0WcA21XDiojviES6MBjLC8,714
+pip/_vendor/html5lib/constants.py,sha256=B5LN2DMP-6lEp9wpON4ecX3Kx01n_cbMjuGd6AteixE,86873
+pip/_vendor/html5lib/html5parser.py,sha256=o9FOrhdLGYOtUhsueAsuXwduydagASvwxJ0lUpGYrYg,117347
+pip/_vendor/html5lib/ihatexml.py,sha256=MT12cVXAKaW-ALUkUeN175HpUP73xK8wAIpPzQ8cgfI,16581
+pip/_vendor/html5lib/inputstream.py,sha256=ss3wjtlObOVoVGWFsBztYdpnUqRaezyJ0sTXfdb4Ly4,31665
+pip/_vendor/html5lib/sanitizer.py,sha256=GR-Qc1OKabhKH7Z220fw2kaevU1jLpzBV3ii3a2M900,17660
+pip/_vendor/html5lib/tokenizer.py,sha256=6Uf8sDUkvNn661bcBSBYUCTfXzSs9EyCTiPcj5PAjYI,76929
+pip/_vendor/html5lib/utils.py,sha256=PSVv1ig9oAZa-DU16DT4cbbggnG7K3qQIkPm6uJKxFg,3267
+pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/html5lib/filters/_base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286
+pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=fpRLbz6TCe5yXEkGmyMlJ80FekWsTR-sHk3Ano0U9LQ,624
+pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=xllv1I7unxhcyZTf3LTsv30wh2mAkT7wmTZx7zIhpuY,2746
+pip/_vendor/html5lib/filters/lint.py,sha256=8eJo0SXDcY40OhsNd0Cft36kUXCZ5t-30mNFSUf4LnE,4208
+pip/_vendor/html5lib/filters/optionaltags.py,sha256=4ozLwBgMRaxe7iqxefLQpDhp3irK7YHo9LgSGsvZYMw,10500
+pip/_vendor/html5lib/filters/sanitizer.py,sha256=MvGUs_v2taWPgGhjxswRSUiHfxrqMUhsNPz-eSeUYUQ,352
+pip/_vendor/html5lib/filters/whitespace.py,sha256=LbOUcC0zQ9z703KNZrArOr0kVBO7OMXjKjucDW32LU4,1142
+pip/_vendor/html5lib/serializer/__init__.py,sha256=xFXFP-inaTNlbnau5c5DGrH_O8yPm-C6HWbJxpiSqFE,490
+pip/_vendor/html5lib/serializer/htmlserializer.py,sha256=G-aVHmlR7uMR011jO0ev7sZvkVHpLr3OrLSYMZ7liVs,12855
+pip/_vendor/html5lib/treeadapters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/html5lib/treeadapters/sax.py,sha256=3of4vvaUYIAic7pngebwJV24hpOS7Zg9ggJa_WQegy4,1661
+pip/_vendor/html5lib/treebuilders/__init__.py,sha256=Xz4X6B5DA1R-5GyRa44j0sJwfl6dUNyb0NBu9-7sK3U,3405
+pip/_vendor/html5lib/treebuilders/_base.py,sha256=Xf0FZVcVwIQS6tEseJdj5wKbYucbNCnbAsnsG4lONis,13711
+pip/_vendor/html5lib/treebuilders/dom.py,sha256=jvmtvnERtpxXpHvBgiq1FpzAUYAAzoolOTx_DoXwGEI,8469
+pip/_vendor/html5lib/treebuilders/etree.py,sha256=etbO6yQlyV46rWlj9mSyVqQOWrgoHgyJ01Tut4lWZkk,12621
+pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=z3Bnfm2MstEEb_lbaAeicl5l-ab6MSQa5Q1ZZreK7Pc,14031
+pip/_vendor/html5lib/treewalkers/__init__.py,sha256=m2-4a5P4dMNlQb26MNIhgj69p6ms1i-JD2HPDr7iTfw,5766
+pip/_vendor/html5lib/treewalkers/_base.py,sha256=9nXtXtgubdWKFlKxhVzWarE0Hiv3T4VC7_Wt9ulVzB0,7014
+pip/_vendor/html5lib/treewalkers/dom.py,sha256=Lb63Nuz8HtgvkuuvSmU5LOyUkEtstH5saPPAg5xN4r8,1421
+pip/_vendor/html5lib/treewalkers/etree.py,sha256=dO_3d81tdFtjTTR4cCBMh5kB_BNNvqdT7GxrCBJ7IFM,4590
+pip/_vendor/html5lib/treewalkers/genshistream.py,sha256=IbBFrlgi-59-K7P1zm0d7ZFIknBN4c5E57PHJDkx39s,2278
+pip/_vendor/html5lib/treewalkers/lxmletree.py,sha256=am6t_JHh_Fpm10CaW-zDaGGsDwTPK3Pas7TRBezFs4w,5992
+pip/_vendor/html5lib/treewalkers/pulldom.py,sha256=9W6i8yWtUzayV6EwX-okVacttHaqpQZwdBCc2S3XeQ4,2302
+pip/_vendor/html5lib/trie/__init__.py,sha256=mec5zyJ5wIKRM8819gIcIsYQwncg91rEmPwGH1dG3Ho,212
+pip/_vendor/html5lib/trie/_base.py,sha256=WGY8SGptFmx4O0aKLJ54zrIQOoyuvhS0ngA36vAcIcc,927
+pip/_vendor/html5lib/trie/datrie.py,sha256=EQpqSfkZRuTbE-DuhW7xMdVDxdZNZ0CfmnYfHA_3zxM,1178
+pip/_vendor/html5lib/trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775
+pip/_vendor/lockfile/__init__.py,sha256=tRsliCGxIGRfQ_OsAEoA70UatRbdIiVh0OZrYL_xIn0,9162
+pip/_vendor/lockfile/linklockfile.py,sha256=pLUQvtn6BLNfzEy5Vd_SH86mx5hO3XpVDu1xAdSeRaw,2649
+pip/_vendor/lockfile/mkdirlockfile.py,sha256=D5msAvl2kXtvGh5dhHf39eah-7KiZRsAPArRfWBAYm8,3098
+pip/_vendor/lockfile/pidlockfile.py,sha256=frwO5ouxdegU7mgFS-lOvUKIbq8STLc41hHWDiG9yAk,6221
+pip/_vendor/lockfile/sqlitelockfile.py,sha256=DXtqjbp4qFzbaAP6r8AqiCH7WBwl3NzfLwM1srC1ObM,5540
+pip/_vendor/lockfile/symlinklockfile.py,sha256=O1l2YuZs1RQuWGHRx4dyQ2Jkb4NSzKAxipS3vN5Chtw,2613
+pip/_vendor/packaging/__about__.py,sha256=YzdrW-1lWmyCBDyrcNkZbJo4tiDWXpoiqPjfyCYMzIE,1073
+pip/_vendor/packaging/__init__.py,sha256=2V8n-eEpSgBuXlV8hlMmhU7ZklpsrrusWMZNp2gC4Hs,906
+pip/_vendor/packaging/_compat.py,sha256=wofog8iYo_zudt_10i6JiXKHDs5GhCuXC09hCuSJiv4,1253
+pip/_vendor/packaging/_structures.py,sha256=93YvgrEE2HgFp8AdXy0pwCRVnZeutRHO_-puJ7T0cPw,1809
+pip/_vendor/packaging/specifiers.py,sha256=UV9T01_kKloA8PSeMI3HTYBSJ_4KLs00yLvrlciZ3yU,28079
+pip/_vendor/packaging/version.py,sha256=dEGrWZJZ6sef1xMxSfDCego2hS3Q86by0hUIFVk-AGc,11949
+pip/_vendor/pkg_resources/__init__.py,sha256=XxyhKV_SdPpFlALmKOe4g4jkrgn6rWx21THQP4-nZQI,106466
+pip/_vendor/progress/__init__.py,sha256=Wn1074LUDZovd4zfoVYojnPBgOc6ctHbQX7rp_p8lRA,3023
+pip/_vendor/progress/bar.py,sha256=bH72DehBggOGvZsv4839W2p-981_AOSC8e7ZihxQ3gU,2707
+pip/_vendor/progress/counter.py,sha256=UZcnk68HkBOCRjz25W8MmHY-ev2BGn-sYWsR-iSfQds,1529
+pip/_vendor/progress/helpers.py,sha256=aPoo84DShYXFe2Aqwm9Wj-gt3Lj9BJY9-bwC4C2c2II,2894
+pip/_vendor/progress/spinner.py,sha256=LRVxxpM1-EZ5JJe-yYriiHWPTYZnaYUxsbxyFWQVOQc,1341
+pip/_vendor/requests/__init__.py,sha256=3TJHLc9RZ_3QCQYiP0r9lshzLMIdLm1gn0wVwNqaZmg,1861
+pip/_vendor/requests/adapters.py,sha256=MEedP-slXqxDnYdi4JxuCE3y2NTizShvL_SfP3ncge4,16810
+pip/_vendor/requests/api.py,sha256=X8Znh82s62_3FBPFkqafNQn7s4L-5q8-GLclLKKHs3I,5415
+pip/_vendor/requests/auth.py,sha256=pD2td_DoHl-YWQQITk5ZPSKjsR5_sBxRkseYMx1n704,6794
+pip/_vendor/requests/cacert.pem,sha256=ak7q_q8ozHdQ9ff27U-E1vCNrLisFRQSMy9zJkdpQlM,308434
+pip/_vendor/requests/certs.py,sha256=RX5H1cSiB52Hbjh_qv3eMW8hqHEF_r4Qiv_4AwfziuU,613
+pip/_vendor/requests/compat.py,sha256=hq7CKHoykNs8yzKPAJiOkHQJPoNp9A89MufTdhlCniY,1469
+pip/_vendor/requests/cookies.py,sha256=Um1lXApUUO0-P6U1TOudXu1cYVXBFN77f16OFxiIAt8,17191
+pip/_vendor/requests/exceptions.py,sha256=zZhHieXgR1teqbvuo_9OrwDMHnrvRtulW97VfzumQv4,2517
+pip/_vendor/requests/hooks.py,sha256=9vNiuiRHRd5Qy6BX_0p1H3NsUzDo1M_HaFR2AFL41Tg,820
+pip/_vendor/requests/models.py,sha256=_vxB3Z3QDkV_HYcZFqFrPq31TmworMeYEM094Q9Vf00,29176
+pip/_vendor/requests/sessions.py,sha256=kWfupbWN_Uhu-FKPEc7y1NyvyQNE5jryuA5Uad2Yy0U,24250
+pip/_vendor/requests/status_codes.py,sha256=DVA33t4UthIiZhP4iYSChbWjuhrJWvVA04qle9nwj2Q,3200
+pip/_vendor/requests/structures.py,sha256=i3yMaaDbl4_gNJKdcK3kDmeSLoo0r59XEIWoc_qtNyo,2977
+pip/_vendor/requests/utils.py,sha256=yh5am9D9nG_oAYyYU2upkXIth2QOi1LJhiHSx5dSVbw,21334
+pip/_vendor/requests/packages/__init__.py,sha256=aXkbNCjM_WhryRBocE4AaA_p7-CTxL5LOutY7XzKm4s,62
+pip/_vendor/requests/packages/chardet/__init__.py,sha256=XuTKCYOR7JwsoHxqZTYH86LVyMDbDI3s1s0W_qoGEBM,1295
+pip/_vendor/requests/packages/chardet/big5freq.py,sha256=D8oTdz-GM7Jg8TsaWJDm65vM_OLHC3xub6qUJ3rOgsQ,82594
+pip/_vendor/requests/packages/chardet/big5prober.py,sha256=XX96C--6WKYW36mL-z7pJSAtc169Z8ZImByCP4pEN9A,1684
+pip/_vendor/requests/packages/chardet/chardetect.py,sha256=f4299UZG6uWd3i3r_N0OdrFj2sA9JFI54PAmDLAFmWA,2504
+pip/_vendor/requests/packages/chardet/chardistribution.py,sha256=cUARQFr1oTLXeJCDQrDRkUP778AvSMzhSCnG8VLCV58,9226
+pip/_vendor/requests/packages/chardet/charsetgroupprober.py,sha256=0lKk7VE516fgMw119tNefFqLOxKfIE9WfdkpIT69OKU,3791
+pip/_vendor/requests/packages/chardet/charsetprober.py,sha256=Z48o2KiOj23FNqYH8FqzhH5m1qdm3rI8DcTm2Yqtklg,1902
+pip/_vendor/requests/packages/chardet/codingstatemachine.py,sha256=E85rYhHVMw9xDEJVgiQhp0OnLGr6i2r8_7QOWMKTH08,2318
+pip/_vendor/requests/packages/chardet/compat.py,sha256=5mm6yrHwef1JEG5OxkPJlSq5lkjLVpEGh3iPgFBkpkM,1157
+pip/_vendor/requests/packages/chardet/constants.py,sha256=-UnY8U7EP7z9fTyd09yq35BEkSFEAUAiv9ohd1DW1s4,1335
+pip/_vendor/requests/packages/chardet/cp949prober.py,sha256=FMvdLyB7fejPXRsTbca7LK1P3RUvvssmjUNyaEfz8zY,1782
+pip/_vendor/requests/packages/chardet/escprober.py,sha256=q5TcQKeVq31WxrW7Sv8yjpZkjEoaHO8S92EJZ9hodys,3187
+pip/_vendor/requests/packages/chardet/escsm.py,sha256=7iljEKN8lXTh8JFXPUSwlibMno6R6ksq4evLxbkzfro,7839
+pip/_vendor/requests/packages/chardet/eucjpprober.py,sha256=5IpfSEjAb7h3hcGMd6dkU80O900C2N6xku28rdYFKuc,3678
+pip/_vendor/requests/packages/chardet/euckrfreq.py,sha256=T5saK5mImySG5ygQPtsp6o2uKulouCwYm2ElOyFkJqU,45978
+pip/_vendor/requests/packages/chardet/euckrprober.py,sha256=Wo7dnZ5Erw_nB4H-m5alMiOxOuJUmGHlwCSaGqExDZA,1675
+pip/_vendor/requests/packages/chardet/euctwfreq.py,sha256=G_I0BW9i1w0ONeeUwIYqV7_U09buIHdqh-wNHVaql7I,34872
+pip/_vendor/requests/packages/chardet/euctwprober.py,sha256=upS2P6GuT5ujOxXYw-RJLcT7A4PTuo27KGUKU4UZpIQ,1676
+pip/_vendor/requests/packages/chardet/gb2312freq.py,sha256=M2gFdo_qQ_BslStEchrPW5CrPEZEacC0uyDLw4ok-kY,36011
+pip/_vendor/requests/packages/chardet/gb2312prober.py,sha256=VWnjoRa83Y6V6oczMaxyUr0uy48iCnC2nzk9zfEIRHc,1681
+pip/_vendor/requests/packages/chardet/hebrewprober.py,sha256=8pdoUfsVXf_L4BnJde_BewS6H2yInV5688eu0nFhLHY,13359
+pip/_vendor/requests/packages/chardet/jisfreq.py,sha256=ZcL4R5ekHHbP2KCYGakVMBsiKqZZZAABzhwi-uRkOps,47315
+pip/_vendor/requests/packages/chardet/jpcntx.py,sha256=yftmp0QaF6RJO5SJs8I7LU5AF4rwP23ebeCQL4BM1OY,19348
+pip/_vendor/requests/packages/chardet/langbulgarianmodel.py,sha256=ZyPsA796MSVhYdfWhMCgKWckupAKAnKqWcE3Cl3ej6o,12784
+pip/_vendor/requests/packages/chardet/langcyrillicmodel.py,sha256=fkcd5OvogUp-GrNDWAZPgkYsSRCD2omotAEvqjlmLKE,17725
+pip/_vendor/requests/packages/chardet/langgreekmodel.py,sha256=QHMy31CH_ot67UCtmurCEKqKx2WwoaKrw2YCYYBK2Lw,12628
+pip/_vendor/requests/packages/chardet/langhebrewmodel.py,sha256=4ASl5vzKJPng4H278VHKtRYC03TpQpenlHTcsmZH1rE,11318
+pip/_vendor/requests/packages/chardet/langhungarianmodel.py,sha256=SXwuUzh49_cBeMXhshRHdrhlkz0T8_pZWV_pdqBKNFk,12536
+pip/_vendor/requests/packages/chardet/langthaimodel.py,sha256=-k7djh3dGKngAGnt3WfuoJN7acDcWcmHAPojhaUd7q4,11275
+pip/_vendor/requests/packages/chardet/latin1prober.py,sha256=238JHOxH8aRudJY2NmeSv5s7i0Qe3GuklIU3HlYybvg,5232
+pip/_vendor/requests/packages/chardet/mbcharsetprober.py,sha256=9rOCjDVsmSMp6e7q2syqak22j7lrbUZhJhMee2gbVL0,3268
+pip/_vendor/requests/packages/chardet/mbcsgroupprober.py,sha256=SHRzNPLpDXfMJLA8phCHVU0WgqbgDCNxDQMolGX_7yk,1967
+pip/_vendor/requests/packages/chardet/mbcssm.py,sha256=IKwJXyxu34n6NojmxVxC60MLFtJKm-hIfxaFEnb3uBA,19590
+pip/_vendor/requests/packages/chardet/sbcharsetprober.py,sha256=Xq0lODqJnDgxglBiQI4BqTFiPbn63-0a5XNA5-hVu7U,4793
+pip/_vendor/requests/packages/chardet/sbcsgroupprober.py,sha256=8hLyH8RAG-aohBo7o_KciWVgRo42ZE_zEtuNG1JMRYI,3291
+pip/_vendor/requests/packages/chardet/sjisprober.py,sha256=UYOmiMDzttYIkSDoOB08UEagivJpUXz4tuWiWzTiOr8,3764
+pip/_vendor/requests/packages/chardet/universaldetector.py,sha256=h-E2x6XSCzlNjycYWG0Fe4Cf1SGdaIzUNu2HCphpMZA,6840
+pip/_vendor/requests/packages/chardet/utf8prober.py,sha256=7tdNZGrJY7jZUBD483GGMkiP0Tx8Fp-cGvWHoAsilHg,2652
+pip/_vendor/requests/packages/urllib3/__init__.py,sha256=WdpCLx5juBoMArBMbVT125BD8SvK-Id9Js57PTnpfSs,2055
+pip/_vendor/requests/packages/urllib3/_collections.py,sha256=tatXHmN5An3gw-alS68qs3yO_vweJ8eFTUmwMXvlKpQ,10454
+pip/_vendor/requests/packages/urllib3/connection.py,sha256=uqwsV351AZqV008fb7OscFbyZt6dRicjQQrUA0K01iw,9011
+pip/_vendor/requests/packages/urllib3/connectionpool.py,sha256=kGlpxwc2lRax0J7f2tFVWjXefCIdIWyLHqHDa6amN7A,30319
+pip/_vendor/requests/packages/urllib3/exceptions.py,sha256=za-cEwBqxBKOqqKTaIVAMdH3j1nDRqi-MtdojdpU4Wc,4374
+pip/_vendor/requests/packages/urllib3/fields.py,sha256=06XgBjTvEyVYUWA-j_6zhnfXMpd-IxZdzT85ppAFfYg,5833
+pip/_vendor/requests/packages/urllib3/filepost.py,sha256=TEpQ_PMO0loPQERLr4E7VcgbMfhNwOCxt8cudhrpkM0,2281
+pip/_vendor/requests/packages/urllib3/poolmanager.py,sha256=Qw1UStRXPh6RH5BOT7x4NY7Gqkho2njYEDiqk9_8728,9406
+pip/_vendor/requests/packages/urllib3/request.py,sha256=NjnLVqcKZVotmPV335m87AqMFBSH0V_ml2tOGxKSKRI,5751
+pip/_vendor/requests/packages/urllib3/response.py,sha256=t4Pl6JVQ9jYXbriu0cQf5dRuRSpxSP9YltgMu35JFPo,16459
+pip/_vendor/requests/packages/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.py,sha256=F29BjpIMId2u9Bwmy0bmg8eDYKvQZiXLsZeK2cDNctQ,4507
+pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py,sha256=ooNMMfp9cEirGyLRcIE-f-qidQPdDp2kBl-K5ALOJyU,9326
+pip/_vendor/requests/packages/urllib3/packages/__init__.py,sha256=EKCTAOjZtPR_HC50e7X8hS5j4bkFkN87XZOT-Wdpfus,74
+pip/_vendor/requests/packages/urllib3/packages/ordered_dict.py,sha256=VQaPONfhVMsb8B63Xg7ZOydJqIE_jzeMhVN3Pec6ogw,8935
+pip/_vendor/requests/packages/urllib3/packages/six.py,sha256=U-rO-WBrFS8PxHeamSl6okKCjqPF18NhiZb0qPZ67XM,11628
+pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py,sha256=cOWMIn1orgJoA35p6pSzO_-Dc6iOX9Dhl6D2sL9b_2o,460
+pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=fK28k37hL7-D79v9iM2fHgNK9Q1Pw0M7qVRL4rkfFjQ,3778
+pip/_vendor/requests/packages/urllib3/util/__init__.py,sha256=zrB1BFTNOUWxgEVvZlicK8uEU2AVpT1TFmWo2gQGfDA,486
+pip/_vendor/requests/packages/urllib3/util/connection.py,sha256=PTxckPfstrFVAPAfYn12kaZYEfoQn-CDCo2VrIBPtpo,3293
+pip/_vendor/requests/packages/urllib3/util/request.py,sha256=zY2x5tBXzvgLWgF3XRk_CEk-X8Q8L9bqpESqWn13I_0,2089
+pip/_vendor/requests/packages/urllib3/util/response.py,sha256=QMrOy69WPkoe42EU0Y5jwRNqBf-w1FF8GJWAx1jQDmY,566
+pip/_vendor/requests/packages/urllib3/util/retry.py,sha256=bcRb3QC1LDMHLx8gBDDGFqZ3dKs6bYcpfWZJ3FpOMtE,9924
+pip/_vendor/requests/packages/urllib3/util/ssl_.py,sha256=X4mp-FA-Jact7Z3sEzxenQ69Jnwlz2-G7gjJXL1CMdw,10037
+pip/_vendor/requests/packages/urllib3/util/timeout.py,sha256=2MqJVD_v_0tLxgm2Mr_ePqYmfnB5zjZXphlIexWocKM,9544
+pip/_vendor/requests/packages/urllib3/util/url.py,sha256=uPcy_lCMdmQhdofnemWyjYjHsPJaZ0VisANa2vaCr3g,5836
+pip/commands/__init__.py,sha256=7nn4pesggkyBI_yFJJkNhqRsbhX4hft582u2G7QTOaI,2007
+pip/commands/completion.py,sha256=7JkLif3DF0QGpjMaUjHvF8knJ3IumcED2gWRvMRpFy0,1991
+pip/commands/freeze.py,sha256=_wHnuHYXC4V0zBLD7LfDhgI_bWL6KdcCgzzQ9bXwDkU,2330
+pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982
+pip/commands/install.py,sha256=PIHF8rHRTYFeeSfYeWjKA93ewIo2PQd5eO6-WkHb2gs,14659
+pip/commands/list.py,sha256=ixgoudq7aPf1KVQO2BayACsb-EfF_8BxAnDF83gH_m4,7251
+pip/commands/search.py,sha256=zOrBophzW_s_gHMFAqibWz-jlLu-lPh08NIbR-8fDOU,4604
+pip/commands/show.py,sha256=5B7ky8SBNLy5mFMyXf2ethe6zQQ_Plr2h8GDWNaeEk8,4974
+pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884
+pip/commands/wheel.py,sha256=WCafM_PGHxi3AQbifEBJ72wVsA32EjLu0_ATPv0iGJs,7442
+pip/compat/__init__.py,sha256=NmhcnJOm7-4SXCJBntzgxQLSi4tep4qP-ltm8AL8jT8,3402
+pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096
+pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71
+pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487
+pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+pip/operations/freeze.py,sha256=em8Vw1C2hZ9aVfSGyIVEEQp5KKzRByw4twFWvbEN5Es,3984
+pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276
+pip/req/req_file.py,sha256=UYkZHztKkDjANbjm8G5InoMZgiW_5R3cxPpuieFz5RQ,9670
+pip/req/req_install.py,sha256=L0gocWgopzIJ3qmXaD9_ECP0UQPXDMFKfnFIp1_rkf0,43096
+pip/req/req_set.py,sha256=ZTemgzstHr8wgmh2ZYk-rQnM99vRe0cN1zK-4KdK3Nc,27444
+pip/req/req_uninstall.py,sha256=r6p9H6EIJheSbi9inIV8iCWerwmVFBLpAU4rM30wa84,7135
+pip/utils/__init__.py,sha256=ws5g9s8zaq2GyXhGZqZ43OM9sZdG49q9ZuGbWslxpjY,26814
+pip/utils/appdirs.py,sha256=DsdEjHfkFGNJ7NmDR_SWQ0PzOXZdyqDzHVN673mLq9o,9173
+pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312
+pip/utils/deprecation.py,sha256=PtwQegNjDSmeT_m65pdcvnKEktTpb5AJvey6l3P6mQI,2152
+pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899
+pip/utils/logging.py,sha256=b3wtkeCjcA2YOuAVrPYDFhTL77Hb6LeNMCoSD_Ql1fA,3326
+pip/utils/outdated.py,sha256=sXrNapTD6NXdM9J3I11idYyDm7eXzobbGzb_URPrxpY,5555
+pip/utils/ui.py,sha256=gu7HI4bZ0EeloxhA-CL2ngIVXaN7_-T1erwNOR8M-sM,6774
+pip/vcs/__init__.py,sha256=n_2XKQyjdzLyOJt2pWtlsqXTynp2rI48iN4s_cUJMkI,11760
+pip/vcs/bazaar.py,sha256=ecV1QSNSwEeyFx4H_xPBc2NgCGrhUymVoLszaV5rnUo,4427
+pip/vcs/git.py,sha256=ZlgvRMiionvjFMC1tBpNcPTWOtx1ffRI_e_UTTulu7M,7608
+pip/vcs/mercurial.py,sha256=apGLpsxPSzQkxRxcKINy_C0whu09rkyrbMIfpPfenuo,4974
+pip/vcs/subversion.py,sha256=GvWew87FkTjQQZVnqqUURxPpXufJPfZBmLxXmIHq8SI,10468
+pip-7.1.2.dist-info/DESCRIPTION.rst,sha256=j7Go_0V5jh4mpRZT2o6GGv7Y7bg-DTE6z7Eh64RTcpw,829
+pip-7.1.2.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68
+pip-7.1.2.dist-info/METADATA,sha256=WKp4C4pXRDMfb5mPhvFcz2gxvcs9g4jWeFFVdO9zmi4,1981
+pip-7.1.2.dist-info/metadata.json,sha256=m-R1CYUzmjj2tKdOtkEgCi6A74oiAUasrtVmpc0g89U,1491
+pip-7.1.2.dist-info/pbr.json,sha256=T9ZxpJEDJEZSD8LQIg_YHJkZSDL0mkKU8sE-cBzvkMc,46
+pip-7.1.2.dist-info/RECORD,,
+pip-7.1.2.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
+pip-7.1.2.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
+/Users/nanak/dev/dptapedbgui/venv/bin/pip,sha256=oF6QFv26-XYiuzNkbHGpd_Yb7SZ6jRVfNMuxpiW7aMc,233
+/Users/nanak/dev/dptapedbgui/venv/bin/pip2,sha256=oF6QFv26-XYiuzNkbHGpd_Yb7SZ6jRVfNMuxpiW7aMc,233
+/Users/nanak/dev/dptapedbgui/venv/bin/pip2.7,sha256=oF6QFv26-XYiuzNkbHGpd_Yb7SZ6jRVfNMuxpiW7aMc,233
+pip/_vendor/requests/compat.pyc,,
+pip/_vendor/cachecontrol/cache.pyc,,
+pip/_vendor/requests/certs.pyc,,
+pip/_vendor/distlib/util.pyc,,
+pip/_vendor/progress/spinner.pyc,,
+pip/__init__.pyc,,
+pip/_vendor/html5lib/treewalkers/genshistream.pyc,,
+pip/_vendor/requests/packages/chardet/latin1prober.pyc,,
+pip/_vendor/distlib/_backport/__init__.pyc,,
+pip/operations/__init__.pyc,,
+pip/_vendor/distlib/metadata.pyc,,
+pip/compat/__init__.pyc,,
+pip/_vendor/html5lib/treebuilders/__init__.pyc,,
+pip/compat/dictconfig.pyc,,
+pip/_vendor/html5lib/filters/alphabeticalattributes.pyc,,
+pip/_vendor/requests/packages/urllib3/request.pyc,,
+pip/_vendor/requests/packages/chardet/escprober.pyc,,
+pip/_vendor/requests/adapters.pyc,,
+pip/_vendor/requests/packages/chardet/langbulgarianmodel.pyc,,
+pip/commands/completion.pyc,,
+pip/_vendor/html5lib/html5parser.pyc,,
+pip/_vendor/html5lib/filters/lint.pyc,,
+pip/pep425tags.pyc,,
+pip/_vendor/packaging/__init__.pyc,,
+pip/_vendor/lockfile/linklockfile.pyc,,
+pip/_vendor/requests/packages/chardet/__init__.pyc,,
+pip/_vendor/html5lib/filters/whitespace.pyc,,
+pip/_vendor/requests/packages/chardet/euckrprober.pyc,,
+pip/_vendor/html5lib/treebuilders/etree_lxml.pyc,,
+pip/_vendor/distlib/database.pyc,,
+pip/_vendor/html5lib/treeadapters/sax.pyc,,
+pip/_vendor/requests/packages/chardet/euckrfreq.pyc,,
+pip/utils/__init__.pyc,,
+pip/_vendor/requests/auth.pyc,,
+pip/_vendor/html5lib/treewalkers/pulldom.pyc,,
+pip/_vendor/packaging/__about__.pyc,,
+pip/_vendor/progress/__init__.pyc,,
+pip/_vendor/html5lib/ihatexml.pyc,,
+pip/_vendor/html5lib/treewalkers/_base.pyc,,
+pip/commands/list.pyc,,
+pip/_vendor/distlib/scripts.pyc,,
+pip/_vendor/html5lib/filters/sanitizer.pyc,,
+pip/vcs/git.pyc,,
+pip/cmdoptions.pyc,,
+pip/_vendor/lockfile/symlinklockfile.pyc,,
+pip/_vendor/requests/packages/urllib3/util/ssl_.pyc,,
+pip/_vendor/requests/packages/urllib3/poolmanager.pyc,,
+pip/req/req_set.pyc,,
+pip/_vendor/html5lib/inputstream.pyc,,
+pip/req/__init__.pyc,,
+pip/_vendor/html5lib/sanitizer.pyc,,
+pip/_vendor/colorama/win32.pyc,,
+pip/_vendor/distlib/resources.pyc,,
+pip/_vendor/requests/packages/chardet/hebrewprober.pyc,,
+pip/_vendor/requests/cookies.pyc,,
+pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.pyc,,
+pip/_vendor/lockfile/__init__.pyc,,
+pip/_vendor/requests/packages/urllib3/packages/six.pyc,,
+pip/_vendor/progress/helpers.pyc,,
+pip/_vendor/html5lib/trie/__init__.pyc,,
+pip/vcs/bazaar.pyc,,
+pip/_vendor/html5lib/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/charsetgroupprober.pyc,,
+pip/_vendor/cachecontrol/serialize.pyc,,
+pip/_vendor/requests/packages/urllib3/packages/__init__.pyc,,
+pip/_vendor/distlib/_backport/misc.pyc,,
+pip/_vendor/requests/packages/chardet/codingstatemachine.pyc,,
+pip/_vendor/requests/packages/urllib3/util/url.pyc,,
+pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/__init__.pyc,,
+pip/_vendor/cachecontrol/controller.pyc,,
+pip/utils/deprecation.pyc,,
+pip/_vendor/distlib/_backport/sysconfig.pyc,,
+pip/_vendor/requests/packages/chardet/langgreekmodel.pyc,,
+pip/_vendor/requests/packages/__init__.pyc,,
+pip/commands/show.pyc,,
+pip/_vendor/requests/packages/urllib3/util/__init__.pyc,,
+pip/_vendor/_markerlib/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/jpcntx.pyc,,
+pip/_vendor/colorama/initialise.pyc,,
+pip/_vendor/html5lib/treebuilders/_base.pyc,,
+pip/commands/help.pyc,,
+pip/_vendor/requests/utils.pyc,,
+pip/_vendor/colorama/__init__.pyc,,
+pip/_vendor/packaging/_compat.pyc,,
+pip/_vendor/distlib/version.pyc,,
+pip/utils/ui.pyc,,
+pip/commands/uninstall.pyc,,
+pip/_vendor/distlib/index.pyc,,
+pip/_vendor/cachecontrol/heuristics.pyc,,
+pip/_vendor/requests/sessions.pyc,,
+pip/_vendor/html5lib/treewalkers/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/big5prober.pyc,,
+pip/_vendor/requests/packages/chardet/langthaimodel.pyc,,
+pip/_vendor/distlib/_backport/tarfile.pyc,,
+pip/_vendor/requests/packages/urllib3/util/response.pyc,,
+pip/_vendor/requests/packages/chardet/mbcsgroupprober.pyc,,
+pip/_vendor/html5lib/filters/__init__.pyc,,
+pip/baseparser.pyc,,
+pip/status_codes.pyc,,
+pip/_vendor/distlib/__init__.pyc,,
+pip/_vendor/pkg_resources/__init__.pyc,,
+pip/commands/search.pyc,,
+pip/_vendor/requests/packages/chardet/langhungarianmodel.pyc,,
+pip/_vendor/html5lib/utils.pyc,,
+pip/_vendor/html5lib/trie/datrie.pyc,,
+pip/_vendor/requests/structures.pyc,,
+pip/_vendor/packaging/version.pyc,,
+pip/_vendor/cachecontrol/adapter.pyc,,
+pip/_vendor/requests/packages/urllib3/fields.pyc,,
+pip/_vendor/requests/packages/urllib3/contrib/ntlmpool.pyc,,
+pip/vcs/__init__.pyc,,
+pip/req/req_file.pyc,,
+pip/models/__init__.pyc,,
+pip/_vendor/html5lib/constants.pyc,,
+pip/_vendor/html5lib/trie/py.pyc,,
+pip/_vendor/packaging/specifiers.pyc,,
+pip/_vendor/requests/packages/chardet/gb2312freq.pyc,,
+pip/download.pyc,,
+pip/commands/__init__.pyc,,
+pip/_vendor/requests/packages/urllib3/__init__.pyc,,
+pip/_vendor/distlib/compat.pyc,,
+pip/models/index.pyc,,
+pip/_vendor/lockfile/mkdirlockfile.pyc,,
+pip/req/req_uninstall.pyc,,
+pip/_vendor/distlib/wheel.pyc,,
+pip/_vendor/requests/packages/chardet/euctwprober.pyc,,
+pip/utils/build.pyc,,
+pip/_vendor/requests/packages/chardet/escsm.pyc,,
+pip/_vendor/requests/status_codes.pyc,,
+pip/_vendor/requests/exceptions.pyc,,
+pip/_vendor/distlib/markers.pyc,,
+pip/index.pyc,,
+pip/utils/logging.pyc,,
+pip/_vendor/requests/packages/chardet/cp949prober.pyc,,
+pip/utils/outdated.pyc,,
+pip/_vendor/requests/api.pyc,,
+pip/_vendor/requests/packages/urllib3/filepost.pyc,,
+pip/_vendor/requests/packages/chardet/big5freq.pyc,,
+pip/_vendor/html5lib/treebuilders/etree.pyc,,
+pip/_vendor/cachecontrol/caches/file_cache.pyc,,
+pip/_vendor/html5lib/treebuilders/dom.pyc,,
+pip/_vendor/requests/packages/chardet/mbcssm.pyc,,
+pip/_vendor/distlib/_backport/shutil.pyc,,
+pip/_vendor/requests/packages/chardet/sbcsgroupprober.pyc,,
+pip/__main__.pyc,,
+pip/_vendor/cachecontrol/compat.pyc,,
+pip/_vendor/html5lib/tokenizer.pyc,,
+pip/operations/freeze.pyc,,
+pip/_vendor/requests/models.pyc,,
+pip/_vendor/lockfile/sqlitelockfile.pyc,,
+pip/_vendor/requests/packages/chardet/utf8prober.pyc,,
+pip/_vendor/requests/packages/chardet/langhebrewmodel.pyc,,
+pip/_vendor/requests/packages/chardet/compat.pyc,,
+pip/_vendor/progress/counter.pyc,,
+pip/commands/install.pyc,,
+pip/utils/filesystem.pyc,,
+pip/_vendor/six.pyc,,
+pip/_vendor/requests/packages/chardet/langcyrillicmodel.pyc,,
+pip/_vendor/requests/packages/chardet/constants.pyc,,
+pip/_vendor/requests/packages/urllib3/packages/ordered_dict.pyc,,
+pip/_vendor/requests/packages/chardet/eucjpprober.pyc,,
+pip/_vendor/retrying.pyc,,
+pip/_vendor/html5lib/treewalkers/etree.pyc,,
+pip/_vendor/cachecontrol/wrapper.pyc,,
+pip/_vendor/requests/hooks.pyc,,
+pip/_vendor/packaging/_structures.pyc,,
+pip/_vendor/requests/packages/urllib3/connectionpool.pyc,,
+pip/_vendor/requests/packages/chardet/mbcharsetprober.pyc,,
+pip/locations.pyc,,
+pip/_vendor/requests/packages/chardet/jisfreq.pyc,,
+pip/vcs/subversion.pyc,,
+pip/_vendor/requests/packages/urllib3/util/connection.pyc,,
+pip/exceptions.pyc,,
+pip/basecommand.pyc,,
+pip/_vendor/distlib/locators.pyc,,
+pip/_vendor/html5lib/filters/_base.pyc,,
+pip/_vendor/re-vendor.pyc,,
+pip/_vendor/html5lib/treewalkers/dom.pyc,,
+pip/_vendor/requests/packages/urllib3/contrib/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/euctwfreq.pyc,,
+pip/_vendor/requests/packages/chardet/chardistribution.pyc,,
+pip/_vendor/cachecontrol/caches/__init__.pyc,,
+pip/commands/wheel.pyc,,
+pip/_vendor/requests/packages/chardet/sbcharsetprober.pyc,,
+pip/_vendor/colorama/ansitowin32.pyc,,
+pip/commands/freeze.pyc,,
+pip/_vendor/cachecontrol/filewrapper.pyc,,
+pip/_vendor/requests/packages/chardet/sjisprober.pyc,,
+pip/_vendor/requests/packages/urllib3/util/timeout.pyc,,
+pip/_vendor/_markerlib/markers.pyc,,
+pip/_vendor/requests/packages/urllib3/_collections.pyc,,
+pip/_vendor/requests/packages/urllib3/util/request.pyc,,
+pip/_vendor/ipaddress.pyc,,
+pip/_vendor/distlib/manifest.pyc,,
+pip/_vendor/html5lib/serializer/htmlserializer.pyc,,
+pip/_vendor/html5lib/serializer/__init__.pyc,,
+pip/_vendor/html5lib/trie/_base.pyc,,
+pip/_vendor/requests/packages/urllib3/response.pyc,,
+pip/req/req_install.pyc,,
+pip/_vendor/html5lib/treeadapters/__init__.pyc,,
+pip/_vendor/cachecontrol/caches/redis_cache.pyc,,
+pip/_vendor/html5lib/filters/inject_meta_charset.pyc,,
+pip/_vendor/requests/packages/chardet/charsetprober.pyc,,
+pip/_vendor/requests/packages/urllib3/util/retry.pyc,,
+pip/_vendor/cachecontrol/__init__.pyc,,
+pip/vcs/mercurial.pyc,,
+pip/_vendor/requests/packages/urllib3/exceptions.pyc,,
+pip/_vendor/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/universaldetector.pyc,,
+pip/_vendor/html5lib/treewalkers/lxmletree.pyc,,
+pip/_vendor/colorama/ansi.pyc,,
+pip/_vendor/requests/packages/chardet/gb2312prober.pyc,,
+pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.pyc,,
+pip/_vendor/progress/bar.pyc,,
+pip/wheel.pyc,,
+pip/_vendor/lockfile/pidlockfile.pyc,,
+pip/_vendor/requests/__init__.pyc,,
+pip/_vendor/requests/packages/chardet/chardetect.pyc,,
+pip/_vendor/html5lib/filters/optionaltags.pyc,,
+pip/_vendor/requests/packages/urllib3/connection.pyc,,
+pip/utils/appdirs.pyc,,
+pip/_vendor/colorama/winterm.pyc,,

+ 6 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/WHEEL

@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.24.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+

+ 5 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/entry_points.txt

@@ -0,0 +1,5 @@
+[console_scripts]
+pip = pip:main
+pip3 = pip:main
+pip3.4 = pip:main
+
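
These console_scripts entries are what generate the venv/bin/pip wrappers recorded above. For reference, a project declares the same mapping in its own packaging metadata; a minimal, hypothetical sketch (package and function names are made up for illustration)::

    # setup.py -- hypothetical project using a console_scripts entry point
    from setuptools import setup

    setup(
        name='mytool',
        version='0.1',
        py_modules=['mytool'],
        entry_points={
            'console_scripts': [
                # installs a bin/mytool wrapper that calls mytool.main()
                'mytool = mytool:main',
            ],
        },
    )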

File changes will not be displayed because they are too large
+ 0 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/metadata.json


+ 1 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/pbr.json

@@ -0,0 +1 @@
+{"is_release": true, "git_version": "78daccc"}

+ 1 - 0
venv/lib/python2.7/site-packages/pip-7.1.2.dist-info/top_level.txt

@@ -0,0 +1 @@
+pip

+ 11 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.py

@@ -0,0 +1,11 @@
+"""CacheControl import Interface.
+
+Make it easy to import from cachecontrol without long namespaces.
+"""
+__author__ = 'Eric Larson'
+__email__ = 'eric@ionrock.org'
+__version__ = '0.11.5'
+
+from .wrapper import CacheControl
+from .adapter import CacheControlAdapter
+from .controller import CacheController

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/__init__.pyc
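
The shortcuts exported above make the usual one-liner possible: wrap a requests session and get transparent caching. A minimal sketch, assuming this vendored copy is importable (the URL is an arbitrary example)::

    import requests
    from pip._vendor.cachecontrol import CacheControl

    # Wrap a plain session; GETs are now served from an in-memory cache
    # whenever the response headers allow it.
    sess = CacheControl(requests.Session())

    resp = sess.get('https://pypi.python.org/simple/')   # network request
    resp = sess.get('https://pypi.python.org/simple/')   # may come from cache
    print(resp.from_cache)   # attribute set by CacheControlAdapter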


+ 117 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.py

@@ -0,0 +1,117 @@
+import functools
+
+from pip._vendor.requests.adapters import HTTPAdapter
+
+from .controller import CacheController
+from .cache import DictCache
+from .filewrapper import CallbackFileWrapper
+
+
+class CacheControlAdapter(HTTPAdapter):
+    invalidating_methods = set(['PUT', 'DELETE'])
+
+    def __init__(self, cache=None,
+                 cache_etags=True,
+                 controller_class=None,
+                 serializer=None,
+                 heuristic=None,
+                 *args, **kw):
+        super(CacheControlAdapter, self).__init__(*args, **kw)
+        self.cache = cache or DictCache()
+        self.heuristic = heuristic
+
+        controller_factory = controller_class or CacheController
+        self.controller = controller_factory(
+            self.cache,
+            cache_etags=cache_etags,
+            serializer=serializer,
+        )
+
+    def send(self, request, **kw):
+        """
+        Send a request. Use the request information to see if a cached
+        response exists, and cache the response if we need to and can.
+        """
+        if request.method == 'GET':
+            cached_response = self.controller.cached_request(request)
+            if cached_response:
+                return self.build_response(request, cached_response,
+                                           from_cache=True)
+
+            # check for etags and add headers if appropriate
+            request.headers.update(
+                self.controller.conditional_headers(request)
+            )
+
+        resp = super(CacheControlAdapter, self).send(request, **kw)
+
+        return resp
+
+    def build_response(self, request, response, from_cache=False):
+        """
+        Build a response by making a request or using the cache.
+
+        This will end up calling send and returning a potentially
+        cached response
+        """
+        if not from_cache and request.method == 'GET':
+
+            # apply any expiration heuristics
+            if response.status == 304:
+                # We must have sent an ETag request. This could mean
+                # that we've been expired already or that we simply
+                # have an etag. In either case, we want to try and
+                # update the cache.
+                cached_response = self.controller.update_cached_response(
+                    request, response
+                )
+
+                if cached_response is not response:
+                    from_cache = True
+
+                # We are done with the server response, read a
+                # possible response body (compliant servers will
+                # not return one, but we cannot be 100% sure) and
+                # release the connection back to the pool.
+                response.read(decode_content=False)
+                response.release_conn()
+
+                response = cached_response
+
+            # We always cache the 301 responses
+            elif response.status == 301:
+                self.controller.cache_response(request, response)
+            else:
+                # Check for any heuristics that might update headers
+                # before trying to cache.
+                if self.heuristic:
+                    response = self.heuristic.apply(response)
+
+                # Wrap the response file with a wrapper that will cache the
+                #   response when the stream has been consumed.
+                response._fp = CallbackFileWrapper(
+                    response._fp,
+                    functools.partial(
+                        self.controller.cache_response,
+                        request,
+                        response,
+                    )
+                )
+
+        resp = super(CacheControlAdapter, self).build_response(
+            request, response
+        )
+
+        # See if we should invalidate the cache.
+        if request.method in self.invalidating_methods and resp.ok:
+            cache_url = self.controller.cache_url(request.url)
+            self.cache.delete(cache_url)
+
+        # Give the request a from_cache attr to let people use it
+        resp.from_cache = from_cache
+
+        return resp
+
+    def close(self):
+        self.cache.close()
+        super(CacheControlAdapter, self).close()

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/adapter.pyc
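
CacheControlAdapter is an ordinary requests transport adapter, so it can also be mounted by hand instead of going through the CacheControl wrapper. A sketch under that assumption::

    import requests
    from pip._vendor.cachecontrol.adapter import CacheControlAdapter

    sess = requests.Session()
    adapter = CacheControlAdapter()   # defaults to an in-memory DictCache

    # Route both schemes through the caching adapter.
    sess.mount('http://', adapter)
    sess.mount('https://', adapter)

Note from the code above that PUT and DELETE requests invalidate any cached entry for their URL (invalidating_methods), and every response gains a from_cache attribute.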


+ 39 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.py

@@ -0,0 +1,39 @@
+"""
+The cache object API for implementing caches. The default is a thread
+safe in-memory dictionary.
+"""
+from threading import Lock
+
+
+class BaseCache(object):
+
+    def get(self, key):
+        raise NotImplementedError()
+
+    def set(self, key, value):
+        raise NotImplementedError()
+
+    def delete(self, key):
+        raise NotImplementedError()
+
+    def close(self):
+        pass
+
+
+class DictCache(BaseCache):
+
+    def __init__(self, init_dict=None):
+        self.lock = Lock()
+        self.data = init_dict or {}
+
+    def get(self, key):
+        return self.data.get(key, None)
+
+    def set(self, key, value):
+        with self.lock:
+            self.data.update({key: value})
+
+    def delete(self, key):
+        with self.lock:
+            if key in self.data:
+                self.data.pop(key)

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/cache.pyc
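
Any object with the same get/set/delete surface can replace DictCache. A hypothetical custom cache, purely for illustration::

    import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.cache import BaseCache

    class LoggingDictCache(BaseCache):
        """Hypothetical in-memory cache that logs keys as they are stored."""
        def __init__(self):
            self.data = {}

        def get(self, key):
            return self.data.get(key)

        def set(self, key, value):
            print('caching %s' % key)
            self.data[key] = value

        def delete(self, key):
            self.data.pop(key, None)

    sess = CacheControl(requests.Session(), cache=LoggingDictCache())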


+ 18 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.py

@@ -0,0 +1,18 @@
+from textwrap import dedent
+
+try:
+    from .file_cache import FileCache
+except ImportError:
+    notice = dedent('''
+    NOTE: In order to use the FileCache you must have
+    lockfile installed. You can install it via pip:
+      pip install lockfile
+    ''')
+    print(notice)
+
+
+try:
+    import redis
+    from .redis_cache import RedisCache
+except ImportError:
+    pass

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/__init__.pyc
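
Both backends are optional: FileCache needs lockfile and RedisCache needs redis, and the try/except blocks above simply leave a backend undefined when its dependency is missing. Downstream code can probe for availability the same way; a sketch::

    try:
        from pip._vendor.cachecontrol.caches import FileCache
        HAVE_FILE_CACHE = True
    except ImportError:
        HAVE_FILE_CACHE = False   # lockfile is not installed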


+ 116 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py

@@ -0,0 +1,116 @@
+import hashlib
+import os
+
+from pip._vendor.lockfile import LockFile
+from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile
+
+from ..cache import BaseCache
+from ..controller import CacheController
+
+
+def _secure_open_write(filename, fmode):
+    # We only want to write to this file, so open it in write only mode
+    flags = os.O_WRONLY
+
+    # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we
+    #  will only open *new* files.
+    # We specify this because we want to ensure that the mode we pass is the
+    # mode of the file.
+    flags |= os.O_CREAT | os.O_EXCL
+
+    # Do not follow symlinks to prevent someone from making a symlink that
+    # we follow and insecurely open a cache file.
+    if hasattr(os, "O_NOFOLLOW"):
+        flags |= os.O_NOFOLLOW
+
+    # On Windows we'll mark this file as binary
+    if hasattr(os, "O_BINARY"):
+        flags |= os.O_BINARY
+
+    # Before we open our file, we want to delete any existing file that is
+    # there
+    try:
+        os.remove(filename)
+    except (IOError, OSError):
+        # The file must not exist already, so we can just skip ahead to opening
+        pass
+
+    # Open our file; the use of os.O_CREAT | os.O_EXCL will ensure that if a
+    # race condition happens between the os.remove and this line, an
+    # error will be raised. Because we utilize a lockfile this should only
+    # happen if someone is attempting to attack us.
+    fd = os.open(filename, flags, fmode)
+    try:
+        return os.fdopen(fd, "wb")
+    except:
+        # An error occurred wrapping our FD in a file object
+        os.close(fd)
+        raise
+
+
+class FileCache(BaseCache):
+    def __init__(self, directory, forever=False, filemode=0o0600,
+                 dirmode=0o0700, use_dir_lock=None, lock_class=None):
+
+        if use_dir_lock is not None and lock_class is not None:
+            raise ValueError("Cannot use use_dir_lock and lock_class together")
+
+        if use_dir_lock:
+            lock_class = MkdirLockFile
+
+        if lock_class is None:
+            lock_class = LockFile
+
+        self.directory = directory
+        self.forever = forever
+        self.filemode = filemode
+        self.dirmode = dirmode
+        self.lock_class = lock_class
+
+    @staticmethod
+    def encode(x):
+        return hashlib.sha224(x.encode()).hexdigest()
+
+    def _fn(self, name):
+        # NOTE: This method should not change as some may depend on it.
+        #       See: https://github.com/ionrock/cachecontrol/issues/63
+        hashed = self.encode(name)
+        parts = list(hashed[:5]) + [hashed]
+        return os.path.join(self.directory, *parts)
+
+    def get(self, key):
+        name = self._fn(key)
+        if not os.path.exists(name):
+            return None
+
+        with open(name, 'rb') as fh:
+            return fh.read()
+
+    def set(self, key, value):
+        name = self._fn(key)
+
+        # Make sure the directory exists
+        try:
+            os.makedirs(os.path.dirname(name), self.dirmode)
+        except (IOError, OSError):
+            pass
+
+        with self.lock_class(name) as lock:
+            # Write our actual file
+            with _secure_open_write(lock.path, self.filemode) as fh:
+                fh.write(value)
+
+    def delete(self, key):
+        name = self._fn(key)
+        if not self.forever:
+            os.remove(name)
+
+
+def url_to_file_path(url, filecache):
+    """Return the file cache path based on the URL.
+
+    This does not ensure the file exists!
+    """
+    key = CacheController.cache_url(url)
+    return filecache._fn(key)

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/file_cache.pyc
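
FileCache persists one file per response, sharded under the cache directory by the first five characters of the SHA-224 digest computed in _fn() above. A usage sketch (the directory name is arbitrary)::

    import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches import FileCache

    # Cached responses land under .web_cache/, surviving process restarts.
    sess = CacheControl(requests.Session(), cache=FileCache('.web_cache'))

With forever=True, delete() becomes a no-op, so entries are never removed.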


+ 41 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py

@@ -0,0 +1,41 @@
+from __future__ import division
+
+from datetime import datetime
+
+
+def total_seconds(td):
+    """Python 2.6 compatability"""
+    if hasattr(td, 'total_seconds'):
+        return td.total_seconds()
+
+    ms = td.microseconds
+    secs = (td.seconds + td.days * 24 * 3600)
+    return (ms + secs * 10**6) / 10**6
+
+
+class RedisCache(object):
+
+    def __init__(self, conn):
+        self.conn = conn
+
+    def get(self, key):
+        return self.conn.get(key)
+
+    def set(self, key, value, expires=None):
+        if not expires:
+            self.conn.set(key, value)
+        else:
+            expires = expires - datetime.now()
+            self.conn.setex(key, total_seconds(expires), value)
+
+    def delete(self, key):
+        self.conn.delete(key)
+
+    def clear(self):
+        """Helper for clearing all the keys in a database. Use with
+        caution!"""
+        for key in self.conn.keys():
+            self.conn.delete(key)
+
+    def close(self):
+        self.conn.disconnect()

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.pyc
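
RedisCache delegates everything to a redis connection object. A sketch using the redis-py client (host and port are placeholders)::

    import redis
    import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.caches.redis_cache import RedisCache

    pool = redis.ConnectionPool(host='localhost', port=6379, db=0)
    sess = CacheControl(requests.Session(),
                        cache=RedisCache(redis.Redis(connection_pool=pool)))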


+ 14 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.py

@@ -0,0 +1,14 @@
+try:
+    from urllib.parse import urljoin
+except ImportError:
+    from urlparse import urljoin
+
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+
+from pip._vendor.requests.packages.urllib3.response import HTTPResponse
+from pip._vendor.requests.packages.urllib3.util import is_fp_closed

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/compat.pyc


+ 299 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.py

@@ -0,0 +1,299 @@
+"""
+The httplib2 algorithms ported for use with requests.
+"""
+import re
+import calendar
+import time
+from email.utils import parsedate_tz
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .cache import DictCache
+from .serialize import Serializer
+
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController(object):
+    """An interface to see if request should cached or not.
+    """
+    def __init__(self, cache=None, cache_etags=True, serializer=None):
+        self.cache = cache or DictCache()
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+
+    @classmethod
+    def _urlnorm(cls, uri):
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri):
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers):
+        """
+        Parse the cache control headers returning a dictionary with values
+        for the different directives.
+        """
+        retval = {}
+
+        cc_header = 'cache-control'
+        if 'Cache-Control' in headers:
+            cc_header = 'Cache-Control'
+
+        if cc_header in headers:
+            parts = headers[cc_header].split(',')
+            parts_with_args = [
+                tuple([x.strip().lower() for x in part.split("=", 1)])
+                for part in parts if -1 != part.find("=")
+            ]
+            parts_wo_args = [
+                (name.strip().lower(), 1)
+                for name in parts if -1 == name.find("=")
+            ]
+            retval = dict(parts_with_args + parts_wo_args)
+        return retval
+
+    def cached_request(self, request):
+        """
+        Return a cached response if it exists in the cache, otherwise
+        return False.
+        """
+        cache_url = self.cache_url(request.url)
+        cc = self.parse_cache_control(request.headers)
+
+        # non-caching states
+        no_cache = 'no-cache' in cc
+        # max-age values are parsed as strings, so compare against '0'
+        if 'max-age' in cc and cc['max-age'] == '0':
+            no_cache = True
+
+        # Bail out if no-cache was set
+        if no_cache:
+            return False
+
+        # It may be in the cache, so let's see if it is going to be
+        # fresh enough
+        resp = self.serializer.loads(request, self.cache.get(cache_url))
+
+        # Check to see if we have a cached object
+        if not resp:
+            return False
+
+        # If we have a cached 301, return it immediately. We don't
+        # need to test our response for other headers b/c it is
+        # intrinsically "cacheable" as it is Permanent.
+        # See:
+        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
+        #
+        # Client can try to refresh the value by repeating the request
+        # with cache busting headers as usual (ie no-cache).
+        if resp.status == 301:
+            return resp
+
+        headers = CaseInsensitiveDict(resp.headers)
+        if not headers or 'date' not in headers:
+            # Without a date or etag, the cached response can never be
+            # used and should be deleted.
+            if 'etag' not in headers:
+                self.cache.delete(cache_url)
+            return False
+
+        now = time.time()
+        date = calendar.timegm(
+            parsedate_tz(headers['date'])
+        )
+        current_age = max(0, now - date)
+
+        # TODO: There is an assumption that the result will be a
+        #       urllib3 response object. This may not be best since we
+        #       could probably avoid instantiating or constructing the
+        #       response until we know we need it.
+        resp_cc = self.parse_cache_control(headers)
+
+        # determine freshness
+        freshness_lifetime = 0
+
+        # Check the max-age pragma in the cache control header
+        if 'max-age' in resp_cc and resp_cc['max-age'].isdigit():
+            freshness_lifetime = int(resp_cc['max-age'])
+
+        # If there isn't a max-age, check for an expires header
+        elif 'expires' in headers:
+            expires = parsedate_tz(headers['expires'])
+            if expires is not None:
+                expire_time = calendar.timegm(expires) - date
+                freshness_lifetime = max(0, expire_time)
+
+        # determine if we are setting freshness limit in the req
+        if 'max-age' in cc:
+            try:
+                freshness_lifetime = int(cc['max-age'])
+            except ValueError:
+                freshness_lifetime = 0
+
+        if 'min-fresh' in cc:
+            try:
+                min_fresh = int(cc['min-fresh'])
+            except ValueError:
+                min_fresh = 0
+            # adjust our current age by our min fresh
+            current_age += min_fresh
+
+        # see how fresh we actually are
+        fresh = (freshness_lifetime > current_age)
+
+        if fresh:
+            return resp
+
+        # we're not fresh. If we don't have an Etag, clear it out
+        if 'etag' not in headers:
+            self.cache.delete(cache_url)
+
+        # return the original handler
+        return False
+
+    def conditional_headers(self, request):
+        cache_url = self.cache_url(request.url)
+        resp = self.serializer.loads(request, self.cache.get(cache_url))
+        new_headers = {}
+
+        if resp:
+            headers = CaseInsensitiveDict(resp.headers)
+
+            if 'etag' in headers:
+                new_headers['If-None-Match'] = headers['ETag']
+
+            if 'last-modified' in headers:
+                new_headers['If-Modified-Since'] = headers['Last-Modified']
+
+        return new_headers
+
+    def cache_response(self, request, response, body=None):
+        """
+        Algorithm for caching requests.
+
+        This assumes a requests Response object.
+        """
+        # From httplib2: Don't cache 206's since we aren't going to
+        #                handle byte range requests
+        if response.status not in [200, 203, 300, 301]:
+            return
+
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        cache_url = self.cache_url(request.url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = cc.get('no-store') or cc_req.get('no-store')
+        if no_store and self.cache.get(cache_url):
+            self.cache.delete(cache_url)
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and 'etag' in response_headers:
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body=body),
+            )
+
+        # Add to the cache any 301s. We do this before looking at
+        # the Date headers.
+        elif response.status == 301:
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response)
+            )
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif 'date' in response_headers:
+            # cache when there is a max-age > 0
+            if cc and cc.get('max-age'):
+                if int(cc['max-age']) > 0:
+                    self.cache.set(
+                        cache_url,
+                        self.serializer.dumps(request, response, body=body),
+                    )
+
+            # If the request can expire, it means we should cache it
+            # in the meantime.
+            elif 'expires' in response_headers:
+                if response_headers['expires']:
+                    self.cache.set(
+                        cache_url,
+                        self.serializer.dumps(request, response, body=body),
+                    )
+
+    def update_cached_response(self, request, response):
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
+        """
+        cache_url = self.cache_url(request.url)
+
+        cached_response = self.serializer.loads(
+            request,
+            self.cache.get(cache_url)
+        )
+
+        if not cached_response:
+            # we didn't have a cached response
+            return response
+
+        # Let's update our headers with the headers from the new request:
+        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+        #
+        # The server isn't supposed to send headers that would make
+        # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know might be problematic due to
+        # typical assumptions.
+        excluded_headers = [
+            "content-length",
+        ]
+
+        cached_response.headers.update(
+            dict((k, v) for k, v in response.headers.items()
+                 if k.lower() not in excluded_headers)
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self.cache.set(
+            cache_url,
+            self.serializer.dumps(request, cached_response),
+        )
+
+        return cached_response

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/controller.pyc
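
The URL normalization and Cache-Control parsing above are usable on their own. A small sketch; the outputs follow directly from the code::

    from pip._vendor.cachecontrol.controller import CacheController

    # Scheme and authority are lowercased, an empty path becomes '/',
    # and any fragment is dropped.
    print(CacheController.cache_url('HTTP://Example.COM'))
    # -> http://example.com/

    ctrl = CacheController()
    print(ctrl.parse_cache_control({'Cache-Control': 'max-age=3600, no-store'}))
    # -> {'max-age': '3600', 'no-store': 1}

Note that directive values stay strings ('3600'), while valueless directives map to the integer 1.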


+ 63 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.py

@@ -0,0 +1,63 @@
+from io import BytesIO
+
+
+class CallbackFileWrapper(object):
+    """
+    Small wrapper around a fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+    """
+
+    def __init__(self, fp, callback):
+        self.__buf = BytesIO()
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set.  Using __getattribute__ with the mangled
+        # private name[0] looks up the attribute value and raises an
+        # AttributeError when it doesn't exist. This stops things from
+        # infinitely recursing calls to getattr in the case where
+        # self.__fp hasn't been set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__('_CallbackFileWrapper__fp')
+        return getattr(fp, name)
+
+    def __is_fp_closed(self):
+        try:
+            return self.__fp.fp is None
+        except AttributeError:
+            pass
+
+        try:
+            return self.__fp.closed
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def read(self, amt=None):
+        data = self.__fp.read(amt)
+        self.__buf.write(data)
+
+        if self.__is_fp_closed():
+            if self.__callback:
+                self.__callback(self.__buf.getvalue())
+
+            # We assign this to None here, because otherwise we can get into
+            # really tricky problems where the CPython interpreter deadlocks
+            # because the callback is holding a reference to something which
+            # has a __del__ method. Setting this to None breaks the cycle
+            # and allows the garbage collector to do its thing normally.
+            self.__callback = None
+
+        return data

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/filewrapper.pyc
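
To see the tee-and-callback behaviour in isolation, the wrapper can be driven with a stand-in for a urllib3 fp object. FakeFP below is hypothetical, purely for illustration::

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

    class FakeFP(object):
        """Hypothetical file-like object that reports closed at EOF."""
        def __init__(self, data):
            self._data = data
            self.closed = False

        def read(self, amt=None):
            out, self._data = self._data, b''
            if not out:
                self.closed = True
            return out

    def on_complete(buf):
        print('cached %d bytes' % len(buf))

    wrapped = CallbackFileWrapper(FakeFP(b'payload'), on_complete)
    wrapped.read()   # returns b'payload'; fp still open, no callback yet
    wrapped.read()   # returns b''; fp reports closed, callback fires once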


+ 134 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.py

@@ -0,0 +1,134 @@
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+    date = date or datetime.now()
+    return date + delta
+
+
+def datetime_to_header(dt):
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+    def warning(self, response):
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response):
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response):
+        warning_header_value = self.warning(response)
+        response.headers.update(self.update_headers(response))
+        if warning_header_value is not None:
+            response.headers.update({'Warning': warning_header_value})
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an Expires header 1 day in the
+    future.
+    """
+    def update_headers(self, response):
+        headers = {}
+
+        if 'expires' not in response.headers:
+            date = parsedate(response.headers['date'])
+            expires = expire_after(timedelta(days=1),
+                                   date=datetime(*date[:6]))
+            headers['expires'] = datetime_to_header(expires)
+            headers['cache-control'] = 'public'
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """
+
+    def __init__(self, **kw):
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response):
+        expires = expire_after(self.delta)
+        return {
+            'expires': datetime_to_header(expires),
+            'cache-control': 'public',
+        }
+
+    def warning(self, response):
+        tmpl = '110 - Automatically cached for %s. Response might be stale'
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+    """
+    cacheable_by_default_statuses = set([
+        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+    ])
+
+    def update_headers(self, resp):
+        headers = resp.headers
+
+        if 'expires' in headers:
+            return {}
+
+        if 'cache-control' in headers and headers['cache-control'] != 'public':
+            return {}
+
+        if resp.status not in self.cacheable_by_default_statuses:
+            return {}
+
+        if 'date' not in headers or 'last-modified' not in headers:
+            return {}
+
+        date = parsedate_tz(headers['date'])
+        last_modified = parsedate(headers['last-modified'])
+        if date is None or last_modified is None:
+            return {}
+        date = calendar.timegm(date)
+
+        now = time.time()
+        current_age = max(0, now - date)
+        delta = date - calendar.timegm(last_modified)
+        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+        if freshness_lifetime <= current_age:
+            return {}
+
+        expires = date + freshness_lifetime
+        return {'expires': time.strftime(TIME_FMT, time.gmtime(expires))}
+
+    def warning(self, resp):
+        return None

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/heuristics.pyc
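
As a sketch of how these heuristics are driven, the snippet below applies ExpiresAfter to a minimal stand-in response (hypothetical; in real use the adapter passes an actual urllib3 response). LastModified follows the same protocol, but derives its lifetime as one tenth of the Date/Last-Modified gap, capped at 24 hours:

    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    class DummyResponse(object):
        """Hypothetical stand-in; apply() only touches .headers."""
        def __init__(self):
            self.headers = {}

    resp = ExpiresAfter(days=1).apply(DummyResponse())
    # resp.headers now holds 'expires', 'cache-control': 'public', and a
    # '110 - Automatically cached for 1 day, 0:00:00. ...' Warning header.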


+ 184 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.py

@@ -0,0 +1,184 @@
+import base64
+import io
+import json
+import zlib
+
+from pip._vendor.requests.structures import CaseInsensitiveDict
+
+from .compat import HTTPResponse, pickle
+
+
+def _b64_encode_bytes(b):
+    return base64.b64encode(b).decode("ascii")
+
+
+def _b64_encode_str(s):
+    return _b64_encode_bytes(s.encode("utf8"))
+
+
+def _b64_decode_bytes(b):
+    return base64.b64decode(b.encode("ascii"))
+
+
+def _b64_decode_str(s):
+    return _b64_decode_bytes(s).decode("utf8")
+
+
+class Serializer(object):
+
+    def dumps(self, request, response, body=None):
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        if body is None:
+            body = response.read(decode_content=False)
+
+            # NOTE: 99% sure this is dead code. I'm only leaving it
+            #       here b/c I don't have a test yet to prove
+            #       it. Basically, before using
+            #       `cachecontrol.filewrapper.CallbackFileWrapper`,
+            #       this made an effort to reset the file handle. The
+            #       `CallbackFileWrapper` short circuits this code by
+            #       setting the body as the content is consumed, with
+            #       the result that a `body` argument is *always*
+            #       passed into cache_response, and in turn,
+            #       `Serializer.dumps`.
+            response._fp = io.BytesIO(body)
+
+        data = {
+            "response": {
+                "body": _b64_encode_bytes(body),
+                "headers": dict(
+                    (_b64_encode_str(k), _b64_encode_str(v))
+                    for k, v in response.headers.items()
+                ),
+                "status": response.status,
+                "version": response.version,
+                "reason": _b64_encode_str(response.reason),
+                "strict": response.strict,
+                "decode_content": response.decode_content,
+            },
+        }
+
+        # Construct our vary headers
+        data["vary"] = {}
+        if "vary" in response_headers:
+            varied_headers = response_headers['vary'].split(',')
+            for header in varied_headers:
+                header = header.strip()
+                data["vary"][header] = request.headers.get(header, None)
+
+        # Encode our Vary headers to ensure they can be serialized as JSON
+        data["vary"] = dict(
+            (_b64_encode_str(k), _b64_encode_str(v) if v is not None else v)
+            for k, v in data["vary"].items()
+        )
+
+        return b",".join([
+            b"cc=2",
+            zlib.compress(
+                json.dumps(
+                    data, separators=(",", ":"), sort_keys=True,
+                ).encode("utf8"),
+            ),
+        ])
+
+    def loads(self, request, data):
+        # Short circuit if we've been given an empty set of data
+        if not data:
+            return
+
+        # Determine what version of the serializer the data was serialized
+        # with
+        try:
+            ver, data = data.split(b",", 1)
+        except ValueError:
+            ver = b"cc=0"
+
+        # Make sure that our "ver" is actually a version and isn't a false
+        # positive from a ',' appearing in the data stream.
+        if ver[:3] != b"cc=":
+            data = ver + data
+            ver = b"cc=0"
+
+        # Get the version number out of the cc=N
+        ver = ver.split(b"=", 1)[-1].decode("ascii")
+
+        # Dispatch to the actual load method for the given version
+        try:
+            return getattr(self, "_loads_v{0}".format(ver))(request, data)
+        except AttributeError:
+            # This is a version we don't have a loads function for, so we'll
+            # just treat it as a miss and return None
+            return
+
+    def prepare_response(self, request, cached):
+        """Verify our vary headers match and construct a real urllib3
+        HTTPResponse object.
+        """
+        # Special case the '*' Vary value as it means we cannot actually
+        # determine if the cached response is suitable for this request.
+        if "*" in cached.get("vary", {}):
+            return
+
+        # Ensure that the Vary headers for the cached response match our
+        # request
+        for header, value in cached.get("vary", {}).items():
+            if request.headers.get(header, None) != value:
+                return
+
+        body_raw = cached["response"].pop("body")
+
+        try:
+            body = io.BytesIO(body_raw)
+        except TypeError:
+            # This can happen if cachecontrol serialized to v1 format (pickle)
+            # using Python 2. A Python 2 str (a byte string) will be unpickled
+            # as a Python 3 str (a unicode string), which will cause the above to
+            # fail with:
+            #
+            #     TypeError: 'str' does not support the buffer interface
+            body = io.BytesIO(body_raw.encode('utf8'))
+
+        return HTTPResponse(
+            body=body,
+            preload_content=False,
+            **cached["response"]
+        )
+
+    def _loads_v0(self, request, data):
+        # The original legacy cache data. This doesn't contain enough
+        # information to construct everything we need, so we'll treat this as
+        # a miss.
+        return
+
+    def _loads_v1(self, request, data):
+        try:
+            cached = pickle.loads(data)
+        except ValueError:
+            return
+
+        return self.prepare_response(request, cached)
+
+    def _loads_v2(self, request, data):
+        try:
+            cached = json.loads(zlib.decompress(data).decode("utf8"))
+        except ValueError:
+            return
+
+        # We need to decode the items that we've base64 encoded
+        cached["response"]["body"] = _b64_decode_bytes(
+            cached["response"]["body"]
+        )
+        cached["response"]["headers"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v))
+            for k, v in cached["response"]["headers"].items()
+        )
+        cached["response"]["reason"] = _b64_decode_str(
+            cached["response"]["reason"],
+        )
+        cached["vary"] = dict(
+            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
+            for k, v in cached["vary"].items()
+        )
+
+        return self.prepare_response(request, cached)

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/serialize.pyc
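
The on-disk format produced by dumps() is easy to see in isolation: a b"cc=N" version tag, a comma, then zlib-compressed JSON. A small sketch with a synthetic payload standing in for a real request/response pair:

    import json
    import zlib

    payload = zlib.compress(json.dumps({"response": {}}).encode("utf8"))
    data = b",".join([b"cc=2", payload])

    # This mirrors what loads() does before dispatching to _loads_v2:
    ver, body = data.split(b",", 1)
    assert ver == b"cc=2"
    assert json.loads(zlib.decompress(body).decode("utf8")) == {"response": {}}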


+ 21 - 0
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.py

@@ -0,0 +1,21 @@
+from .adapter import CacheControlAdapter
+from .cache import DictCache
+
+
+def CacheControl(sess,
+                 cache=None,
+                 cache_etags=True,
+                 serializer=None,
+                 heuristic=None):
+
+    cache = cache or DictCache()
+    adapter = CacheControlAdapter(
+        cache,
+        cache_etags=cache_etags,
+        serializer=serializer,
+        heuristic=heuristic,
+    )
+    sess.mount('http://', adapter)
+    sess.mount('https://', adapter)
+
+    return sess

BIN
venv/lib/python2.7/site-packages/pip/_vendor/cachecontrol/wrapper.pyc
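
Wiring it all together follows the wrapper above; this sketch assumes the vendored requests session that pip ships, and the URL is illustrative:

    from pip._vendor import requests
    from pip._vendor.cachecontrol.wrapper import CacheControl

    sess = CacheControl(requests.Session())  # mounts the caching adapter
    resp = sess.get('https://pypi.python.org/simple/')
    # A repeated GET can now be answered from the default DictCache,
    # subject to the response's caching headers (or a heuristic).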


+ 2411 - 0
venv/lib/python2.7/site-packages/pip/_vendor/ipaddress.py

@@ -0,0 +1,2411 @@
+# Copyright 2007 Google Inc.
+#  Licensed to PSF under a Contributor Agreement.
+
+"""A fast, lightweight IPv4/IPv6 manipulation library in Python.
+
+This library is used to create/poke/manipulate IPv4 and IPv6 addresses
+and networks.
+
+"""
+
+from __future__ import unicode_literals
+
+
+import itertools
+import struct
+
+__version__ = '1.0.14'
+
+# Compatibility functions
+_compat_int_types = (int,)
+try:
+    _compat_int_types = (int, long)
+except NameError:
+    pass
+try:
+    _compat_str = unicode
+except NameError:
+    _compat_str = str
+    assert bytes != str
+if b'\0'[0] == 0:  # Python 3 semantics
+    def _compat_bytes_to_byte_vals(byt):
+        return byt
+else:
+    def _compat_bytes_to_byte_vals(byt):
+        return [struct.unpack(b'!B', b)[0] for b in byt]
+try:
+    _compat_int_from_byte_vals = int.from_bytes
+except AttributeError:
+    def _compat_int_from_byte_vals(bytvals, endianess):
+        assert endianess == 'big'
+        res = 0
+        for bv in bytvals:
+            assert isinstance(bv, _compat_int_types)
+            res = (res << 8) + bv
+        return res
+
+
+def _compat_to_bytes(intval, length, endianess):
+    assert isinstance(intval, _compat_int_types)
+    assert endianess == 'big'
+    if length == 4:
+        if intval < 0 or intval >= 2 ** 32:
+            raise struct.error("integer out of range for 'I' format code")
+        return struct.pack(b'!I', intval)
+    elif length == 16:
+        if intval < 0 or intval >= 2 ** 128:
+            raise struct.error("integer out of range for 'QQ' format code")
+        return struct.pack(b'!QQ', intval >> 64, intval & 0xffffffffffffffff)
+    else:
+        raise NotImplementedError()
+if hasattr(int, 'bit_length'):
+    # Not int.bit_length, since that won't work in 2.7 where long exists.
+    def _compat_bit_length(i):
+        return i.bit_length()
+else:
+    def _compat_bit_length(i):
+        for res in itertools.count():
+            if i >> res == 0:
+                return res
+
+
+def _compat_range(start, end, step=1):
+    assert step > 0
+    i = start
+    while i < end:
+        yield i
+        i += step
+
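+# Illustrative round trip through the compat helpers above (a sketch for
+# exposition, not in the upstream module; Python 2 repr shown for the
+# packed bytes, Python 3 prints b'...'):
+#
+#     >>> _compat_to_bytes(3221225985, 4, 'big')
+#     '\xc0\x00\x02\x01'
+#     >>> _compat_int_from_byte_vals(
+#     ...     _compat_bytes_to_byte_vals(b'\xc0\x00\x02\x01'), 'big')
+#     3221225985
+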
+
+class _TotalOrderingMixin(object):
+    __slots__ = ()
+
+    # Helper that derives the other comparison operations from
+    # __lt__ and __eq__
+    # We avoid functools.total_ordering because it doesn't handle
+    # NotImplemented correctly yet (http://bugs.python.org/issue10042)
+    def __eq__(self, other):
+        raise NotImplementedError
+
+    def __ne__(self, other):
+        equal = self.__eq__(other)
+        if equal is NotImplemented:
+            return NotImplemented
+        return not equal
+
+    def __lt__(self, other):
+        raise NotImplementedError
+
+    def __le__(self, other):
+        less = self.__lt__(other)
+        if less is NotImplemented or not less:
+            return self.__eq__(other)
+        return less
+
+    def __gt__(self, other):
+        less = self.__lt__(other)
+        if less is NotImplemented:
+            return NotImplemented
+        equal = self.__eq__(other)
+        if equal is NotImplemented:
+            return NotImplemented
+        return not (less or equal)
+
+    def __ge__(self, other):
+        less = self.__lt__(other)
+        if less is NotImplemented:
+            return NotImplemented
+        return not less
+
+
+IPV4LENGTH = 32
+IPV6LENGTH = 128
+
+
+class AddressValueError(ValueError):
+    """A Value Error related to the address."""
+
+
+class NetmaskValueError(ValueError):
+    """A Value Error related to the netmask."""
+
+
+def ip_address(address):
+    """Take an IP string/int and return an object of the correct type.
+
+    Args:
+        address: A string or integer, the IP address.  Either IPv4 or
+          IPv6 addresses may be supplied; integers less than 2**32 will
+          be considered to be IPv4 by default.
+
+    Returns:
+        An IPv4Address or IPv6Address object.
+
+    Raises:
+        ValueError: if the *address* passed isn't either a v4 or a v6
+          address
+
+    """
+    try:
+        return IPv4Address(address)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    try:
+        return IPv6Address(address)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    if isinstance(address, bytes):
+        raise AddressValueError(
+            '%r does not appear to be an IPv4 or IPv6 address. '
+            'Did you pass in a bytes (str in Python 2) instead of'
+            ' a unicode object?' % address)
+
+    raise ValueError('%r does not appear to be an IPv4 or IPv6 address' %
+                     address)
+
+
+def ip_network(address, strict=True):
+    """Take an IP string/int and return an object of the correct type.
+
+    Args:
+        address: A string or integer, the IP network.  Either IPv4 or
+          IPv6 networks may be supplied; integers less than 2**32 will
+          be considered to be IPv4 by default.
+
+    Returns:
+        An IPv4Network or IPv6Network object.
+
+    Raises:
+        ValueError: if the string passed isn't either a v4 or a v6
+          address. Or if the network has host bits set.
+
+    """
+    try:
+        return IPv4Network(address, strict)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    try:
+        return IPv6Network(address, strict)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    raise ValueError('%r does not appear to be an IPv4 or IPv6 network' %
+                     address)
+
+
+def ip_interface(address):
+    """Take an IP string/int and return an object of the correct type.
+
+    Args:
+        address: A string or integer, the IP address.  Either IPv4 or
+          IPv6 addresses may be supplied; integers less than 2**32 will
+          be considered to be IPv4 by default.
+
+    Returns:
+        An IPv4Interface or IPv6Interface object.
+
+    Raises:
+        ValueError: if the string passed isn't either a v4 or a v6
+          address.
+
+    Notes:
+        The IPv?Interface classes describe an Address on a particular
+        Network, so they're basically a combination of both the Address
+        and Network classes.
+
+    """
+    try:
+        return IPv4Interface(address)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    try:
+        return IPv6Interface(address)
+    except (AddressValueError, NetmaskValueError):
+        pass
+
+    raise ValueError('%r does not appear to be an IPv4 or IPv6 interface' %
+                     address)
+
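+# Illustrative usage of the three factory functions above (a sketch for
+# exposition, not in the upstream module; Python 3 reprs shown, Python 2
+# adds a u'' prefix):
+#
+#     >>> ip_address(u'192.0.2.1')
+#     IPv4Address('192.0.2.1')
+#     >>> ip_network(u'192.0.2.0/24')
+#     IPv4Network('192.0.2.0/24')
+#     >>> ip_interface(u'192.0.2.1/24')
+#     IPv4Interface('192.0.2.1/24')
+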
+
+def v4_int_to_packed(address):
+    """Represent an address as 4 packed bytes in network (big-endian) order.
+
+    Args:
+        address: An integer representation of an IPv4 IP address.
+
+    Returns:
+        The integer address packed as 4 bytes in network (big-endian) order.
+
+    Raises:
+        ValueError: If the integer is negative or too large to be an
+          IPv4 IP address.
+
+    """
+    try:
+        return _compat_to_bytes(address, 4, 'big')
+    except (struct.error, OverflowError):
+        raise ValueError("Address negative or too large for IPv4")
+
+
+def v6_int_to_packed(address):
+    """Represent an address as 16 packed bytes in network (big-endian) order.
+
+    Args:
+        address: An integer representation of an IPv6 IP address.
+
+    Returns:
+        The integer address packed as 16 bytes in network (big-endian) order.
+
+    """
+    try:
+        return _compat_to_bytes(address, 16, 'big')
+    except (struct.error, OverflowError):
+        raise ValueError("Address negative or too large for IPv6")
+
+
+def _split_optional_netmask(address):
+    """Helper to split the netmask and raise AddressValueError if needed"""
+    addr = _compat_str(address).split('/')
+    if len(addr) > 2:
+        raise AddressValueError("Only one '/' permitted in %r" % address)
+    return addr
+
+
+def _find_address_range(addresses):
+    """Find a sequence of sorted deduplicated IPv#Address.
+
+    Args:
+        addresses: a list of IPv#Address objects.
+
+    Yields:
+        A tuple containing the first and last IP addresses in each run.
+
+    """
+    it = iter(addresses)
+    first = last = next(it)
+    for ip in it:
+        if ip._ip != last._ip + 1:
+            yield first, last
+            first = ip
+        last = ip
+    yield first, last
+
+
+def _count_righthand_zero_bits(number, bits):
+    """Count the number of zero bits on the right hand side.
+
+    Args:
+        number: an integer.
+        bits: maximum number of bits to count.
+
+    Returns:
+        The number of zero bits on the right hand side of the number.
+
+    """
+    if number == 0:
+        return bits
+    return min(bits, _compat_bit_length(~number & (number - 1)))
+
+
+def summarize_address_range(first, last):
+    """Summarize a network range given the first and last IP addresses.
+
+    Example:
+        >>> list(summarize_address_range(IPv4Address('192.0.2.0'),
+        ...                              IPv4Address('192.0.2.130')))
+        ...                                #doctest: +NORMALIZE_WHITESPACE
+        [IPv4Network('192.0.2.0/25'), IPv4Network('192.0.2.128/31'),
+         IPv4Network('192.0.2.130/32')]
+
+    Args:
+        first: the first IPv4Address or IPv6Address in the range.
+        last: the last IPv4Address or IPv6Address in the range.
+
+    Returns:
+        An iterator of the summarized IPv(4|6) network objects.
+
+    Raises:
+        TypeError:
+            If the first and last objects are not IP addresses.
+            If the first and last objects are not the same version.
+        ValueError:
+            If the last object is not greater than the first.
+            If the version of the first address is not 4 or 6.
+
+    """
+    if (not (isinstance(first, _BaseAddress) and
+             isinstance(last, _BaseAddress))):
+        raise TypeError('first and last must be IP addresses, not networks')
+    if first.version != last.version:
+        raise TypeError("%s and %s are not of the same version" % (
+                        first, last))
+    if first > last:
+        raise ValueError('last IP address must be greater than first')
+
+    if first.version == 4:
+        ip = IPv4Network
+    elif first.version == 6:
+        ip = IPv6Network
+    else:
+        raise ValueError('unknown IP version')
+
+    ip_bits = first._max_prefixlen
+    first_int = first._ip
+    last_int = last._ip
+    while first_int <= last_int:
+        nbits = min(_count_righthand_zero_bits(first_int, ip_bits),
+                    _compat_bit_length(last_int - first_int + 1) - 1)
+        net = ip((first_int, ip_bits - nbits))
+        yield net
+        first_int += 1 << nbits
+        if first_int - 1 == ip._ALL_ONES:
+            break
+
+
+def _collapse_addresses_internal(addresses):
+    """Loops through the addresses, collapsing concurrent netblocks.
+
+    Example:
+
+        ip1 = IPv4Network('192.0.2.0/26')
+        ip2 = IPv4Network('192.0.2.64/26')
+        ip3 = IPv4Network('192.0.2.128/26')
+        ip4 = IPv4Network('192.0.2.192/26')
+
+        _collapse_addresses_internal([ip1, ip2, ip3, ip4]) ->
+          [IPv4Network('192.0.2.0/24')]
+
+        This shouldn't be called directly; it is called via
+          collapse_addresses([]).
+
+    Args:
+        addresses: A list of IPv4Network's or IPv6Network's
+
+    Returns:
+        A list of IPv4Network's or IPv6Network's depending on what we were
+        passed.
+
+    """
+    # First merge
+    to_merge = list(addresses)
+    subnets = {}
+    while to_merge:
+        net = to_merge.pop()
+        supernet = net.supernet()
+        existing = subnets.get(supernet)
+        if existing is None:
+            subnets[supernet] = net
+        elif existing != net:
+            # Merge consecutive subnets
+            del subnets[supernet]
+            to_merge.append(supernet)
+    # Then iterate over resulting networks, skipping subsumed subnets
+    last = None
+    for net in sorted(subnets.values()):
+        if last is not None:
+            # Since they are sorted,
+            # last.network_address <= net.network_address is a given.
+            if last.broadcast_address >= net.broadcast_address:
+                continue
+        yield net
+        last = net
+
+
+def collapse_addresses(addresses):
+    """Collapse a list of IP objects.
+
+    Example:
+        collapse_addresses([IPv4Network('192.0.2.0/25'),
+                            IPv4Network('192.0.2.128/25')]) ->
+                           [IPv4Network('192.0.2.0/24')]
+
+    Args:
+        addresses: An iterator of IPv4Network or IPv6Network objects.
+
+    Returns:
+        An iterator of the collapsed IPv(4|6)Network objects.
+
+    Raises:
+        TypeError: If passed a list of mixed version objects.
+
+    """
+    addrs = []
+    ips = []
+    nets = []
+
+    # split IP addresses and networks
+    for ip in addresses:
+        if isinstance(ip, _BaseAddress):
+            if ips and ips[-1]._version != ip._version:
+                raise TypeError("%s and %s are not of the same version" % (
+                                ip, ips[-1]))
+            ips.append(ip)
+        elif ip._prefixlen == ip._max_prefixlen:
+            if ips and ips[-1]._version != ip._version:
+                raise TypeError("%s and %s are not of the same version" % (
+                                ip, ips[-1]))
+            try:
+                ips.append(ip.ip)
+            except AttributeError:
+                ips.append(ip.network_address)
+        else:
+            if nets and nets[-1]._version != ip._version:
+                raise TypeError("%s and %s are not of the same version" % (
+                                ip, nets[-1]))
+            nets.append(ip)
+
+    # sort and dedup
+    ips = sorted(set(ips))
+
+    # find consecutive address ranges in the sorted sequence and summarize them
+    if ips:
+        for first, last in _find_address_range(ips):
+            addrs.extend(summarize_address_range(first, last))
+
+    return _collapse_addresses_internal(addrs + nets)
+
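+# Illustrative example for collapse_addresses (a sketch for exposition,
+# not in the upstream module):
+#
+#     >>> list(collapse_addresses([ip_network(u'192.0.2.0/25'),
+#     ...                          ip_network(u'192.0.2.128/25')]))
+#     [IPv4Network('192.0.2.0/24')]
+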
+
+def get_mixed_type_key(obj):
+    """Return a key suitable for sorting between networks and addresses.
+
+    Address and Network objects are not sortable by default; they're
+    fundamentally different so the expression
+
+        IPv4Address('192.0.2.0') <= IPv4Network('192.0.2.0/24')
+
+    doesn't make any sense.  There are times, however, when you may wish
+    to have ipaddress sort these for you anyway. If you need to do this,
+    you can use this function as the key= argument to sorted().
+
+    Args:
+      obj: either a Network or Address object.
+    Returns:
+      appropriate key.
+
+    """
+    if isinstance(obj, _BaseNetwork):
+        return obj._get_networks_key()
+    elif isinstance(obj, _BaseAddress):
+        return obj._get_address_key()
+    return NotImplemented
+
+
+class _IPAddressBase(_TotalOrderingMixin):
+
+    """The mother class."""
+
+    __slots__ = ()
+
+    @property
+    def exploded(self):
+        """Return the longhand version of the IP address as a string."""
+        return self._explode_shorthand_ip_string()
+
+    @property
+    def compressed(self):
+        """Return the shorthand version of the IP address as a string."""
+        return _compat_str(self)
+
+    @property
+    def reverse_pointer(self):
+        """The name of the reverse DNS pointer for the IP address, e.g.:
+            >>> ipaddress.ip_address("127.0.0.1").reverse_pointer
+            '1.0.0.127.in-addr.arpa'
+            >>> ipaddress.ip_address("2001:db8::1").reverse_pointer
+            '1.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.0.8.b.d.0.1.0.0.2.ip6.arpa'
+
+        """
+        return self._reverse_pointer()
+
+    @property
+    def version(self):
+        msg = '%s has no version specified' % (type(self),)
+        raise NotImplementedError(msg)
+
+    def _check_int_address(self, address):
+        if address < 0:
+            msg = "%d (< 0) is not permitted as an IPv%d address"
+            raise AddressValueError(msg % (address, self._version))
+        if address > self._ALL_ONES:
+            msg = "%d (>= 2**%d) is not permitted as an IPv%d address"
+            raise AddressValueError(msg % (address, self._max_prefixlen,
+                                           self._version))
+
+    def _check_packed_address(self, address, expected_len):
+        address_len = len(address)
+        if address_len != expected_len:
+            msg = (
+                '%r (len %d != %d) is not permitted as an IPv%d address. '
+                'Did you pass in a bytes (str in Python 2) instead of'
+                ' a unicode object?'
+            )
+            raise AddressValueError(msg % (address, address_len,
+                                           expected_len, self._version))
+
+    @classmethod
+    def _ip_int_from_prefix(cls, prefixlen):
+        """Turn the prefix length into a bitwise netmask
+
+        Args:
+            prefixlen: An integer, the prefix length.
+
+        Returns:
+            An integer.
+
+        """
+        return cls._ALL_ONES ^ (cls._ALL_ONES >> prefixlen)
+
+    @classmethod
+    def _prefix_from_ip_int(cls, ip_int):
+        """Return prefix length from the bitwise netmask.
+
+        Args:
+            ip_int: An integer, the netmask in expanded bitwise format
+
+        Returns:
+            An integer, the prefix length.
+
+        Raises:
+            ValueError: If the input intermingles zeroes & ones
+        """
+        trailing_zeroes = _count_righthand_zero_bits(ip_int,
+                                                     cls._max_prefixlen)
+        prefixlen = cls._max_prefixlen - trailing_zeroes
+        leading_ones = ip_int >> trailing_zeroes
+        all_ones = (1 << prefixlen) - 1
+        if leading_ones != all_ones:
+            byteslen = cls._max_prefixlen // 8
+            details = _compat_to_bytes(ip_int, byteslen, 'big')
+            msg = 'Netmask pattern %r mixes zeroes & ones'
+            raise ValueError(msg % details)
+        return prefixlen
+
+    @classmethod
+    def _report_invalid_netmask(cls, netmask_str):
+        msg = '%r is not a valid netmask' % netmask_str
+        raise NetmaskValueError(msg)
+
+    @classmethod
+    def _prefix_from_prefix_string(cls, prefixlen_str):
+        """Return prefix length from a numeric string
+
+        Args:
+            prefixlen_str: The string to be converted
+
+        Returns:
+            An integer, the prefix length.
+
+        Raises:
+            NetmaskValueError: If the input is not a valid netmask
+        """
+        # int allows a leading +/- as well as surrounding whitespace,
+        # so we ensure that isn't the case
+        if not _BaseV4._DECIMAL_DIGITS.issuperset(prefixlen_str):
+            cls._report_invalid_netmask(prefixlen_str)
+        try:
+            prefixlen = int(prefixlen_str)
+        except ValueError:
+            cls._report_invalid_netmask(prefixlen_str)
+        if not (0 <= prefixlen <= cls._max_prefixlen):
+            cls._report_invalid_netmask(prefixlen_str)
+        return prefixlen
+
+    @classmethod
+    def _prefix_from_ip_string(cls, ip_str):
+        """Turn a netmask/hostmask string into a prefix length
+
+        Args:
+            ip_str: The netmask/hostmask to be converted
+
+        Returns:
+            An integer, the prefix length.
+
+        Raises:
+            NetmaskValueError: If the input is not a valid netmask/hostmask
+        """
+        # Parse the netmask/hostmask like an IP address.
+        try:
+            ip_int = cls._ip_int_from_string(ip_str)
+        except AddressValueError:
+            cls._report_invalid_netmask(ip_str)
+
+        # Try matching a netmask (this would be /1*0*/ as a bitwise regexp).
+        # Note that the two ambiguous cases (all-ones and all-zeroes) are
+        # treated as netmasks.
+        try:
+            return cls._prefix_from_ip_int(ip_int)
+        except ValueError:
+            pass
+
+        # Invert the bits, and try matching a /0+1+/ hostmask instead.
+        ip_int ^= cls._ALL_ONES
+        try:
+            return cls._prefix_from_ip_int(ip_int)
+        except ValueError:
+            cls._report_invalid_netmask(ip_str)
+
+    def __reduce__(self):
+        return self.__class__, (_compat_str(self),)
+
+
+class _BaseAddress(_IPAddressBase):
+
+    """A generic IP object.
+
+    This IP class contains the version independent methods which are
+    used by single IP addresses.
+    """
+
+    __slots__ = ()
+
+    def __int__(self):
+        return self._ip
+
+    def __eq__(self, other):
+        try:
+            return (self._ip == other._ip and
+                    self._version == other._version)
+        except AttributeError:
+            return NotImplemented
+
+    def __lt__(self, other):
+        if not isinstance(other, _IPAddressBase):
+            return NotImplemented
+        if not isinstance(other, _BaseAddress):
+            raise TypeError('%s and %s are not of the same type' % (
+                self, other))
+        if self._version != other._version:
+            raise TypeError('%s and %s are not of the same version' % (
+                self, other))
+        if self._ip != other._ip:
+            return self._ip < other._ip
+        return False
+
+    # Shorthand for integer addition and subtraction. This is not
+    # meant to ever support addition/subtraction of addresses.
+    def __add__(self, other):
+        if not isinstance(other, _compat_int_types):
+            return NotImplemented
+        return self.__class__(int(self) + other)
+
+    def __sub__(self, other):
+        if not isinstance(other, _compat_int_types):
+            return NotImplemented
+        return self.__class__(int(self) - other)
+
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+    def __str__(self):
+        return _compat_str(self._string_from_ip_int(self._ip))
+
+    def __hash__(self):
+        return hash(hex(int(self._ip)))
+
+    def _get_address_key(self):
+        return (self._version, self)
+
+    def __reduce__(self):
+        return self.__class__, (self._ip,)
+
+
+class _BaseNetwork(_IPAddressBase):
+
+    """A generic IP network object.
+
+    This IP class contains the version independent methods which are
+    used by networks.
+
+    """
+    def __init__(self, address):
+        self._cache = {}
+
+    def __repr__(self):
+        return '%s(%r)' % (self.__class__.__name__, _compat_str(self))
+
+    def __str__(self):
+        return '%s/%d' % (self.network_address, self.prefixlen)
+
+    def hosts(self):
+        """Generate Iterator over usable hosts in a network.
+
+        This is like __iter__ except it doesn't return the network
+        or broadcast addresses.
+
+        """
+        network = int(self.network_address)
+        broadcast = int(self.broadcast_address)
+        for x in _compat_range(network + 1, broadcast):
+            yield self._address_class(x)
+
+    def __iter__(self):
+        network = int(self.network_address)
+        broadcast = int(self.broadcast_address)
+        for x in _compat_range(network, broadcast + 1):
+            yield self._address_class(x)
+
+    def __getitem__(self, n):
+        network = int(self.network_address)
+        broadcast = int(self.broadcast_address)
+        if n >= 0:
+            if network + n > broadcast:
+                raise IndexError
+            return self._address_class(network + n)
+        else:
+            n += 1
+            if broadcast + n < network:
+                raise IndexError
+            return self._address_class(broadcast + n)
+
+    def __lt__(self, other):
+        if not isinstance(other, _IPAddressBase):
+            return NotImplemented
+        if not isinstance(other, _BaseNetwork):
+            raise TypeError('%s and %s are not of the same type' % (
+                            self, other))
+        if self._version != other._version:
+            raise TypeError('%s and %s are not of the same version' % (
+                            self, other))
+        if self.network_address != other.network_address:
+            return self.network_address < other.network_address
+        if self.netmask != other.netmask:
+            return self.netmask < other.netmask
+        return False
+
+    def __eq__(self, other):
+        try:
+            return (self._version == other._version and
+                    self.network_address == other.network_address and
+                    int(self.netmask) == int(other.netmask))
+        except AttributeError:
+            return NotImplemented
+
+    def __hash__(self):
+        return hash(int(self.network_address) ^ int(self.netmask))
+
+    def __contains__(self, other):
+        # always false if one is v4 and the other is v6.
+        if self._version != other._version:
+            return False
+        # dealing with another network.
+        if isinstance(other, _BaseNetwork):
+            return False
+        # dealing with another address
+        else:
+            # address
+            return (int(self.network_address) <= int(other._ip) <=
+                    int(self.broadcast_address))
+
+    def overlaps(self, other):
+        """Tell if self is partly contained in other."""
+        return self.network_address in other or (
+            self.broadcast_address in other or (
+                other.network_address in self or (
+                    other.broadcast_address in self)))
+
+    @property
+    def broadcast_address(self):
+        x = self._cache.get('broadcast_address')
+        if x is None:
+            x = self._address_class(int(self.network_address) |
+                                    int(self.hostmask))
+            self._cache['broadcast_address'] = x
+        return x
+
+    @property
+    def hostmask(self):
+        x = self._cache.get('hostmask')
+        if x is None:
+            x = self._address_class(int(self.netmask) ^ self._ALL_ONES)
+            self._cache['hostmask'] = x
+        return x
+
+    @property
+    def with_prefixlen(self):
+        return '%s/%d' % (self.network_address, self._prefixlen)
+
+    @property
+    def with_netmask(self):
+        return '%s/%s' % (self.network_address, self.netmask)
+
+    @property
+    def with_hostmask(self):
+        return '%s/%s' % (self.network_address, self.hostmask)
+
+    @property
+    def num_addresses(self):
+        """Number of hosts in the current subnet."""
+        return int(self.broadcast_address) - int(self.network_address) + 1
+
+    @property
+    def _address_class(self):
+        # Returning bare address objects (rather than interfaces) allows for
+        # more consistent behaviour across the network address, broadcast
+        # address and individual host addresses.
+        msg = '%s has no associated address class' % (type(self),)
+        raise NotImplementedError(msg)
+
+    @property
+    def prefixlen(self):
+        return self._prefixlen
+
+    def address_exclude(self, other):
+        """Remove an address from a larger block.
+
+        For example:
+
+            addr1 = ip_network('192.0.2.0/28')
+            addr2 = ip_network('192.0.2.1/32')
+            addr1.address_exclude(addr2) =
+                [IPv4Network('192.0.2.0/32'), IPv4Network('192.0.2.2/31'),
+                IPv4Network('192.0.2.4/30'), IPv4Network('192.0.2.8/29')]
+
+        or IPv6:
+
+            addr1 = ip_network('2001:db8::1/32')
+            addr2 = ip_network('2001:db8::1/128')
+            addr1.address_exclude(addr2) =
+                [ip_network('2001:db8::1/128'),
+                ip_network('2001:db8::2/127'),
+                ip_network('2001:db8::4/126'),
+                ip_network('2001:db8::8/125'),
+                ...
+                ip_network('2001:db8:8000::/33')]
+
+        Args:
+            other: An IPv4Network or IPv6Network object of the same type.
+
+        Returns:
+            An iterator of the IPv(4|6)Network objects which is self
+            minus other.
+
+        Raises:
+            TypeError: If self and other are of differing address
+              versions, or if other is not a network object.
+            ValueError: If other is not completely contained by self.
+
+        """
+        if not self._version == other._version:
+            raise TypeError("%s and %s are not of the same version" % (
+                            self, other))
+
+        if not isinstance(other, _BaseNetwork):
+            raise TypeError("%s is not a network object" % other)
+
+        if not other.subnet_of(self):
+            raise ValueError('%s not contained in %s' % (other, self))
+        if other == self:
+            return
+
+        # Make sure we're comparing the network of other.
+        other = other.__class__('%s/%s' % (other.network_address,
+                                           other.prefixlen))
+
+        s1, s2 = self.subnets()
+        while s1 != other and s2 != other:
+            if other.subnet_of(s1):
+                yield s2
+                s1, s2 = s1.subnets()
+            elif other.subnet_of(s2):
+                yield s1
+                s1, s2 = s2.subnets()
+            else:
+                # If we got here, there's a bug somewhere.
+                raise AssertionError('Error performing exclusion: '
+                                     's1: %s s2: %s other: %s' %
+                                     (s1, s2, other))
+        if s1 == other:
+            yield s2
+        elif s2 == other:
+            yield s1
+        else:
+            # If we got here, there's a bug somewhere.
+            raise AssertionError('Error performing exclusion: '
+                                 's1: %s s2: %s other: %s' %
+                                 (s1, s2, other))
+
+    def compare_networks(self, other):
+        """Compare two IP objects.
+
+        This is only concerned about the comparison of the integer
+        representation of the network addresses.  This means that the
+        host bits aren't considered at all in this method.  If you want
+        to compare host bits, you can easily enough do a
+        'HostA._ip < HostB._ip'
+
+        Args:
+            other: An IP object.
+
+        Returns:
+            If the IP versions of self and other are the same, returns:
+
+            -1 if self < other:
+              eg: IPv4Network('192.0.2.0/25') < IPv4Network('192.0.2.128/25')
+              IPv6Network('2001:db8::1000/124') <
+                  IPv6Network('2001:db8::2000/124')
+            0 if self == other
+              eg: IPv4Network('192.0.2.0/24') == IPv4Network('192.0.2.0/24')
+              IPv6Network('2001:db8::1000/124') ==
+                  IPv6Network('2001:db8::1000/124')
+            1 if self > other
+              eg: IPv4Network('192.0.2.128/25') > IPv4Network('192.0.2.0/25')
+                  IPv6Network('2001:db8::2000/124') >
+                      IPv6Network('2001:db8::1000/124')
+
+          Raises:
+              TypeError if the IP versions are different.
+
+        """
+        # does this need to raise a ValueError?
+        if self._version != other._version:
+            raise TypeError('%s and %s are not of the same type' % (
+                            self, other))
+        # self._version == other._version below here:
+        if self.network_address < other.network_address:
+            return -1
+        if self.network_address > other.network_address:
+            return 1
+        # self.network_address == other.network_address below here:
+        if self.netmask < other.netmask:
+            return -1
+        if self.netmask > other.netmask:
+            return 1
+        return 0
+
+    def _get_networks_key(self):
+        """Network-only key function.
+
+        Returns an object that identifies this address' network and
+        netmask. This function is a suitable "key" argument for sorted()
+        and list.sort().
+
+        """
+        return (self._version, self.network_address, self.netmask)
+
+    def subnets(self, prefixlen_diff=1, new_prefix=None):
+        """The subnets which join to make the current subnet.
+
+        In the case that self contains only one IP
+        (self._prefixlen == 32 for IPv4 or self._prefixlen == 128
+        for IPv6), yield only self.
+
+        Args:
+            prefixlen_diff: An integer, the amount the prefix length
+              should be increased by. This should not be set if
+              new_prefix is also set.
+            new_prefix: The desired new prefix length. This must be a
+              larger number (smaller prefix) than the existing prefix.
+              This should not be set if prefixlen_diff is also set.
+
+        Returns:
+            An iterator of IPv(4|6) objects.
+
+        Raises:
+            ValueError: The prefixlen_diff is too small or too large.
+                OR
+            prefixlen_diff and new_prefix are both set, or new_prefix
+              is a smaller number than the current prefix (a smaller
+              number means a larger network).
+
+        """
+        if self._prefixlen == self._max_prefixlen:
+            yield self
+            return
+
+        if new_prefix is not None:
+            if new_prefix < self._prefixlen:
+                raise ValueError('new prefix must be longer')
+            if prefixlen_diff != 1:
+                raise ValueError('cannot set prefixlen_diff and new_prefix')
+            prefixlen_diff = new_prefix - self._prefixlen
+
+        if prefixlen_diff < 0:
+            raise ValueError('prefix length diff must be > 0')
+        new_prefixlen = self._prefixlen + prefixlen_diff
+
+        if new_prefixlen > self._max_prefixlen:
+            raise ValueError(
+                'prefix length diff %d is invalid for netblock %s' % (
+                    new_prefixlen, self))
+
+        start = int(self.network_address)
+        end = int(self.broadcast_address)
+        step = (int(self.hostmask) + 1) >> prefixlen_diff
+        for new_addr in _compat_range(start, end, step):
+            current = self.__class__((new_addr, new_prefixlen))
+            yield current
+
+    def supernet(self, prefixlen_diff=1, new_prefix=None):
+        """The supernet containing the current network.
+
+        Args:
+            prefixlen_diff: An integer, the amount the prefix length of
+              the network should be decreased by.  For example, given a
+              /24 network and a prefixlen_diff of 3, a supernet with a
+              /21 netmask is returned.
+
+        Returns:
+            An IPv4 network object.
+
+        Raises:
+            ValueError: If self.prefixlen - prefixlen_diff < 0. I.e., you have
+              a negative prefix length.
+                OR
+            If prefixlen_diff and new_prefix are both set or new_prefix is a
+              larger number than the current prefix (larger number means a
+              smaller network)
+
+        """
+        if self._prefixlen == 0:
+            return self
+
+        if new_prefix is not None:
+            if new_prefix > self._prefixlen:
+                raise ValueError('new prefix must be shorter')
+            if prefixlen_diff != 1:
+                raise ValueError('cannot set prefixlen_diff and new_prefix')
+            prefixlen_diff = self._prefixlen - new_prefix
+
+        new_prefixlen = self.prefixlen - prefixlen_diff
+        if new_prefixlen < 0:
+            raise ValueError(
+                'current prefixlen is %d, cannot have a prefixlen_diff of %d' %
+                (self.prefixlen, prefixlen_diff))
+        return self.__class__((
+            int(self.network_address) & (int(self.netmask) << prefixlen_diff),
+            new_prefixlen
+        ))
+
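+    # Illustrative example for subnets()/supernet() (a sketch for
+    # exposition, not in the upstream module):
+    #
+    #     >>> net = ip_network(u'192.0.2.0/24')
+    #     >>> list(net.subnets(new_prefix=26))[:2]
+    #     [IPv4Network('192.0.2.0/26'), IPv4Network('192.0.2.64/26')]
+    #     >>> net.supernet(prefixlen_diff=2)
+    #     IPv4Network('192.0.0.0/22')
+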
+    @property
+    def is_multicast(self):
+        """Test if the address is reserved for multicast use.
+
+        Returns:
+            A boolean, True if the address is a multicast address.
+            See RFC 2373 2.7 for details.
+
+        """
+        return (self.network_address.is_multicast and
+                self.broadcast_address.is_multicast)
+
+    def subnet_of(self, other):
+        # always false if one is v4 and the other is v6.
+        if self._version != other._version:
+            return False
+        # dealing with another network.
+        if (hasattr(other, 'network_address') and
+                hasattr(other, 'broadcast_address')):
+            return (other.network_address <= self.network_address and
+                    other.broadcast_address >= self.broadcast_address)
+        # dealing with another address
+        else:
+            raise TypeError('Unable to test subnet containment with element '
+                            'of type %s' % type(other))
+
+    def supernet_of(self, other):
+        # always false if one is v4 and the other is v6.
+        if self._version != other._version:
+            return False
+        # dealing with another network.
+        if (hasattr(other, 'network_address') and
+                hasattr(other, 'broadcast_address')):
+            return (other.network_address >= self.network_address and
+                    other.broadcast_address <= self.broadcast_address)
+        # dealing with another address
+        else:
+            raise TypeError('Unable to test subnet containment with element '
+                            'of type %s' % type(other))
+
+    @property
+    def is_reserved(self):
+        """Test if the address is otherwise IETF reserved.
+
+        Returns:
+            A boolean, True if the address is within one of the
+            reserved IPv6 Network ranges.
+
+        """
+        return (self.network_address.is_reserved and
+                self.broadcast_address.is_reserved)
+
+    @property
+    def is_link_local(self):
+        """Test if the address is reserved for link-local.
+
+        Returns:
+            A boolean, True if the address is reserved per RFC 4291.
+
+        """
+        return (self.network_address.is_link_local and
+                self.broadcast_address.is_link_local)
+
+    @property
+    def is_private(self):
+        """Test if this address is allocated for private networks.
+
+        Returns:
+            A boolean, True if the address is reserved per
+            iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+        """
+        return (self.network_address.is_private and
+                self.broadcast_address.is_private)
+
+    @property
+    def is_global(self):
+        """Test if this address is allocated for public networks.
+
+        Returns:
+            A boolean, True if the address is not reserved per
+            iana-ipv4-special-registry or iana-ipv6-special-registry.
+
+        """
+        return not self.is_private
+
+    @property
+    def is_unspecified(self):
+        """Test if the address is unspecified.
+
+        Returns:
+            A boolean, True if this is the unspecified address as defined in
+            RFC 2373 2.5.2.
+
+        """
+        return (self.network_address.is_unspecified and
+                self.broadcast_address.is_unspecified)
+
+    @property
+    def is_loopback(self):
+        """Test if the address is a loopback address.
+
+        Returns:
+            A boolean, True if the address is a loopback address as defined in
+            RFC 2373 2.5.3.
+
+        """
+        return (self.network_address.is_loopback and
+                self.broadcast_address.is_loopback)
+
+
+class _BaseV4(object):
+
+    """Base IPv4 object.
+
+    The following methods are used by IPv4 objects in both single IP
+    addresses and networks.
+
+    """
+
+    __slots__ = ()
+    _version = 4
+    # Equivalent to 255.255.255.255 or 32 bits of 1's.
+    _ALL_ONES = (2 ** IPV4LENGTH) - 1
+    _DECIMAL_DIGITS = frozenset('0123456789')
+
+    # the valid octets for host and netmasks. only useful for IPv4.
+    _valid_mask_octets = frozenset([255, 254, 252, 248, 240, 224, 192, 128, 0])
+
+    _max_prefixlen = IPV4LENGTH
+    # There are only a handful of valid v4 netmasks, so we cache them all
+    # when constructed (see _make_netmask()).
+    _netmask_cache = {}
+
+    def _explode_shorthand_ip_string(self):
+        return _compat_str(self)
+
+    @classmethod
+    def _make_netmask(cls, arg):
+        """Make a (netmask, prefix_len) tuple from the given argument.
+
+        Argument can be:
+        - an integer (the prefix length)
+        - a string representing the prefix length (e.g. "24")
+        - a string representing the prefix netmask (e.g. "255.255.255.0")
+        """
+        if arg not in cls._netmask_cache:
+            if isinstance(arg, _compat_int_types):
+                prefixlen = arg
+            else:
+                try:
+                    # Check for a netmask in prefix length form
+                    prefixlen = cls._prefix_from_prefix_string(arg)
+                except NetmaskValueError:
+                    # Check for a netmask or hostmask in dotted-quad form.
+                    # This may raise NetmaskValueError.
+                    prefixlen = cls._prefix_from_ip_string(arg)
+            netmask = IPv4Address(cls._ip_int_from_prefix(prefixlen))
+            cls._netmask_cache[arg] = netmask, prefixlen
+        return cls._netmask_cache[arg]
+
+    @classmethod
+    def _ip_int_from_string(cls, ip_str):
+        """Turn the given IP string into an integer for comparison.
+
+        Args:
+            ip_str: A string, the IP ip_str.
+
+        Returns:
+            The IP ip_str as an integer.
+
+        Raises:
+            AddressValueError: if ip_str isn't a valid IPv4 Address.
+
+        """
+        if not ip_str:
+            raise AddressValueError('Address cannot be empty')
+
+        octets = ip_str.split('.')
+        if len(octets) != 4:
+            raise AddressValueError("Expected 4 octets in %r" % ip_str)
+
+        try:
+            return _compat_int_from_byte_vals(
+                map(cls._parse_octet, octets), 'big')
+        except ValueError as exc:
+            raise AddressValueError("%s in %r" % (exc, ip_str))
+
+    @classmethod
+    def _parse_octet(cls, octet_str):
+        """Convert a decimal octet into an integer.
+
+        Args:
+            octet_str: A string, the number to parse.
+
+        Returns:
+            The octet as an integer.
+
+        Raises:
+            ValueError: if the octet isn't strictly a decimal from [0..255].
+
+        """
+        if not octet_str:
+            raise ValueError("Empty octet not permitted")
+        # Whitelist the characters, since int() allows a lot of bizarre stuff.
+        if not cls._DECIMAL_DIGITS.issuperset(octet_str):
+            msg = "Only decimal digits permitted in %r"
+            raise ValueError(msg % octet_str)
+        # We do the length check second, since the invalid character error
+        # is likely to be more informative for the user
+        if len(octet_str) > 3:
+            msg = "At most 3 characters permitted in %r"
+            raise ValueError(msg % octet_str)
+        # Convert to integer (we know digits are legal)
+        octet_int = int(octet_str, 10)
+        # Any octets that look like they *might* be written in octal,
+        # and which don't look exactly the same in both octal and
+        # decimal are rejected as ambiguous
+        if octet_int > 7 and octet_str[0] == '0':
+            msg = "Ambiguous (octal/decimal) value in %r not permitted"
+            raise ValueError(msg % octet_str)
+        if octet_int > 255:
+            raise ValueError("Octet %d (> 255) not permitted" % octet_int)
+        return octet_int
+
+    @classmethod
+    def _string_from_ip_int(cls, ip_int):
+        """Turns a 32-bit integer into dotted decimal notation.
+
+        Args:
+            ip_int: An integer, the IP address.
+
+        Returns:
+            The IP address as a string in dotted decimal notation.
+
+        """
+        return '.'.join(_compat_str(struct.unpack(b'!B', b)[0]
+                                    if isinstance(b, bytes)
+                                    else b)
+                        for b in _compat_to_bytes(ip_int, 4, 'big'))
+
+    def _is_hostmask(self, ip_str):
+        """Test if the IP string is a hostmask (rather than a netmask).
+
+        Args:
+            ip_str: A string, the potential hostmask.
+
+        Returns:
+            A boolean, True if the IP string is a hostmask.
+
+        """
+        bits = ip_str.split('.')
+        try:
+            parts = [x for x in map(int, bits) if x in self._valid_mask_octets]
+        except ValueError:
+            return False
+        if len(parts) != len(bits):
+            return False
+        if parts[0] < parts[-1]:
+            return True
+        return False
+
+    def _reverse_pointer(self):
+        """Return the reverse DNS pointer name for the IPv4 address.
+
+        This implements the method described in RFC1035 3.5.
+
+        """
+        reverse_octets = _compat_str(self).split('.')[::-1]
+        return '.'.join(reverse_octets) + '.in-addr.arpa'
+
+    @property
+    def max_prefixlen(self):
+        return self._max_prefixlen
+
+    @property
+    def version(self):
+        return self._version
+
+
+class IPv4Address(_BaseV4, _BaseAddress):
+
+    """Represent and manipulate single IPv4 Addresses."""
+
+    __slots__ = ('_ip', '__weakref__')
+
+    def __init__(self, address):
+
+        """
+        Args:
+            address: A string or integer representing the IP
+
+              Additionally, an integer can be passed, so
+              IPv4Address('192.0.2.1') == IPv4Address(3221225985).
+              or, more generally
+              IPv4Address(int(IPv4Address('192.0.2.1'))) ==
+                IPv4Address('192.0.2.1')
+
+        Raises:
+            AddressValueError: If address isn't a valid IPv4 address.
+
+        """
+        # Efficient constructor from integer.
+        if isinstance(address, _compat_int_types):
+            self._check_int_address(address)
+            self._ip = address
+            return
+
+        # Constructing from a packed address
+        if isinstance(address, bytes):
+            self._check_packed_address(address, 4)
+            bvs = _compat_bytes_to_byte_vals(address)
+            self._ip = _compat_int_from_byte_vals(bvs, 'big')
+            return
+
+        # Assume input argument to be string or any object representation
+        # which converts into a formatted IP string.
+        addr_str = _compat_str(address)
+        if '/' in addr_str:
+            raise AddressValueError("Unexpected '/' in %r" % address)
+        self._ip = self._ip_int_from_string(addr_str)
+
+    @property
+    def packed(self):
+        """The binary representation of this address."""
+        return v4_int_to_packed(self._ip)
+
+    @property
+    def is_reserved(self):
+        """Test if the address is otherwise IETF reserved.
+
+        Returns:
+            A boolean, True if the address is within the
+            reserved IPv4 Network range.
+
+        """
+        return self in self._constants._reserved_network
+
+    @property
+    def is_private(self):
+        """Test if this address is allocated for private networks.
+
+        Returns:
+            A boolean, True if the address is reserved per
+            iana-ipv4-special-registry.
+
+        """
+        return any(self in net for net in self._constants._private_networks)
+
+    @property
+    def is_multicast(self):
+        """Test if the address is reserved for multicast use.
+
+        Returns:
+            A boolean, True if the address is multicast.
+            See RFC 3171 for details.
+
+        """
+        return self in self._constants._multicast_network
+
+    @property
+    def is_unspecified(self):
+        """Test if the address is unspecified.
+
+        Returns:
+            A boolean, True if this is the unspecified address as defined in
+            RFC 5735 3.
+
+        """
+        return self == self._constants._unspecified_address
+
+    @property
+    def is_loopback(self):
+        """Test if the address is a loopback address.
+
+        Returns:
+            A boolean, True if the address is a loopback per RFC 3330.
+
+        """
+        return self in self._constants._loopback_network
+
+    @property
+    def is_link_local(self):
+        """Test if the address is reserved for link-local.
+
+        Returns:
+            A boolean, True if the address is link-local per RFC 3927.
+
+        """
+        return self in self._constants._linklocal_network
+
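+# A few of the classification properties above in use (hypothetical
+# doctest; on Python 2 this backport expects unicode text):
+#
+#     >>> IPv4Address(u'10.0.0.1').is_private
+#     True
+#     >>> IPv4Address(u'127.0.0.1').is_loopback
+#     True
+#     >>> IPv4Address(u'224.0.0.1').is_multicast
+#     True
+#     >>> IPv4Address(u'8.8.8.8').is_private
+#     False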
+
+class IPv4Interface(IPv4Address):
+
+    def __init__(self, address):
+        if isinstance(address, (bytes, _compat_int_types)):
+            IPv4Address.__init__(self, address)
+            self.network = IPv4Network(self._ip)
+            self._prefixlen = self._max_prefixlen
+            return
+
+        if isinstance(address, tuple):
+            IPv4Address.__init__(self, address[0])
+            if len(address) > 1:
+                self._prefixlen = int(address[1])
+            else:
+                self._prefixlen = self._max_prefixlen
+
+            self.network = IPv4Network(address, strict=False)
+            self.netmask = self.network.netmask
+            self.hostmask = self.network.hostmask
+            return
+
+        addr = _split_optional_netmask(address)
+        IPv4Address.__init__(self, addr[0])
+
+        self.network = IPv4Network(address, strict=False)
+        self._prefixlen = self.network._prefixlen
+
+        self.netmask = self.network.netmask
+        self.hostmask = self.network.hostmask
+
+    def __str__(self):
+        return '%s/%d' % (self._string_from_ip_int(self._ip),
+                          self.network.prefixlen)
+
+    def __eq__(self, other):
+        address_equal = IPv4Address.__eq__(self, other)
+        if not address_equal or address_equal is NotImplemented:
+            return address_equal
+        try:
+            return self.network == other.network
+        except AttributeError:
+            # An interface with an associated network is NOT the
+            # same as an unassociated address. That's why the hash
+            # takes the extra info into account.
+            return False
+
+    def __lt__(self, other):
+        address_less = IPv4Address.__lt__(self, other)
+        if address_less is NotImplemented:
+            return NotImplemented
+        try:
+            return self.network < other.network
+        except AttributeError:
+            # We *do* allow addresses and interfaces to be sorted. The
+            # unassociated address is considered less than all interfaces.
+            return False
+
+    def __hash__(self):
+        return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+    __reduce__ = _IPAddressBase.__reduce__
+
+    @property
+    def ip(self):
+        return IPv4Address(self._ip)
+
+    @property
+    def with_prefixlen(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self._prefixlen)
+
+    @property
+    def with_netmask(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self.netmask)
+
+    @property
+    def with_hostmask(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self.hostmask)
+
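+    # The three string forms side by side (hypothetical doctest):
+    #
+    #     >>> i = IPv4Interface(u'192.0.2.5/24')
+    #     >>> i.with_prefixlen == '192.0.2.5/24'
+    #     True
+    #     >>> i.with_netmask == '192.0.2.5/255.255.255.0'
+    #     True
+    #     >>> i.with_hostmask == '192.0.2.5/0.0.0.255'
+    #     True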
+
+class IPv4Network(_BaseV4, _BaseNetwork):
+
+    """This class represents and manipulates 32-bit IPv4 network + addresses..
+
+    Attributes: [examples for IPv4Network('192.0.2.0/27')]
+        .network_address: IPv4Address('192.0.2.0')
+        .hostmask: IPv4Address('0.0.0.31')
+        .broadcast_address: IPv4Address('192.0.2.31')
+        .netmask: IPv4Address('255.255.255.224')
+        .prefixlen: 27
+
+    """
+    # Class to use when creating address objects
+    _address_class = IPv4Address
+
+    def __init__(self, address, strict=True):
+
+        """Instantiate a new IPv4 network object.
+
+        Args:
+            address: A string or integer representing the IP [& network].
+              '192.0.2.0/24'
+              '192.0.2.0/255.255.255.0'
+              '192.0.2.0/0.0.0.255'
+              are all functionally the same in IPv4. Similarly,
+              '192.0.2.1'
+              '192.0.2.1/255.255.255.255'
+              '192.0.2.1/32'
+              are also functionally equivalent. That is to say, failing to
+              provide a subnetmask will create an object with a mask of /32.
+
+              If the mask (portion after the / in the argument) is given in
+              dotted quad form, it is treated as a netmask if it starts with a
+              non-zero field (e.g. /255.0.0.0 == /8) and as a hostmask if it
+              starts with a zero field (e.g. 0.255.255.255 == /8), with the
+              single exception of an all-zero mask which is treated as a
+              netmask == /0. If no mask is given, a default of /32 is used.
+
+              Additionally, an integer can be passed, so
+              IPv4Network('192.0.2.1') == IPv4Network(3221225985)
+              or, more generally
+              IPv4Interface(int(IPv4Interface('192.0.2.1'))) ==
+                IPv4Interface('192.0.2.1')
+
+        Raises:
+            AddressValueError: If ipaddress isn't a valid IPv4 address.
+            NetmaskValueError: If the netmask isn't valid for
+              an IPv4 address.
+            ValueError: If strict is True and a network address is not
+              supplied.
+
+        """
+        _BaseNetwork.__init__(self, address)
+
+        # Constructing from a packed address or integer
+        if isinstance(address, (_compat_int_types, bytes)):
+            self.network_address = IPv4Address(address)
+            self.netmask, self._prefixlen = self._make_netmask(
+                self._max_prefixlen)
+            # fixme: address/network test here.
+            return
+
+        if isinstance(address, tuple):
+            if len(address) > 1:
+                arg = address[1]
+            else:
+                # We weren't given an address[1]
+                arg = self._max_prefixlen
+            self.network_address = IPv4Address(address[0])
+            self.netmask, self._prefixlen = self._make_netmask(arg)
+            packed = int(self.network_address)
+            if packed & int(self.netmask) != packed:
+                if strict:
+                    raise ValueError('%s has host bits set' % self)
+                else:
+                    self.network_address = IPv4Address(packed &
+                                                       int(self.netmask))
+            return
+
+        # Assume input argument to be string or any object representation
+        # which converts into a formatted IP prefix string.
+        addr = _split_optional_netmask(address)
+        self.network_address = IPv4Address(self._ip_int_from_string(addr[0]))
+
+        if len(addr) == 2:
+            arg = addr[1]
+        else:
+            arg = self._max_prefixlen
+        self.netmask, self._prefixlen = self._make_netmask(arg)
+
+        if strict:
+            if (IPv4Address(int(self.network_address) & int(self.netmask)) !=
+                    self.network_address):
+                raise ValueError('%s has host bits set' % self)
+        self.network_address = IPv4Address(int(self.network_address) &
+                                           int(self.netmask))
+
+        if self._prefixlen == (self._max_prefixlen - 1):
+            self.hosts = self.__iter__
+
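+    # The equivalences described in the docstring, concretely (hypothetical
+    # doctest):
+    #
+    #     >>> (IPv4Network(u'192.0.2.0/24') ==
+    #     ...  IPv4Network(u'192.0.2.0/255.255.255.0') ==
+    #     ...  IPv4Network(u'192.0.2.0/0.0.0.255'))
+    #     True
+    #     >>> IPv4Network(u'192.0.2.1/24')
+    #     Traceback (most recent call last):
+    #       ...
+    #     ValueError: 192.0.2.1/24 has host bits set
+    #     >>> IPv4Network(u'192.0.2.1/24', strict=False) == IPv4Network(u'192.0.2.0/24')
+    #     True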
+    @property
+    def is_global(self):
+        """Test if this address is allocated for public networks.
+
+        Returns:
+            A boolean, True if the address is not reserved per
+            iana-ipv4-special-registry.
+
+        """
+        return (not (self.network_address in IPv4Network('100.64.0.0/10') and
+                self.broadcast_address in IPv4Network('100.64.0.0/10')) and
+                not self.is_private)
+
+
+class _IPv4Constants(object):
+
+    _linklocal_network = IPv4Network('169.254.0.0/16')
+
+    _loopback_network = IPv4Network('127.0.0.0/8')
+
+    _multicast_network = IPv4Network('224.0.0.0/4')
+
+    _private_networks = [
+        IPv4Network('0.0.0.0/8'),
+        IPv4Network('10.0.0.0/8'),
+        IPv4Network('127.0.0.0/8'),
+        IPv4Network('169.254.0.0/16'),
+        IPv4Network('172.16.0.0/12'),
+        IPv4Network('192.0.0.0/29'),
+        IPv4Network('192.0.0.170/31'),
+        IPv4Network('192.0.2.0/24'),
+        IPv4Network('192.168.0.0/16'),
+        IPv4Network('198.18.0.0/15'),
+        IPv4Network('198.51.100.0/24'),
+        IPv4Network('203.0.113.0/24'),
+        IPv4Network('240.0.0.0/4'),
+        IPv4Network('255.255.255.255/32'),
+    ]
+
+    _reserved_network = IPv4Network('240.0.0.0/4')
+
+    _unspecified_address = IPv4Address('0.0.0.0')
+
+
+IPv4Address._constants = _IPv4Constants
+
+
+class _BaseV6(object):
+
+    """Base IPv6 object.
+
+    The following methods are used by IPv6 objects in both single IP
+    addresses and networks.
+
+    """
+
+    __slots__ = ()
+    _version = 6
+    _ALL_ONES = (2 ** IPV6LENGTH) - 1
+    _HEXTET_COUNT = 8
+    _HEX_DIGITS = frozenset('0123456789ABCDEFabcdef')
+    _max_prefixlen = IPV6LENGTH
+
+    # There are only a bunch of valid v6 netmasks, so we cache them all
+    # when constructed (see _make_netmask()).
+    _netmask_cache = {}
+
+    @classmethod
+    def _make_netmask(cls, arg):
+        """Make a (netmask, prefix_len) tuple from the given argument.
+
+        Argument can be:
+        - an integer (the prefix length)
+        - a string representing the prefix length (e.g. "24")
+        - a string representing the prefix netmask (e.g. "255.255.255.0")
+        """
+        if arg not in cls._netmask_cache:
+            if isinstance(arg, _compat_int_types):
+                prefixlen = arg
+            else:
+                prefixlen = cls._prefix_from_prefix_string(arg)
+            netmask = IPv6Address(cls._ip_int_from_prefix(prefixlen))
+            cls._netmask_cache[arg] = netmask, prefixlen
+        return cls._netmask_cache[arg]
+
+    @classmethod
+    def _ip_int_from_string(cls, ip_str):
+        """Turn an IPv6 ip_str into an integer.
+
+        Args:
+            ip_str: A string, the IPv6 ip_str.
+
+        Returns:
+            An int, the IPv6 address
+
+        Raises:
+            AddressValueError: if ip_str isn't a valid IPv6 Address.
+
+        """
+        if not ip_str:
+            raise AddressValueError('Address cannot be empty')
+
+        parts = ip_str.split(':')
+
+        # An IPv6 address needs at least 2 colons (3 parts).
+        _min_parts = 3
+        if len(parts) < _min_parts:
+            msg = "At least %d parts expected in %r" % (_min_parts, ip_str)
+            raise AddressValueError(msg)
+
+        # If the address has an IPv4-style suffix, convert it to hexadecimal.
+        if '.' in parts[-1]:
+            try:
+                ipv4_int = IPv4Address(parts.pop())._ip
+            except AddressValueError as exc:
+                raise AddressValueError("%s in %r" % (exc, ip_str))
+            parts.append('%x' % ((ipv4_int >> 16) & 0xFFFF))
+            parts.append('%x' % (ipv4_int & 0xFFFF))
+
+        # An IPv6 address can't have more than 8 colons (9 parts).
+        # The extra colon comes from using the "::" notation for a single
+        # leading or trailing zero part.
+        _max_parts = cls._HEXTET_COUNT + 1
+        if len(parts) > _max_parts:
+            msg = "At most %d colons permitted in %r" % (
+                _max_parts - 1, ip_str)
+            raise AddressValueError(msg)
+
+        # Disregarding the endpoints, find '::' with nothing in between.
+        # This indicates that a run of zeroes has been skipped.
+        skip_index = None
+        for i in _compat_range(1, len(parts) - 1):
+            if not parts[i]:
+                if skip_index is not None:
+                    # Can't have more than one '::'
+                    msg = "At most one '::' permitted in %r" % ip_str
+                    raise AddressValueError(msg)
+                skip_index = i
+
+        # parts_hi is the number of parts to copy from above/before the '::'
+        # parts_lo is the number of parts to copy from below/after the '::'
+        if skip_index is not None:
+            # If we found a '::', then check if it also covers the endpoints.
+            parts_hi = skip_index
+            parts_lo = len(parts) - skip_index - 1
+            if not parts[0]:
+                parts_hi -= 1
+                if parts_hi:
+                    msg = "Leading ':' only permitted as part of '::' in %r"
+                    raise AddressValueError(msg % ip_str)  # ^: requires ^::
+            if not parts[-1]:
+                parts_lo -= 1
+                if parts_lo:
+                    msg = "Trailing ':' only permitted as part of '::' in %r"
+                    raise AddressValueError(msg % ip_str)  # :$ requires ::$
+            parts_skipped = cls._HEXTET_COUNT - (parts_hi + parts_lo)
+            if parts_skipped < 1:
+                msg = "Expected at most %d other parts with '::' in %r"
+                raise AddressValueError(msg % (cls._HEXTET_COUNT - 1, ip_str))
+        else:
+            # Otherwise, allocate the entire address to parts_hi.  The
+            # endpoints could still be empty, but _parse_hextet() will check
+            # for that.
+            if len(parts) != cls._HEXTET_COUNT:
+                msg = "Exactly %d parts expected without '::' in %r"
+                raise AddressValueError(msg % (cls._HEXTET_COUNT, ip_str))
+            if not parts[0]:
+                msg = "Leading ':' only permitted as part of '::' in %r"
+                raise AddressValueError(msg % ip_str)  # ^: requires ^::
+            if not parts[-1]:
+                msg = "Trailing ':' only permitted as part of '::' in %r"
+                raise AddressValueError(msg % ip_str)  # :$ requires ::$
+            parts_hi = len(parts)
+            parts_lo = 0
+            parts_skipped = 0
+
+        try:
+            # Now, parse the hextets into a 128-bit integer.
+            ip_int = 0
+            for i in range(parts_hi):
+                ip_int <<= 16
+                ip_int |= cls._parse_hextet(parts[i])
+            ip_int <<= 16 * parts_skipped
+            for i in range(-parts_lo, 0):
+                ip_int <<= 16
+                ip_int |= cls._parse_hextet(parts[i])
+            return ip_int
+        except ValueError as exc:
+            raise AddressValueError("%s in %r" % (exc, ip_str))
+
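+    # End to end, the parser above produces a 128-bit integer (hypothetical
+    # doctest):
+    #
+    #     >>> int(IPv6Address(u'2001:db8::1'))
+    #     42540766411282592856903984951653826561
+    #     >>> int(IPv6Address(u'::ffff:192.0.2.1'))  # embedded IPv4 suffix
+    #     281473902969345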
+    @classmethod
+    def _parse_hextet(cls, hextet_str):
+        """Convert an IPv6 hextet string into an integer.
+
+        Args:
+            hextet_str: A string, the number to parse.
+
+        Returns:
+            The hextet as an integer.
+
+        Raises:
+            ValueError: if the input isn't strictly a hex number from
+              [0..FFFF].
+
+        """
+        # Whitelist the characters, since int() allows a lot of bizarre stuff.
+        if not cls._HEX_DIGITS.issuperset(hextet_str):
+            raise ValueError("Only hex digits permitted in %r" % hextet_str)
+        # We do the length check second, since the invalid character error
+        # is likely to be more informative for the user
+        if len(hextet_str) > 4:
+            msg = "At most 4 characters permitted in %r"
+            raise ValueError(msg % hextet_str)
+        # Length check means we can skip checking the integer value
+        return int(hextet_str, 16)
+
+    @classmethod
+    def _compress_hextets(cls, hextets):
+        """Compresses a list of hextets.
+
+        Compresses a list of strings, replacing the longest continuous
+        sequence of "0" in the list with "" and adding empty strings at
+        the beginning or at the end of the string such that subsequently
+        calling ":".join(hextets) will produce the compressed version of
+        the IPv6 address.
+
+        Args:
+            hextets: A list of strings, the hextets to compress.
+
+        Returns:
+            A list of strings.
+
+        """
+        best_doublecolon_start = -1
+        best_doublecolon_len = 0
+        doublecolon_start = -1
+        doublecolon_len = 0
+        for index, hextet in enumerate(hextets):
+            if hextet == '0':
+                doublecolon_len += 1
+                if doublecolon_start == -1:
+                    # Start of a sequence of zeros.
+                    doublecolon_start = index
+                if doublecolon_len > best_doublecolon_len:
+                    # This is the longest sequence of zeros so far.
+                    best_doublecolon_len = doublecolon_len
+                    best_doublecolon_start = doublecolon_start
+            else:
+                doublecolon_len = 0
+                doublecolon_start = -1
+
+        if best_doublecolon_len > 1:
+            best_doublecolon_end = (best_doublecolon_start +
+                                    best_doublecolon_len)
+            # For zeros at the end of the address.
+            if best_doublecolon_end == len(hextets):
+                hextets += ['']
+            hextets[best_doublecolon_start:best_doublecolon_end] = ['']
+            # For zeros at the beginning of the address.
+            if best_doublecolon_start == 0:
+                hextets = [''] + hextets
+
+        return hextets
+
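+    # For example (hypothetical doctest):
+    #
+    #     >>> _BaseV6._compress_hextets(['2001', 'db8', '0', '0', '0', '0', '0', '1'])
+    #     ['2001', 'db8', '', '1']
+    #     >>> ':'.join(['2001', 'db8', '', '1'])
+    #     '2001:db8::1'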
+    @classmethod
+    def _string_from_ip_int(cls, ip_int=None):
+        """Turns a 128-bit integer into hexadecimal notation.
+
+        Args:
+            ip_int: An integer, the IP address.
+
+        Returns:
+            A string, the hexadecimal representation of the address.
+
+        Raises:
+            ValueError: The address is bigger than 128 bits of all ones.
+
+        """
+        if ip_int is None:
+            ip_int = int(cls._ip)
+
+        if ip_int > cls._ALL_ONES:
+            raise ValueError('IPv6 address is too large')
+
+        hex_str = '%032x' % ip_int
+        hextets = ['%x' % int(hex_str[x:x + 4], 16) for x in range(0, 32, 4)]
+
+        hextets = cls._compress_hextets(hextets)
+        return ':'.join(hextets)
+
+    def _explode_shorthand_ip_string(self):
+        """Expand a shortened IPv6 address.
+
+        Args:
+            ip_str: A string, the IPv6 address.
+
+        Returns:
+            A string, the expanded IPv6 address.
+
+        """
+        if isinstance(self, IPv6Network):
+            ip_str = _compat_str(self.network_address)
+        elif isinstance(self, IPv6Interface):
+            ip_str = _compat_str(self.ip)
+        else:
+            ip_str = _compat_str(self)
+
+        ip_int = self._ip_int_from_string(ip_str)
+        hex_str = '%032x' % ip_int
+        parts = [hex_str[x:x + 4] for x in range(0, 32, 4)]
+        if isinstance(self, (_BaseNetwork, IPv6Interface)):
+            return '%s/%d' % (':'.join(parts), self._prefixlen)
+        return ':'.join(parts)
+
+    def _reverse_pointer(self):
+        """Return the reverse DNS pointer name for the IPv6 address.
+
+        This implements the method described in RFC3596 2.5.
+
+        """
+        reverse_chars = self.exploded[::-1].replace(':', '')
+        return '.'.join(reverse_chars) + '.ip6.arpa'
+
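+    # One label per hex nibble of the exploded address, reversed
+    # (hypothetical doctest):
+    #
+    #     >>> IPv6Address(u'2001:db8::1').reverse_pointer == (
+    #     ...     '1.0.0.0.' + '0.' * 20 + '8.b.d.0.1.0.0.2.ip6.arpa')
+    #     True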
+    @property
+    def max_prefixlen(self):
+        return self._max_prefixlen
+
+    @property
+    def version(self):
+        return self._version
+
+
+class IPv6Address(_BaseV6, _BaseAddress):
+
+    """Represent and manipulate single IPv6 Addresses."""
+
+    __slots__ = ('_ip', '__weakref__')
+
+    def __init__(self, address):
+        """Instantiate a new IPv6 address object.
+
+        Args:
+            address: A string or integer representing the IP
+
+              Additionally, an integer can be passed, so
+              IPv6Address('2001:db8::') ==
+                IPv6Address(42540766411282592856903984951653826560)
+              or, more generally
+              IPv6Address(int(IPv6Address('2001:db8::'))) ==
+                IPv6Address('2001:db8::')
+
+        Raises:
+            AddressValueError: If address isn't a valid IPv6 address.
+
+        """
+        # Efficient constructor from integer.
+        if isinstance(address, _compat_int_types):
+            self._check_int_address(address)
+            self._ip = address
+            return
+
+        # Constructing from a packed address
+        if isinstance(address, bytes):
+            self._check_packed_address(address, 16)
+            bvs = _compat_bytes_to_byte_vals(address)
+            self._ip = _compat_int_from_byte_vals(bvs, 'big')
+            return
+
+        # Assume input argument to be string or any object representation
+        # which converts into a formatted IP string.
+        addr_str = _compat_str(address)
+        if '/' in addr_str:
+            raise AddressValueError("Unexpected '/' in %r" % address)
+        self._ip = self._ip_int_from_string(addr_str)
+
+    @property
+    def packed(self):
+        """The binary representation of this address."""
+        return v6_int_to_packed(self._ip)
+
+    @property
+    def is_multicast(self):
+        """Test if the address is reserved for multicast use.
+
+        Returns:
+            A boolean, True if the address is a multicast address.
+            See RFC 2373 2.7 for details.
+
+        """
+        return self in self._constants._multicast_network
+
+    @property
+    def is_reserved(self):
+        """Test if the address is otherwise IETF reserved.
+
+        Returns:
+            A boolean, True if the address is within one of the
+            reserved IPv6 Network ranges.
+
+        """
+        return any(self in x for x in self._constants._reserved_networks)
+
+    @property
+    def is_link_local(self):
+        """Test if the address is reserved for link-local.
+
+        Returns:
+            A boolean, True if the address is reserved per RFC 4291.
+
+        """
+        return self in self._constants._linklocal_network
+
+    @property
+    def is_site_local(self):
+        """Test if the address is reserved for site-local.
+
+        Note that the site-local address space has been deprecated by RFC 3879.
+        Use is_private to test if this address is in the space of unique local
+        addresses as defined by RFC 4193.
+
+        Returns:
+            A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+        """
+        return self in self._constants._sitelocal_network
+
+    @property
+    def is_private(self):
+        """Test if this address is allocated for private networks.
+
+        Returns:
+            A boolean, True if the address is reserved per
+            iana-ipv6-special-registry.
+
+        """
+        return any(self in net for net in self._constants._private_networks)
+
+    @property
+    def is_global(self):
+        """Test if this address is allocated for public networks.
+
+        Returns:
+            A boolean, True if the address is not reserved per
+            iana-ipv6-special-registry.
+
+        """
+        return not self.is_private
+
+    @property
+    def is_unspecified(self):
+        """Test if the address is unspecified.
+
+        Returns:
+            A boolean, True if this is the unspecified address as defined in
+            RFC 2373 2.5.2.
+
+        """
+        return self._ip == 0
+
+    @property
+    def is_loopback(self):
+        """Test if the address is a loopback address.
+
+        Returns:
+            A boolean, True if the address is a loopback address as defined in
+            RFC 2373 2.5.3.
+
+        """
+        return self._ip == 1
+
+    @property
+    def ipv4_mapped(self):
+        """Return the IPv4 mapped address.
+
+        Returns:
+            If the IPv6 address is a v4 mapped address, return the
+            IPv4 mapped address. Return None otherwise.
+
+        """
+        if (self._ip >> 32) != 0xFFFF:
+            return None
+        return IPv4Address(self._ip & 0xFFFFFFFF)
+
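+    # For example (hypothetical doctest):
+    #
+    #     >>> IPv6Address(u'::ffff:192.0.2.1').ipv4_mapped == IPv4Address(u'192.0.2.1')
+    #     True
+    #     >>> IPv6Address(u'2001:db8::1').ipv4_mapped is None
+    #     True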
+    @property
+    def teredo(self):
+        """Tuple of embedded teredo IPs.
+
+        Returns:
+            Tuple of the (server, client) IPs or None if the address
+            doesn't appear to be a teredo address (doesn't start with
+            2001::/32)
+
+        """
+        if (self._ip >> 96) != 0x20010000:
+            return None
+        return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF),
+                IPv4Address(~self._ip & 0xFFFFFFFF))
+
+    @property
+    def sixtofour(self):
+        """Return the IPv4 6to4 embedded address.
+
+        Returns:
+            The IPv4 6to4-embedded address if present or None if the
+            address doesn't appear to contain a 6to4 embedded address.
+
+        """
+        if (self._ip >> 112) != 0x2002:
+            return None
+        return IPv4Address((self._ip >> 80) & 0xFFFFFFFF)
+
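+    # For example, 2002:c000:201:: embeds 192.0.2.1 (0xc0000201) in bits
+    # 80..111 (hypothetical doctest):
+    #
+    #     >>> IPv6Address(u'2002:c000:201::').sixtofour == IPv4Address(u'192.0.2.1')
+    #     True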
+
+class IPv6Interface(IPv6Address):
+
+    def __init__(self, address):
+        if isinstance(address, (bytes, _compat_int_types)):
+            IPv6Address.__init__(self, address)
+            self.network = IPv6Network(self._ip)
+            self._prefixlen = self._max_prefixlen
+            return
+        if isinstance(address, tuple):
+            IPv6Address.__init__(self, address[0])
+            if len(address) > 1:
+                self._prefixlen = int(address[1])
+            else:
+                self._prefixlen = self._max_prefixlen
+            self.network = IPv6Network(address, strict=False)
+            self.netmask = self.network.netmask
+            self.hostmask = self.network.hostmask
+            return
+
+        addr = _split_optional_netmask(address)
+        IPv6Address.__init__(self, addr[0])
+        self.network = IPv6Network(address, strict=False)
+        self.netmask = self.network.netmask
+        self._prefixlen = self.network._prefixlen
+        self.hostmask = self.network.hostmask
+
+    def __str__(self):
+        return '%s/%d' % (self._string_from_ip_int(self._ip),
+                          self.network.prefixlen)
+
+    def __eq__(self, other):
+        address_equal = IPv6Address.__eq__(self, other)
+        if not address_equal or address_equal is NotImplemented:
+            return address_equal
+        try:
+            return self.network == other.network
+        except AttributeError:
+            # An interface with an associated network is NOT the
+            # same as an unassociated address. That's why the hash
+            # takes the extra info into account.
+            return False
+
+    def __lt__(self, other):
+        address_less = IPv6Address.__lt__(self, other)
+        if address_less is NotImplemented:
+            return NotImplemented
+        try:
+            return self.network < other.network
+        except AttributeError:
+            # We *do* allow addresses and interfaces to be sorted. The
+            # unassociated address is considered less than all interfaces.
+            return False
+
+    def __hash__(self):
+        return self._ip ^ self._prefixlen ^ int(self.network.network_address)
+
+    __reduce__ = _IPAddressBase.__reduce__
+
+    @property
+    def ip(self):
+        return IPv6Address(self._ip)
+
+    @property
+    def with_prefixlen(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self._prefixlen)
+
+    @property
+    def with_netmask(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self.netmask)
+
+    @property
+    def with_hostmask(self):
+        return '%s/%s' % (self._string_from_ip_int(self._ip),
+                          self.hostmask)
+
+    @property
+    def is_unspecified(self):
+        return self._ip == 0 and self.network.is_unspecified
+
+    @property
+    def is_loopback(self):
+        return self._ip == 1 and self.network.is_loopback
+
+
+class IPv6Network(_BaseV6, _BaseNetwork):
+
+    """This class represents and manipulates 128-bit IPv6 networks.
+
+    Attributes: [examples for IPv6Network('2001:db8::1000/124')]
+        .network_address: IPv6Address('2001:db8::1000')
+        .hostmask: IPv6Address('::f')
+        .broadcast_address: IPv6Address('2001:db8::100f')
+        .netmask: IPv6Address('ffff:ffff:ffff:ffff:ffff:ffff:ffff:fff0')
+        .prefixlen: 124
+
+    """
+
+    # Class to use when creating address objects
+    _address_class = IPv6Address
+
+    def __init__(self, address, strict=True):
+        """Instantiate a new IPv6 Network object.
+
+        Args:
+            address: A string or integer representing the IPv6 network or the
+              IP and prefix/netmask.
+              '2001:db8::/128'
+              '2001:db8:0000:0000:0000:0000:0000:0000/128'
+              '2001:db8::'
+              are all functionally the same in IPv6.  That is to say,
+              failing to provide a subnetmask will create an object with
+              a mask of /128.
+
+              Additionally, an integer can be passed, so
+              IPv6Network('2001:db8::') ==
+                IPv6Network(42540766411282592856903984951653826560)
+              or, more generally
+              IPv6Network(int(IPv6Network('2001:db8::'))) ==
+                IPv6Network('2001:db8::')
+
+            strict: A boolean. If true, ensure that we have been passed
+              a true network address, e.g., 2001:db8::1000/124, and not an
+              IP address on a network, e.g., 2001:db8::1/124.
+
+        Raises:
+            AddressValueError: If address isn't a valid IPv6 address.
+            NetmaskValueError: If the netmask isn't valid for
+              an IPv6 address.
+            ValueError: If strict was True and a network address was not
+              supplied.
+
+        """
+        _BaseNetwork.__init__(self, address)
+
+        # Efficient constructor from integer or packed address
+        if isinstance(address, (bytes, _compat_int_types)):
+            self.network_address = IPv6Address(address)
+            self.netmask, self._prefixlen = self._make_netmask(
+                self._max_prefixlen)
+            return
+
+        if isinstance(address, tuple):
+            if len(address) > 1:
+                arg = address[1]
+            else:
+                arg = self._max_prefixlen
+            self.netmask, self._prefixlen = self._make_netmask(arg)
+            self.network_address = IPv6Address(address[0])
+            packed = int(self.network_address)
+            if packed & int(self.netmask) != packed:
+                if strict:
+                    raise ValueError('%s has host bits set' % self)
+                else:
+                    self.network_address = IPv6Address(packed &
+                                                       int(self.netmask))
+            return
+
+        # Assume input argument to be string or any object representation
+        # which converts into a formatted IP prefix string.
+        addr = _split_optional_netmask(address)
+
+        self.network_address = IPv6Address(self._ip_int_from_string(addr[0]))
+
+        if len(addr) == 2:
+            arg = addr[1]
+        else:
+            arg = self._max_prefixlen
+        self.netmask, self._prefixlen = self._make_netmask(arg)
+
+        if strict:
+            if (IPv6Address(int(self.network_address) & int(self.netmask)) !=
+                    self.network_address):
+                raise ValueError('%s has host bits set' % self)
+        self.network_address = IPv6Address(int(self.network_address) &
+                                           int(self.netmask))
+
+        if self._prefixlen == (self._max_prefixlen - 1):
+            self.hosts = self.__iter__
+
+    def hosts(self):
+        """Generate Iterator over usable hosts in a network.
+
+          This is like __iter__ except it doesn't return the
+          Subnet-Router anycast address.
+
+        """
+        network = int(self.network_address)
+        broadcast = int(self.broadcast_address)
+        for x in _compat_range(network + 1, broadcast + 1):
+            yield self._address_class(x)
+
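+    # For example (hypothetical doctest; the all-zeroes Subnet-Router
+    # anycast address is excluded, the highest address is not):
+    #
+    #     >>> [str(h) for h in IPv6Network(u'2001:db8::/126').hosts()]
+    #     ['2001:db8::1', '2001:db8::2', '2001:db8::3']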
+    @property
+    def is_site_local(self):
+        """Test if the address is reserved for site-local.
+
+        Note that the site-local address space has been deprecated by RFC 3879.
+        Use is_private to test if this address is in the space of unique local
+        addresses as defined by RFC 4193.
+
+        Returns:
+            A boolean, True if the address is reserved per RFC 3513 2.5.6.
+
+        """
+        return (self.network_address.is_site_local and
+                self.broadcast_address.is_site_local)
+
+
+class _IPv6Constants(object):
+
+    _linklocal_network = IPv6Network('fe80::/10')
+
+    _multicast_network = IPv6Network('ff00::/8')
+
+    _private_networks = [
+        IPv6Network('::1/128'),
+        IPv6Network('::/128'),
+        IPv6Network('::ffff:0:0/96'),
+        IPv6Network('100::/64'),
+        IPv6Network('2001::/23'),
+        IPv6Network('2001:2::/48'),
+        IPv6Network('2001:db8::/32'),
+        IPv6Network('2001:10::/28'),
+        IPv6Network('fc00::/7'),
+        IPv6Network('fe80::/10'),
+    ]
+
+    _reserved_networks = [
+        IPv6Network('::/8'), IPv6Network('100::/8'),
+        IPv6Network('200::/7'), IPv6Network('400::/6'),
+        IPv6Network('800::/5'), IPv6Network('1000::/4'),
+        IPv6Network('4000::/3'), IPv6Network('6000::/3'),
+        IPv6Network('8000::/3'), IPv6Network('A000::/3'),
+        IPv6Network('C000::/3'), IPv6Network('E000::/4'),
+        IPv6Network('F000::/5'), IPv6Network('F800::/6'),
+        IPv6Network('FE00::/9'),
+    ]
+
+    _sitelocal_network = IPv6Network('fec0::/10')
+
+
+IPv6Address._constants = _IPv6Constants

BIN
venv/lib/python2.7/site-packages/pip/_vendor/ipaddress.pyc


+ 326 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.py

@@ -0,0 +1,326 @@
+"""
+lockfile.py - Platform-independent advisory file locks.
+
+Requires Python 2.5 unless you apply 2.4.diff
+Locking is done on a per-thread basis instead of a per-process basis.
+
+Usage:
+
+>>> lock = LockFile('somefile')
+>>> try:
+...     lock.acquire()
+... except AlreadyLocked:
+...     print 'somefile', 'is locked already.'
+... except LockFailed:
+...     print 'somefile', 'can\\'t be locked.'
+... else:
+...     print 'got lock'
+got lock
+>>> print lock.is_locked()
+True
+>>> lock.release()
+
+>>> lock = LockFile('somefile')
+>>> print lock.is_locked()
+False
+>>> with lock:
+...    print lock.is_locked()
+True
+>>> print lock.is_locked()
+False
+
+>>> lock = LockFile('somefile')
+>>> # It is okay to lock twice from the same thread...
+>>> with lock:
+...     lock.acquire()
+...
+>>> # Though no counter is kept, so you can't unlock multiple times...
+>>> print lock.is_locked()
+False
+
+Exceptions:
+
+    Error - base class for other exceptions
+        LockError - base class for all locking exceptions
+            AlreadyLocked - Another thread or process already holds the lock
+            LockFailed - Lock failed for some other reason
+        UnlockError - base class for all unlocking exceptions
+            AlreadyUnlocked - File was not locked.
+            NotMyLock - File was locked but not by the current thread/process
+"""
+
+from __future__ import absolute_import
+
+import sys
+import socket
+import os
+import threading
+import time
+import urllib
+import warnings
+import functools
+
+# Work with PEP8 and non-PEP8 versions of threading module.
+if not hasattr(threading, "current_thread"):
+    threading.current_thread = threading.currentThread
+if not hasattr(threading.Thread, "get_name"):
+    threading.Thread.get_name = threading.Thread.getName
+
+__all__ = ['Error', 'LockError', 'LockTimeout', 'AlreadyLocked',
+           'LockFailed', 'UnlockError', 'NotLocked', 'NotMyLock',
+           'LinkLockFile', 'MkdirLockFile', 'SQLiteLockFile',
+           'LockBase', 'locked']
+
+class Error(Exception):
+    """
+    Base class for other exceptions.
+
+    >>> try:
+    ...   raise Error
+    ... except Exception:
+    ...   pass
+    """
+    pass
+
+class LockError(Error):
+    """
+    Base class for error arising from attempts to acquire the lock.
+
+    >>> try:
+    ...   raise LockError
+    ... except Error:
+    ...   pass
+    """
+    pass
+
+class LockTimeout(LockError):
+    """Raised when lock creation fails within a user-defined period of time.
+
+    >>> try:
+    ...   raise LockTimeout
+    ... except LockError:
+    ...   pass
+    """
+    pass
+
+class AlreadyLocked(LockError):
+    """Some other thread/process is locking the file.
+
+    >>> try:
+    ...   raise AlreadyLocked
+    ... except LockError:
+    ...   pass
+    """
+    pass
+
+class LockFailed(LockError):
+    """Lock file creation failed for some other reason.
+
+    >>> try:
+    ...   raise LockFailed
+    ... except LockError:
+    ...   pass
+    """
+    pass
+
+class UnlockError(Error):
+    """
+    Base class for errors arising from attempts to release the lock.
+
+    >>> try:
+    ...   raise UnlockError
+    ... except Error:
+    ...   pass
+    """
+    pass
+
+class NotLocked(UnlockError):
+    """Raised when an attempt is made to unlock an unlocked file.
+
+    >>> try:
+    ...   raise NotLocked
+    ... except UnlockError:
+    ...   pass
+    """
+    pass
+
+class NotMyLock(UnlockError):
+    """Raised when an attempt is made to unlock a file someone else locked.
+
+    >>> try:
+    ...   raise NotMyLock
+    ... except UnlockError:
+    ...   pass
+    """
+    pass
+
+class LockBase:
+    """Base class for platform-specific lock classes."""
+    def __init__(self, path, threaded=True, timeout=None):
+        """
+        >>> lock = LockBase('somefile')
+        >>> lock = LockBase('somefile', threaded=False)
+        """
+        self.path = path
+        self.lock_file = os.path.abspath(path) + ".lock"
+        self.hostname = socket.gethostname()
+        self.pid = os.getpid()
+        if threaded:
+            t = threading.current_thread()
+            # Thread objects in Python 2.4 and earlier do not have ident
+            # attrs.  Work around that.
+            ident = getattr(t, "ident", hash(t))
+            self.tname = "-%x" % (ident & 0xffffffff)
+        else:
+            self.tname = ""
+        dirname = os.path.dirname(self.lock_file)
+
+        # unique name is mostly about the current process, but must
+        # also contain the path -- otherwise, two adjacent locked
+        # files conflict (one file gets locked, creating lock-file and
+        # unique file, the other one gets locked, creating lock-file
+        # and overwriting the already existing lock-file, then one
+        # gets unlocked, deleting both lock-file and unique file,
+        # finally the last lock errors out upon releasing).
+        self.unique_name = os.path.join(dirname,
+                                        "%s%s.%s%s" % (self.hostname,
+                                                       self.tname,
+                                                       self.pid,
+                                                       hash(self.path)))
+        self.timeout = timeout
+
+    def acquire(self, timeout=None):
+        """
+        Acquire the lock.
+
+        * If timeout is omitted (or None), wait forever trying to lock the
+          file.
+
+        * If timeout > 0, try to acquire the lock for that many seconds.  If
+          the lock period expires and the file is still locked, raise
+          LockTimeout.
+
+        * If timeout <= 0, raise AlreadyLocked immediately if the file is
+          already locked.
+        """
+        raise NotImplementedError("implement in subclass")
+
+    def release(self):
+        """
+        Release the lock.
+
+        If the file is not locked, raise NotLocked.
+        """
+        raise NotImplementedError("implement in subclass")
+
+    def is_locked(self):
+        """
+        Tell whether or not the file is locked.
+        """
+        raise NotImplementedError("implement in subclass")
+
+    def i_am_locking(self):
+        """
+        Return True if this object is locking the file.
+        """
+        raise NotImplementedError("implement in subclass")
+
+    def break_lock(self):
+        """
+        Remove a lock.  Useful if a locking thread failed to unlock.
+        """
+        raise NotImplementedError("implement in subclass")
+
+    def __enter__(self):
+        """
+        Context manager support.
+        """
+        self.acquire()
+        return self
+
+    def __exit__(self, *_exc):
+        """
+        Context manager support.
+        """
+        self.release()
+
+    def __repr__(self):
+        return "<%s: %r -- %r>" % (self.__class__.__name__, self.unique_name,
+                                   self.path)
+
+def _fl_helper(cls, mod, *args, **kwds):
+    warnings.warn("Import from %s module instead of lockfile package" % mod,
+                  DeprecationWarning, stacklevel=2)
+    # This is a bit funky, but it's only for a while.  The way the unit tests
+    # are constructed this function winds up as an unbound method, so it
+    # actually takes three args, not two.  We want to toss out self.
+    if not isinstance(args[0], str):
+        # We are testing, avoid the first arg
+        args = args[1:]
+    if len(args) == 1 and not kwds:
+        kwds["threaded"] = True
+    return cls(*args, **kwds)
+
+def LinkFileLock(*args, **kwds):
+    """Factory function provided for backwards compatibility.
+
+    Do not use in new code.  Instead, import LinkLockFile from the
+    lockfile.linklockfile module.
+    """
+    from . import linklockfile
+    return _fl_helper(linklockfile.LinkLockFile, "lockfile.linklockfile",
+                      *args, **kwds)
+
+def MkdirFileLock(*args, **kwds):
+    """Factory function provided for backwards compatibility.
+
+    Do not use in new code.  Instead, import MkdirLockFile from the
+    lockfile.mkdirlockfile module.
+    """
+    from . import mkdirlockfile
+    return _fl_helper(mkdirlockfile.MkdirLockFile, "lockfile.mkdirlockfile",
+                      *args, **kwds)
+
+def SQLiteFileLock(*args, **kwds):
+    """Factory function provided for backwards compatibility.
+
+    Do not use in new code.  Instead, import SQLiteLockFile from the
+    lockfile.sqlitelockfile module.
+    """
+    from . import sqlitelockfile
+    return _fl_helper(sqlitelockfile.SQLiteLockFile, "lockfile.sqlitelockfile",
+                      *args, **kwds)
+
+def locked(path, timeout=None):
+    """Decorator which enables locks for decorated function.
+
+    Arguments:
+     - path: path for lockfile.
+     - timeout (optional): Timeout for acquiring lock.
+
+     Usage:
+         @locked('/var/run/myname', timeout=0)
+         def myname(...):
+             ...
+    """
+    def decor(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            lock = FileLock(path, timeout=timeout)
+            lock.acquire()
+            try:
+                return func(*args, **kwargs)
+            finally:
+                lock.release()
+        return wrapper
+    return decor
+
+if hasattr(os, "link"):
+    from . import linklockfile as _llf
+    LockFile = _llf.LinkLockFile
+else:
+    from . import mkdirlockfile as _mlf
+    LockFile = _mlf.MkdirLockFile
+
+FileLock = LockFile
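+# A minimal usage sketch of the public surface (hypothetical; the lock
+# path below is illustrative):
+#
+#     >>> lock = FileLock('/tmp/demo')   # LinkLockFile or MkdirLockFile
+#     >>> with lock:
+#     ...     print lock.i_am_locking()
+#     True
+#     >>> print lock.is_locked()
+#     False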
+

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/__init__.pyc


+ 73 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.py

@@ -0,0 +1,73 @@
+from __future__ import absolute_import
+
+import time
+import os
+
+from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
+               AlreadyLocked)
+
+class LinkLockFile(LockBase):
+    """Lock access to a file using atomic property of link(2).
+
+    >>> lock = LinkLockFile('somefile')
+    >>> lock = LinkLockFile('somefile', threaded=False)
+    """
+
+    def acquire(self, timeout=None):
+        try:
+            open(self.unique_name, "wb").close()
+        except IOError:
+            raise LockFailed("failed to create %s" % self.unique_name)
+
+        timeout = timeout is not None and timeout or self.timeout
+        end_time = time.time()
+        if timeout is not None and timeout > 0:
+            end_time += timeout
+
+        while True:
+            # Try and create a hard link to it.
+            try:
+                os.link(self.unique_name, self.lock_file)
+            except OSError:
+                # Link creation failed.  Maybe we've double-locked?
+                nlinks = os.stat(self.unique_name).st_nlink
+                if nlinks == 2:
+                    # The original link plus the one I created == 2.  We're
+                    # good to go.
+                    return
+                else:
+                    # Otherwise the lock creation failed.
+                    if timeout is not None and time.time() > end_time:
+                        os.unlink(self.unique_name)
+                        if timeout > 0:
+                            raise LockTimeout("Timeout waiting to acquire"
+                                              " lock for %s" %
+                                              self.path)
+                        else:
+                            raise AlreadyLocked("%s is already locked" %
+                                                self.path)
+                    time.sleep(timeout is not None and timeout/10 or 0.1)
+            else:
+                # Link creation succeeded.  We're good to go.
+                return
+
+    def release(self):
+        if not self.is_locked():
+            raise NotLocked("%s is not locked" % self.path)
+        elif not os.path.exists(self.unique_name):
+            raise NotMyLock("%s is locked, but not by me" % self.path)
+        os.unlink(self.unique_name)
+        os.unlink(self.lock_file)
+
+    def is_locked(self):
+        return os.path.exists(self.lock_file)
+
+    def i_am_locking(self):
+        return (self.is_locked() and
+                os.path.exists(self.unique_name) and
+                os.stat(self.unique_name).st_nlink == 2)
+
+    def break_lock(self):
+        if os.path.exists(self.lock_file):
+            os.unlink(self.lock_file)
+

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/linklockfile.pyc


+ 83 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.py

@@ -0,0 +1,83 @@
+from __future__ import absolute_import, division
+
+import time
+import os
+import sys
+import errno
+
+from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
+               AlreadyLocked)
+
+class MkdirLockFile(LockBase):
+    """Lock file by creating a directory."""
+    def __init__(self, path, threaded=True, timeout=None):
+        """
+        >>> lock = MkdirLockFile('somefile')
+        >>> lock = MkdirLockFile('somefile', threaded=False)
+        """
+        LockBase.__init__(self, path, threaded, timeout)
+        # Lock file itself is a directory.  Place the unique file name into
+        # it.
+        self.unique_name  = os.path.join(self.lock_file,
+                                         "%s.%s%s" % (self.hostname,
+                                                      self.tname,
+                                                      self.pid))
+
+    def acquire(self, timeout=None):
+        timeout = timeout is not None and timeout or self.timeout
+        end_time = time.time()
+        if timeout is not None and timeout > 0:
+            end_time += timeout
+
+        if timeout is None:
+            wait = 0.1
+        else:
+            wait = max(0, timeout / 10)
+
+        while True:
+            try:
+                os.mkdir(self.lock_file)
+            except OSError:
+                err = sys.exc_info()[1]
+                if err.errno == errno.EEXIST:
+                    # Already locked.
+                    if os.path.exists(self.unique_name):
+                        # Already locked by me.
+                        return
+                    if timeout is not None and time.time() > end_time:
+                        if timeout > 0:
+                            raise LockTimeout("Timeout waiting to acquire"
+                                              " lock for %s" %
+                                              self.path)
+                        else:
+                            # Someone else has the lock.
+                            raise AlreadyLocked("%s is already locked" %
+                                                self.path)
+                    time.sleep(wait)
+                else:
+                    # Couldn't create the lock for some other reason
+                    raise LockFailed("failed to create %s" % self.lock_file)
+            else:
+                open(self.unique_name, "wb").close()
+                return
+
+    def release(self):
+        if not self.is_locked():
+            raise NotLocked("%s is not locked" % self.path)
+        elif not os.path.exists(self.unique_name):
+            raise NotMyLock("%s is locked, but not by me" % self.path)
+        os.unlink(self.unique_name)
+        os.rmdir(self.lock_file)
+
+    def is_locked(self):
+        return os.path.exists(self.lock_file)
+
+    def i_am_locking(self):
+        return (self.is_locked() and
+                os.path.exists(self.unique_name))
+
+    def break_lock(self):
+        if os.path.exists(self.lock_file):
+            for name in os.listdir(self.lock_file):
+                os.unlink(os.path.join(self.lock_file, name))
+            os.rmdir(self.lock_file)

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/mkdirlockfile.pyc


+ 193 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.py

@@ -0,0 +1,193 @@
+# -*- coding: utf-8 -*-
+
+# pidlockfile.py
+#
+# Copyright © 2008–2009 Ben Finney <ben+python@benfinney.id.au>
+#
+# This is free software: you may copy, modify, and/or distribute this work
+# under the terms of the Python Software Foundation License, version 2 or
+# later as published by the Python Software Foundation.
+# No warranty expressed or implied. See the file LICENSE.PSF-2 for details.
+
+""" Lockfile behaviour implemented via Unix PID files.
+    """
+
+from __future__ import absolute_import
+
+import os
+import sys
+import errno
+import time
+
+from . import (LockBase, AlreadyLocked, LockFailed, NotLocked, NotMyLock,
+               LockTimeout)
+
+
+class PIDLockFile(LockBase):
+    """ Lockfile implemented as a Unix PID file.
+
+    The lock file is a normal file named by the attribute `path`.
+    A lock's PID file contains a single line of text, containing
+    the process ID (PID) of the process that acquired the lock.
+
+    >>> lock = PIDLockFile('somefile')
+    >>> lock = PIDLockFile('somefile')
+    """
+
+    def __init__(self, path, threaded=False, timeout=None):
+        # pid lockfiles don't support threaded operation, so always force
+        # False as the threaded arg.
+        LockBase.__init__(self, path, False, timeout)
+        dirname = os.path.dirname(self.lock_file)
+        basename = os.path.split(self.path)[-1]
+        self.unique_name = self.path
+
+    def read_pid(self):
+        """ Get the PID from the lock file.
+            """
+        return read_pid_from_pidfile(self.path)
+
+    def is_locked(self):
+        """ Test if the lock is currently held.
+
+            The lock is held if the PID file for this lock exists.
+
+            """
+        return os.path.exists(self.path)
+
+    def i_am_locking(self):
+        """ Test if the lock is held by the current process.
+
+        Returns ``True`` if the current process ID matches the
+        number stored in the PID file.
+        """
+        return self.is_locked() and os.getpid() == self.read_pid()
+
+    def acquire(self, timeout=None):
+        """ Acquire the lock.
+
+        Creates the PID file for this lock, or raises an error if
+        the lock could not be acquired.
+        """
+
+        timeout = timeout is not None and timeout or self.timeout
+        end_time = time.time()
+        if timeout is not None and timeout > 0:
+            end_time += timeout
+
+        while True:
+            try:
+                write_pid_to_pidfile(self.path)
+            except OSError as exc:
+                if exc.errno == errno.EEXIST:
+                    # The lock creation failed.  Maybe sleep a bit.
+                    if timeout is not None and time.time() > end_time:
+                        if timeout > 0:
+                            raise LockTimeout("Timeout waiting to acquire"
+                                              " lock for %s" %
+                                              self.path)
+                        else:
+                            raise AlreadyLocked("%s is already locked" %
+                                                self.path)
+                    time.sleep(timeout is not None and timeout/10 or 0.1)
+                else:
+                    raise LockFailed("failed to create %s" % self.path)
+            else:
+                return
+
+    def release(self):
+        """ Release the lock.
+
+            Removes the PID file to release the lock, or raises an
+            error if the current process does not hold the lock.
+
+            """
+        if not self.is_locked():
+            raise NotLocked("%s is not locked" % self.path)
+        if not self.i_am_locking():
+            raise NotMyLock("%s is locked, but not by me" % self.path)
+        remove_existing_pidfile(self.path)
+
+    def break_lock(self):
+        """ Break an existing lock.
+
+            Removes the PID file if it already exists, otherwise does
+            nothing.
+
+            """
+        remove_existing_pidfile(self.path)
+
+def read_pid_from_pidfile(pidfile_path):
+    """ Read the PID recorded in the named PID file.
+
+        Read and return the numeric PID recorded as text in the named
+        PID file. If the PID file cannot be read, or if the content is
+        not a valid PID, return ``None``.
+
+        """
+    pid = None
+    try:
+        pidfile = open(pidfile_path, 'r')
+    except IOError:
+        pass
+    else:
+        # According to the FHS 2.3 section on PID files in /var/run:
+        # 
+        #   The file must consist of the process identifier in
+        #   ASCII-encoded decimal, followed by a newline character.
+        # 
+        #   Programs that read PID files should be somewhat flexible
+        #   in what they accept; i.e., they should ignore extra
+        #   whitespace, leading zeroes, absence of the trailing
+        #   newline, or additional lines in the PID file.
+
+        line = pidfile.readline().strip()
+        try:
+            pid = int(line)
+        except ValueError:
+            pass
+        pidfile.close()
+
+    return pid
+
+
+def write_pid_to_pidfile(pidfile_path):
+    """ Write the PID in the named PID file.
+
+        Get the numeric process ID (“PID”) of the current process
+        and write it to the named file as a line of text.
+
+        """
+    open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
+    open_mode = 0o644
+    pidfile_fd = os.open(pidfile_path, open_flags, open_mode)
+    pidfile = os.fdopen(pidfile_fd, 'w')
+
+    # According to the FHS 2.3 section on PID files in /var/run:
+    #
+    #   The file must consist of the process identifier in
+    #   ASCII-encoded decimal, followed by a newline character. For
+    #   example, if crond was process number 25, /var/run/crond.pid
+    #   would contain three characters: two, five, and newline.
+
+    pid = os.getpid()
+    line = "%(pid)d\n" % vars()
+    pidfile.write(line)
+    pidfile.close()
+
+
+def remove_existing_pidfile(pidfile_path):
+    """ Remove the named PID file if it exists.
+
+        Removing a PID file that doesn't exist leaves us in the
+        desired state anyway, so we ignore the condition if the file
+        does not exist.
+
+        """
+    try:
+        os.remove(pidfile_path)
+    except OSError as exc:
+        if exc.errno == errno.ENOENT:
+            pass
+        else:
+            raise
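
A minimal usage sketch of the PID-file lock above. The import path assumes the vendored copy inside pip (the standalone `lockfile` distribution exposes the same names), and the PID-file path is a hypothetical example. Note that `acquire` treats a timeout greater than zero as "poll until LockTimeout", and a timeout of zero or less as "fail immediately with AlreadyLocked".

    from pip._vendor.lockfile import AlreadyLocked, LockTimeout
    from pip._vendor.lockfile.pidlockfile import PIDLockFile

    lock = PIDLockFile("/tmp/myapp.pid")  # hypothetical path
    try:
        lock.acquire(timeout=5)  # poll for up to ~5 seconds
    except LockTimeout:
        print("gave up waiting for the lock")
    except AlreadyLocked:
        print("another process holds the lock")  # the timeout <= 0 path
    else:
        try:
            pass  # critical section: we own /tmp/myapp.pid
        finally:
            lock.release()  # removes the PID file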

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/pidlockfile.pyc


+ 155 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.py

@@ -0,0 +1,155 @@
+from __future__ import absolute_import, division
+
+import time
+import os
+
+try:
+    unicode
+except NameError:
+    unicode = str
+
+from . import LockBase, NotLocked, NotMyLock, LockTimeout, AlreadyLocked
+
+class SQLiteLockFile(LockBase):
+    "Demonstrate SQL-based locking."
+
+    testdb = None
+
+    def __init__(self, path, threaded=True, timeout=None):
+        """
+        >>> lock = SQLiteLockFile('somefile')
+        >>> lock = SQLiteLockFile('somefile', threaded=False)
+        """
+        LockBase.__init__(self, path, threaded, timeout)
+        self.lock_file = unicode(self.lock_file)
+        self.unique_name = unicode(self.unique_name)
+
+        if SQLiteLockFile.testdb is None:
+            import tempfile
+            _fd, testdb = tempfile.mkstemp()
+            os.close(_fd)
+            os.unlink(testdb)
+            del _fd, tempfile
+            SQLiteLockFile.testdb = testdb
+
+        import sqlite3
+        self.connection = sqlite3.connect(SQLiteLockFile.testdb)
+        
+        c = self.connection.cursor()
+        try:
+            c.execute("create table locks"
+                      "("
+                      "   lock_file varchar(32),"
+                      "   unique_name varchar(32)"
+                      ")")
+        except sqlite3.OperationalError:
+            pass
+        else:
+            self.connection.commit()
+            import atexit
+            atexit.register(os.unlink, SQLiteLockFile.testdb)
+
+    def acquire(self, timeout=None):
+        timeout = timeout is not None and timeout or self.timeout
+        end_time = time.time()
+        if timeout is not None and timeout > 0:
+            end_time += timeout
+
+        if timeout is None:
+            wait = 0.1
+        elif timeout <= 0:
+            wait = 0
+        else:
+            wait = timeout / 10
+
+        cursor = self.connection.cursor()
+
+        while True:
+            if not self.is_locked():
+                # Not locked.  Try to lock it.
+                cursor.execute("insert into locks"
+                               "  (lock_file, unique_name)"
+                               "  values"
+                               "  (?, ?)",
+                               (self.lock_file, self.unique_name))
+                self.connection.commit()
+
+                # Check to see if we are the only lock holder.
+                cursor.execute("select * from locks"
+                               "  where unique_name = ?",
+                               (self.unique_name,))
+                rows = cursor.fetchall()
+                if len(rows) > 1:
+                    # Nope.  Someone else got there.  Remove our lock.
+                    cursor.execute("delete from locks"
+                                   "  where unique_name = ?",
+                                   (self.unique_name,))
+                    self.connection.commit()
+                else:
+                    # Yup.  We're done, so go home.
+                    return
+            else:
+                # Check to see if we are the only lock holder.
+                cursor.execute("select * from locks"
+                               "  where unique_name = ?",
+                               (self.unique_name,))
+                rows = cursor.fetchall()
+                if len(rows) == 1:
+                    # We're the locker, so go home.
+                    return
+                    
+            # Maybe we should wait a bit longer.
+            if timeout is not None and time.time() > end_time:
+                if timeout > 0:
+                    # No more waiting.
+                    raise LockTimeout("Timeout waiting to acquire"
+                                      " lock for %s" %
+                                      self.path)
+                else:
+                    # Someone else has the lock and we are impatient.
+                    raise AlreadyLocked("%s is already locked" % self.path)
+
+            # Well, okay.  We'll give it a bit longer.
+            time.sleep(wait)
+
+    def release(self):
+        if not self.is_locked():
+            raise NotLocked("%s is not locked" % self.path)
+        if not self.i_am_locking():
+            raise NotMyLock("%s is locked, but not by me (by %s)" %
+                            (self.unique_name, self._who_is_locking()))
+        cursor = self.connection.cursor()
+        cursor.execute("delete from locks"
+                       "  where unique_name = ?",
+                       (self.unique_name,))
+        self.connection.commit()
+
+    def _who_is_locking(self):
+        cursor = self.connection.cursor()
+        cursor.execute("select unique_name from locks"
+                       "  where lock_file = ?",
+                       (self.lock_file,))
+        return cursor.fetchone()[0]
+        
+    def is_locked(self):
+        cursor = self.connection.cursor()
+        cursor.execute("select * from locks"
+                       "  where lock_file = ?",
+                       (self.lock_file,))
+        rows = cursor.fetchall()
+        return bool(rows)
+
+    def i_am_locking(self):
+        cursor = self.connection.cursor()
+        cursor.execute("select * from locks"
+                       "  where lock_file = ?"
+                       "    and unique_name = ?",
+                       (self.lock_file, self.unique_name))
+        return bool(cursor.fetchall())
+
+    def break_lock(self):
+        cursor = self.connection.cursor()
+        cursor.execute("delete from locks"
+                       "  where lock_file = ?",
+                       (self.lock_file,))
+        self.connection.commit()
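
As its docstring says, the class above demonstrates SQL-based locking rather than providing a robust cross-process lock: the shared `locks` table lives in a throwaway temporary database created per run. A short sketch of the API, assuming the vendored import path:

    from pip._vendor.lockfile import AlreadyLocked
    from pip._vendor.lockfile.sqlitelockfile import SQLiteLockFile

    lock = SQLiteLockFile("somefile", threaded=False)
    lock.acquire(timeout=-1)   # timeout <= 0: raises AlreadyLocked if held
    assert lock.i_am_locking()
    lock.release()             # deletes our row from the locks table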

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/sqlitelockfile.pyc


+ 69 - 0
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.py

@@ -0,0 +1,69 @@
+from __future__ import absolute_import
+
+import time
+import os
+
+from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
+               AlreadyLocked)
+
+class SymlinkLockFile(LockBase):
+    """Lock access to a file using symlink(2)."""
+
+    def __init__(self, path, threaded=True, timeout=None):
+        # super(SymlinkLockFile).__init(...)
+        LockBase.__init__(self, path, threaded, timeout)
+        # split it back!
+        self.unique_name = os.path.split(self.unique_name)[1]
+
+    def acquire(self, timeout=None):
+        # Hopefully unnecessary for symlink.
+        #try:
+        #    open(self.unique_name, "wb").close()
+        #except IOError:
+        #    raise LockFailed("failed to create %s" % self.unique_name)
+        timeout = timeout is not None and timeout or self.timeout
+        end_time = time.time()
+        if timeout is not None and timeout > 0:
+            end_time += timeout
+
+        while True:
+            # Try and create a symbolic link to it.
+            try:
+                os.symlink(self.unique_name, self.lock_file)
+            except OSError:
+                # Link creation failed.  Maybe we've double-locked?
+                if self.i_am_locking():
+                    # Linked to our unique name. Proceed.
+                    return
+                else:
+                    # Otherwise the lock creation failed.
+                    if timeout is not None and time.time() > end_time:
+                        if timeout > 0:
+                            raise LockTimeout("Timeout waiting to acquire"
+                                              " lock for %s" %
+                                              self.path)
+                        else:
+                            raise AlreadyLocked("%s is already locked" %
+                                                self.path)
+                    time.sleep(timeout/10 if timeout is not None else 0.1)
+            else:
+                # Link creation succeeded.  We're good to go.
+                return
+
+    def release(self):
+        if not self.is_locked():
+            raise NotLocked("%s is not locked" % self.path)
+        elif not self.i_am_locking():
+            raise NotMyLock("%s is locked, but not by me" % self.path)
+        os.unlink(self.lock_file)
+
+    def is_locked(self):
+        return os.path.islink(self.lock_file)
+
+    def i_am_locking(self):
+        return (os.path.islink(self.lock_file) and
+                os.readlink(self.lock_file) == self.unique_name)
+
+    def break_lock(self):
+        if os.path.islink(self.lock_file):  # exists && link
+            os.unlink(self.lock_file)
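
The scheme above relies on symlink(2) being atomic: exactly one process can create the link, and the link target records which unique name holds the lock. A self-contained sketch of that primitive (the owner strings and path are hypothetical):

    import os

    lock_file = "/tmp/demo.lock"
    os.symlink("host1-12345", lock_file)      # first creator wins the lock
    try:
        os.symlink("host2-67890", lock_file)  # loser gets OSError (EEXIST)
    except OSError:
        print("already locked by", os.readlink(lock_file))
    os.unlink(lock_file)                      # release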

BIN
venv/lib/python2.7/site-packages/pip/_vendor/lockfile/symlinklockfile.pyc


+ 31 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.py

@@ -0,0 +1,31 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+__all__ = [
+    "__title__", "__summary__", "__uri__", "__version__", "__author__",
+    "__email__", "__license__", "__copyright__",
+]
+
+__title__ = "packaging"
+__summary__ = "Core utilities for Python packages"
+__uri__ = "https://github.com/pypa/packaging"
+
+__version__ = "15.3"
+
+__author__ = "Donald Stufft"
+__email__ = "donald@stufft.io"
+
+__license__ = "Apache License, Version 2.0"
+__copyright__ = "Copyright 2014 %s" % __author__

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/__about__.pyc


+ 24 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.py

@@ -0,0 +1,24 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+from .__about__ import (
+    __author__, __copyright__, __email__, __license__, __summary__, __title__,
+    __uri__, __version__
+)
+
+__all__ = [
+    "__title__", "__summary__", "__uri__", "__version__", "__author__",
+    "__email__", "__license__", "__copyright__",
+]

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/__init__.pyc


+ 40 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.py

@@ -0,0 +1,40 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+import sys
+
+
+PY2 = sys.version_info[0] == 2
+PY3 = sys.version_info[0] == 3
+
+# flake8: noqa
+
+if PY3:
+    string_types = str,
+else:
+    string_types = basestring,
+
+
+def with_metaclass(meta, *bases):
+    """
+    Create a base class with a metaclass.
+    """
+    # This requires a bit of explanation: the basic idea is to make a dummy
+    # metaclass for one level of class instantiation that replaces itself with
+    # the actual metaclass.
+    class metaclass(meta):
+        def __new__(cls, name, this_bases, d):
+            return meta(name, bases, d)
+    return type.__new__(metaclass, 'temporary_class', (), {})
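
A short sketch of how `with_metaclass` is used: the dummy metaclass intercepts the first class creation and rebuilds the class under the real metaclass, so the same line works on Python 2 and Python 3, whose inline metaclass syntaxes differ. The import path assumes the vendored copy:

    import abc

    from pip._vendor.packaging._compat import with_metaclass

    class Base(with_metaclass(abc.ABCMeta, object)):
        @abc.abstractmethod
        def run(self):
            """Subclasses must implement run()."""

    # Base() raises TypeError: the abstract method keeps the class abstract,
    # showing that abc.ABCMeta really ended up as the metaclass.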

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/_compat.pyc


+ 78 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.py

@@ -0,0 +1,78 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+
+class Infinity(object):
+
+    def __repr__(self):
+        return "Infinity"
+
+    def __hash__(self):
+        return hash(repr(self))
+
+    def __lt__(self, other):
+        return False
+
+    def __le__(self, other):
+        return False
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __ne__(self, other):
+        return not isinstance(other, self.__class__)
+
+    def __gt__(self, other):
+        return True
+
+    def __ge__(self, other):
+        return True
+
+    def __neg__(self):
+        return NegativeInfinity
+
+Infinity = Infinity()
+
+
+class NegativeInfinity(object):
+
+    def __repr__(self):
+        return "-Infinity"
+
+    def __hash__(self):
+        return hash(repr(self))
+
+    def __lt__(self, other):
+        return True
+
+    def __le__(self, other):
+        return True
+
+    def __eq__(self, other):
+        return isinstance(other, self.__class__)
+
+    def __ne__(self, other):
+        return not isinstance(other, self.__class__)
+
+    def __gt__(self, other):
+        return False
+
+    def __ge__(self, other):
+        return False
+
+    def __neg__(self):
+        return Infinity
+
+NegativeInfinity = NegativeInfinity()
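
These two singletons exist to serve as extreme sort keys: `Infinity` compares greater than everything and `NegativeInfinity` less than everything, so optional version segments can be defaulted to one end of the ordering. A small sketch, assuming the vendored import path:

    from pip._vendor.packaging._structures import Infinity, NegativeInfinity

    keys = [(1, Infinity), (1, 0), (1, NegativeInfinity)]
    print(sorted(keys))  # [(1, -Infinity), (1, 0), (1, Infinity)]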

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/_structures.pyc


+ 784 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.py

@@ -0,0 +1,784 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+import abc
+import functools
+import itertools
+import re
+
+from ._compat import string_types, with_metaclass
+from .version import Version, LegacyVersion, parse
+
+
+class InvalidSpecifier(ValueError):
+    """
+    An invalid specifier was found, users should refer to PEP 440.
+    """
+
+
+class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
+
+    @abc.abstractmethod
+    def __str__(self):
+        """
+        Returns the str representation of this Specifier-like object. This
+        should be representative of the Specifier itself.
+        """
+
+    @abc.abstractmethod
+    def __hash__(self):
+        """
+        Returns a hash value for this Specifier-like object.
+        """
+
+    @abc.abstractmethod
+    def __eq__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are equal.
+        """
+
+    @abc.abstractmethod
+    def __ne__(self, other):
+        """
+        Returns a boolean representing whether or not the two Specifier-like
+        objects are not equal.
+        """
+
+    @abc.abstractproperty
+    def prereleases(self):
+        """
+        Returns whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @prereleases.setter
+    def prereleases(self, value):
+        """
+        Sets whether or not pre-releases as a whole are allowed by this
+        specifier.
+        """
+
+    @abc.abstractmethod
+    def contains(self, item, prereleases=None):
+        """
+        Determines if the given item is contained within this specifier.
+        """
+
+    @abc.abstractmethod
+    def filter(self, iterable, prereleases=None):
+        """
+        Takes an iterable of items and filters them so that only items which
+        are contained within this specifier are allowed in it.
+        """
+
+
+class _IndividualSpecifier(BaseSpecifier):
+
+    _operators = {}
+
+    def __init__(self, spec="", prereleases=None):
+        match = self._regex.search(spec)
+        if not match:
+            raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
+
+        self._spec = (
+            match.group("operator").strip(),
+            match.group("version").strip(),
+        )
+
+        # Store whether or not this Specifier should accept prereleases
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<{0}({1!r}{2})>".format(
+            self.__class__.__name__,
+            str(self),
+            pre,
+        )
+
+    def __str__(self):
+        return "{0}{1}".format(*self._spec)
+
+    def __hash__(self):
+        return hash(self._spec)
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec == other._spec
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            try:
+                other = self.__class__(other)
+            except InvalidSpecifier:
+                return NotImplemented
+        elif not isinstance(other, self.__class__):
+            return NotImplemented
+
+        return self._spec != other._spec
+
+    def _get_operator(self, op):
+        return getattr(self, "_compare_{0}".format(self._operators[op]))
+
+    def _coerce_version(self, version):
+        if not isinstance(version, (LegacyVersion, Version)):
+            version = parse(version)
+        return version
+
+    @property
+    def operator(self):
+        return self._spec[0]
+
+    @property
+    def version(self):
+        return self._spec[1]
+
+    @property
+    def prereleases(self):
+        return self._prereleases
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def __contains__(self, item):
+        return self.contains(item)
+
+    def contains(self, item, prereleases=None):
+        # Determine if prereleases are to be allowed or not.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # Normalize item to a Version or LegacyVersion; this allows us to have
+        # a shortcut for ``"2.0" in Specifier(">=2")``.
+        item = self._coerce_version(item)
+
+        # Determine if we should be supporting prereleases in this specifier
+        # or not; if we do not support prereleases, then we can short-circuit
+        # the logic if this version is a prerelease.
+        if item.is_prerelease and not prereleases:
+            return False
+
+        # Actually do the comparison to determine if this item is contained
+        # within this Specifier or not.
+        return self._get_operator(self.operator)(item, self.version)
+
+    def filter(self, iterable, prereleases=None):
+        yielded = False
+        found_prereleases = []
+
+        kw = {"prereleases": prereleases if prereleases is not None else True}
+
+        # Attempt to iterate over all the values in the iterable and if any of
+        # them match, yield them.
+        for version in iterable:
+            parsed_version = self._coerce_version(version)
+
+            if self.contains(parsed_version, **kw):
+                # If our version is a prerelease, and we were not set to allow
+                # prereleases, then we'll store it for later in case nothing
+                # else matches this specifier.
+                if (parsed_version.is_prerelease
+                        and not (prereleases or self.prereleases)):
+                    found_prereleases.append(version)
+                # Either this is not a prerelease, or we should have been
+                # accepting prereleases from the beginning.
+                else:
+                    yielded = True
+                    yield version
+
+        # Now that we've iterated over everything, determine if we've yielded
+        # any values, and if we have not and we have any prereleases stored up
+        # then we will go ahead and yield the prereleases.
+        if not yielded and found_prereleases:
+            for version in found_prereleases:
+                yield version
+
+
+class LegacySpecifier(_IndividualSpecifier):
+
+    _regex = re.compile(
+        r"""
+        ^
+        \s*
+        (?P<operator>(==|!=|<=|>=|<|>))
+        \s*
+        (?P<version>
+            [^\s]* # We just match everything, except for whitespace since this
+                   # is a "legacy" specifier and the version string can be just
+                   # about anything.
+        )
+        \s*
+        $
+        """,
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+    }
+
+    def _coerce_version(self, version):
+        if not isinstance(version, LegacyVersion):
+            version = LegacyVersion(str(version))
+        return version
+
+    def _compare_equal(self, prospective, spec):
+        return prospective == self._coerce_version(spec)
+
+    def _compare_not_equal(self, prospective, spec):
+        return prospective != self._coerce_version(spec)
+
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= self._coerce_version(spec)
+
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= self._coerce_version(spec)
+
+    def _compare_less_than(self, prospective, spec):
+        return prospective < self._coerce_version(spec)
+
+    def _compare_greater_than(self, prospective, spec):
+        return prospective > self._coerce_version(spec)
+
+
+def _require_version_compare(fn):
+    @functools.wraps(fn)
+    def wrapped(self, prospective, spec):
+        if not isinstance(prospective, Version):
+            return False
+        return fn(self, prospective, spec)
+    return wrapped
+
+
+class Specifier(_IndividualSpecifier):
+
+    _regex = re.compile(
+        r"""
+        ^
+        \s*
+        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
+        (?P<version>
+            (?:
+                # The identity operators allow for an escape hatch that will
+                # do an exact string match of the version you wish to install.
+                # This will not be parsed by PEP 440 and we cannot determine
+                # any semantic meaning from it. This operator is discouraged
+                # but included entirely as an escape hatch.
+                (?<====)  # Only match for the identity operator
+                \s*
+                [^\s]*    # We just match everything, except for whitespace
+                          # since we are only testing for strict identity.
+            )
+            |
+            (?:
+                # The (non)equality operators allow for wild card and local
+                # versions to be specified so we have to define these two
+                # operators separately to enable that.
+                (?<===|!=)            # Only match for equals and not equals
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+
+                # You cannot use a wild card and a dev or local version
+                # together so group them with a | and make them optional.
+                (?:
+                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
+                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
+                    |
+                    \.\*  # Wild card syntax of .*
+                )?
+            )
+            |
+            (?:
+                # The compatible operator requires at least two digits in the
+                # release segment.
+                (?<=~=)               # Only match for the compatible operator
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+            |
+            (?:
+                # All other operators only allow a subset of what the
+                # (non)equality operators do. Specifically they do not allow
+                # local versions to be specified nor do they allow the prefix
+                # matching wild cards.
+                (?<!==|!=|~=)         # We have special cases for these
+                                      # operators so we want to make sure they
+                                      # don't match here.
+
+                \s*
+                v?
+                (?:[0-9]+!)?          # epoch
+                [0-9]+(?:\.[0-9]+)*   # release
+                (?:                   # pre release
+                    [-_\.]?
+                    (a|b|c|rc|alpha|beta|pre|preview)
+                    [-_\.]?
+                    [0-9]*
+                )?
+                (?:                                   # post release
+                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
+                )?
+                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
+            )
+        )
+        \s*
+        $
+        """,
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    _operators = {
+        "~=": "compatible",
+        "==": "equal",
+        "!=": "not_equal",
+        "<=": "less_than_equal",
+        ">=": "greater_than_equal",
+        "<": "less_than",
+        ">": "greater_than",
+        "===": "arbitrary",
+    }
+
+    @_require_version_compare
+    def _compare_compatible(self, prospective, spec):
+        # Compatible releases have an equivalent combination of >= and ==.
+        # That is, ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
+        # implement this in terms of the other specifiers instead of
+        # implementing it ourselves. The only thing we need to do is construct
+        # the other specifiers.
+
+        # We want everything but the last item in the version, but we want to
+        # ignore post and dev releases, and we want to treat the pre-release
+        # as its own separate segment.
+        prefix = ".".join(
+            list(
+                itertools.takewhile(
+                    lambda x: (not x.startswith("post")
+                               and not x.startswith("dev")),
+                    _version_split(spec),
+                )
+            )[:-1]
+        )
+
+        # Add the prefix notation to the end of our string
+        prefix += ".*"
+
+        return (self._get_operator(">=")(prospective, spec)
+                and self._get_operator("==")(prospective, prefix))
+
+    @_require_version_compare
+    def _compare_equal(self, prospective, spec):
+        # We need special logic to handle prefix matching
+        if spec.endswith(".*"):
+            # Split the spec out by dots, and pretend that there is an implicit
+            # dot in between a release segment and a pre-release segment.
+            spec = _version_split(spec[:-2])  # Remove the trailing .*
+
+            # Split the prospective version out by dots, and pretend that there
+            # is an implicit dot in between a release segment and a pre-release
+            # segment.
+            prospective = _version_split(str(prospective))
+
+            # Shorten the prospective version to be the same length as the spec
+            # so that we can determine if the specifier is a prefix of the
+            # prospective version or not.
+            prospective = prospective[:len(spec)]
+
+            # Pad out our two sides with zeros so that they both equal the same
+            # length.
+            spec, prospective = _pad_version(spec, prospective)
+        else:
+            # Convert our spec string into a Version
+            spec = Version(spec)
+
+            # If the specifier does not have a local segment, then we want to
+            # act as if the prospective version also does not have a local
+            # segment.
+            if not spec.local:
+                prospective = Version(prospective.public)
+
+        return prospective == spec
+
+    @_require_version_compare
+    def _compare_not_equal(self, prospective, spec):
+        return not self._compare_equal(prospective, spec)
+
+    @_require_version_compare
+    def _compare_less_than_equal(self, prospective, spec):
+        return prospective <= Version(spec)
+
+    @_require_version_compare
+    def _compare_greater_than_equal(self, prospective, spec):
+        return prospective >= Version(spec)
+
+    @_require_version_compare
+    def _compare_less_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is less than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective < spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a pre-release version, we do not accept pre-release
+        # versions for the version mentioned in the specifier (e.g. <3.1
+        # should not match 3.1.dev0, but should match 3.0.dev0).
+        if not spec.is_prerelease and prospective.is_prerelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both less than the spec version *and* not a pre-release of the
+        # same version in the spec.
+        return True
+
+    @_require_version_compare
+    def _compare_greater_than(self, prospective, spec):
+        # Convert our spec to a Version instance, since we'll want to work with
+        # it as a version.
+        spec = Version(spec)
+
+        # Check to see if the prospective version is greater than the spec
+        # version. If it's not we can short circuit and just return False now
+        # instead of doing extra unneeded work.
+        if not prospective > spec:
+            return False
+
+        # This special case is here so that, unless the specifier itself
+        # includes a post-release version, we do not accept post-release
+        # versions for the version mentioned in the specifier (e.g. >3.1
+        # should not match 3.0.post0, but should match 3.2.post0).
+        if not spec.is_postrelease and prospective.is_postrelease:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # Ensure that we do not allow a local version of the version mentioned
+        # in the specifier, which is technically greater than, to match.
+        if prospective.local is not None:
+            if Version(prospective.base_version) == Version(spec.base_version):
+                return False
+
+        # If we've gotten to here, it means that the prospective version is
+        # both greater than the spec version *and* not a post-release or
+        # local version of the same version in the spec.
+        return True
+
+    def _compare_arbitrary(self, prospective, spec):
+        return str(prospective).lower() == str(spec).lower()
+
+    @property
+    def prereleases(self):
+        # If there is an explicit prereleases set for this, then we'll just
+        # blindly use that.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # Look at all of our specifiers and determine if they are inclusive
+        # operators, and if they are, whether they include an explicit
+        # prerelease.
+        operator, version = self._spec
+        if operator in ["==", ">=", "<=", "~=", "==="]:
+            # The == specifier can include a trailing .*; if it does, we
+            # want to remove it before parsing.
+            if operator == "==" and version.endswith(".*"):
+                version = version[:-2]
+
+            # Parse the version, and if it is a pre-release then this
+            # specifier allows pre-releases.
+            if parse(version).is_prerelease:
+                return True
+
+        return False
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+
+_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
+
+
+def _version_split(version):
+    result = []
+    for item in version.split("."):
+        match = _prefix_regex.search(item)
+        if match:
+            result.extend(match.groups())
+        else:
+            result.append(item)
+    return result
+
+
+def _pad_version(left, right):
+    left_split, right_split = [], []
+
+    # Get the release segment of our versions
+    left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
+    right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
+
+    # Get the rest of our versions
+    left_split.append(left[len(left_split[0]):])
+    right_split.append(right[len(right_split[0]):])
+
+    # Insert our padding
+    left_split.insert(
+        1,
+        ["0"] * max(0, len(right_split[0]) - len(left_split[0])),
+    )
+    right_split.insert(
+        1,
+        ["0"] * max(0, len(left_split[0]) - len(right_split[0])),
+    )
+
+    return (
+        list(itertools.chain(*left_split)),
+        list(itertools.chain(*right_split)),
+    )
+
+
+class SpecifierSet(BaseSpecifier):
+
+    def __init__(self, specifiers="", prereleases=None):
+        # Split on , to break each individual specifier into its own item,
+        # and strip each item to remove leading/trailing whitespace.
+        specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
+
+        # Parse each individual specifier, attempting first to make it a
+        # Specifier and falling back to a LegacySpecifier.
+        parsed = set()
+        for specifier in specifiers:
+            try:
+                parsed.add(Specifier(specifier))
+            except InvalidSpecifier:
+                parsed.add(LegacySpecifier(specifier))
+
+        # Turn our parsed specifiers into a frozen set and save them for later.
+        self._specs = frozenset(parsed)
+
+        # Store our prereleases value so we can use it later to determine if
+        # we accept prereleases or not.
+        self._prereleases = prereleases
+
+    def __repr__(self):
+        pre = (
+            ", prereleases={0!r}".format(self.prereleases)
+            if self._prereleases is not None
+            else ""
+        )
+
+        return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
+
+    def __str__(self):
+        return ",".join(sorted(str(s) for s in self._specs))
+
+    def __hash__(self):
+        return hash(self._specs)
+
+    def __and__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        specifier = SpecifierSet()
+        specifier._specs = frozenset(self._specs | other._specs)
+
+        if self._prereleases is None and other._prereleases is not None:
+            specifier._prereleases = other._prereleases
+        elif self._prereleases is not None and other._prereleases is None:
+            specifier._prereleases = self._prereleases
+        elif self._prereleases == other._prereleases:
+            specifier._prereleases = self._prereleases
+        else:
+            raise ValueError(
+                "Cannot combine SpecifierSets with True and False prerelease "
+                "overrides."
+            )
+
+        return specifier
+
+    def __eq__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs == other._specs
+
+    def __ne__(self, other):
+        if isinstance(other, string_types):
+            other = SpecifierSet(other)
+        elif isinstance(other, _IndividualSpecifier):
+            other = SpecifierSet(str(other))
+        elif not isinstance(other, SpecifierSet):
+            return NotImplemented
+
+        return self._specs != other._specs
+
+    def __len__(self):
+        return len(self._specs)
+
+    def __iter__(self):
+        return iter(self._specs)
+
+    @property
+    def prereleases(self):
+        # If we have been given an explicit prerelease modifier, then we'll
+        # pass that through here.
+        if self._prereleases is not None:
+            return self._prereleases
+
+        # If we don't have any specifiers, and we don't have a forced value,
+        # then we'll just return None since we don't know if this should have
+        # pre-releases or not.
+        if not self._specs:
+            return None
+
+        # Otherwise we'll see if any of the given specifiers accept
+        # prereleases, if any of them do we'll return True, otherwise False.
+        return any(s.prereleases for s in self._specs)
+
+    @prereleases.setter
+    def prereleases(self, value):
+        self._prereleases = value
+
+    def __contains__(self, item):
+        return self.contains(item)
+
+    def contains(self, item, prereleases=None):
+        # Ensure that our item is a Version or LegacyVersion instance.
+        if not isinstance(item, (LegacyVersion, Version)):
+            item = parse(item)
+
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # We can determine if we're going to allow pre-releases by looking to
+        # see if any of the underlying items supports them. If none of them do
+        # and this item is a pre-release then we do not allow it and we can
+        # short circuit that here.
+        # Note: This means that 1.0.dev1 would not be contained in something
+        #       like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
+        if not prereleases and item.is_prerelease:
+            return False
+
+        # We simply dispatch to the underlying specs here to make sure that the
+        # given version is contained within all of them.
+        # Note: This use of all() here means that an empty set of specifiers
+        #       will always return True, this is an explicit design decision.
+        return all(
+            s.contains(item, prereleases=prereleases)
+            for s in self._specs
+        )
+
+    def filter(self, iterable, prereleases=None):
+        # Determine if we're forcing a prerelease or not; if we're not forcing
+        # one for this particular filter call, then we'll use whatever the
+        # SpecifierSet thinks for whether or not we should support prereleases.
+        if prereleases is None:
+            prereleases = self.prereleases
+
+        # If we have any specifiers, then we want to wrap our iterable in the
+        # filter method for each one; this will act as a logical AND amongst
+        # the specifiers.
+        if self._specs:
+            for spec in self._specs:
+                iterable = spec.filter(iterable, prereleases=bool(prereleases))
+            return iterable
+        # If we do not have any specifiers, then we need to have a rough filter
+        # which will filter out any pre-releases, unless there are no final
+        # releases, and which will filter out LegacyVersion in general.
+        else:
+            filtered = []
+            found_prereleases = []
+
+            for item in iterable:
+                # Ensure that we have some kind of Version object for this item.
+                if not isinstance(item, (LegacyVersion, Version)):
+                    parsed_version = parse(item)
+                else:
+                    parsed_version = item
+
+                # Filter out any item which is parsed as a LegacyVersion
+                if isinstance(parsed_version, LegacyVersion):
+                    continue
+
+                # Store any item which is a pre-release for later unless we've
+                # already found a final version or we are accepting prereleases
+                if parsed_version.is_prerelease and not prereleases:
+                    if not filtered:
+                        found_prereleases.append(item)
+                else:
+                    filtered.append(item)
+
+            # If we've found no items except for pre-releases, then we'll go
+            # ahead and use the pre-releases
+            if not filtered and found_prereleases and prereleases is None:
+                return found_prereleases
+
+            return filtered
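
A usage sketch of the specifier machinery above: comma-separated clauses in a SpecifierSet are ANDed together, and pre-releases are excluded unless a clause mentions one or the caller opts in. The import path assumes the vendored copy:

    from pip._vendor.packaging.specifiers import SpecifierSet

    spec = SpecifierSet(">=1.0,<2.0")
    print("1.4" in spec)        # True
    print("2.0" in spec)        # False: fails the <2.0 clause
    print("1.5.dev1" in spec)   # False: pre-releases excluded by default
    print(spec.contains("1.5.dev1", prereleases=True))     # True
    print(list(spec.filter(["0.9", "1.2", "1.5.dev1"])))   # ['1.2']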

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/specifiers.pyc


+ 403 - 0
venv/lib/python2.7/site-packages/pip/_vendor/packaging/version.py

@@ -0,0 +1,403 @@
+# Copyright 2014 Donald Stufft
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from __future__ import absolute_import, division, print_function
+
+import collections
+import itertools
+import re
+
+from ._structures import Infinity
+
+
+__all__ = [
+    "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"
+]
+
+
+_Version = collections.namedtuple(
+    "_Version",
+    ["epoch", "release", "dev", "pre", "post", "local"],
+)
+
+
+def parse(version):
+    """
+    Parse the given version string and return either a :class:`Version` object
+    or a :class:`LegacyVersion` object depending on if the given version is
+    a valid PEP 440 version or a legacy version.
+    """
+    try:
+        return Version(version)
+    except InvalidVersion:
+        return LegacyVersion(version)
+
+
+class InvalidVersion(ValueError):
+    """
+    An invalid version was found, users should refer to PEP 440.
+    """
+
+
+class _BaseVersion(object):
+
+    def __hash__(self):
+        return hash(self._key)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def _compare(self, other, method):
+        if not isinstance(other, _BaseVersion):
+            return NotImplemented
+
+        return method(self._key, other._key)
+
+
+class LegacyVersion(_BaseVersion):
+
+    def __init__(self, version):
+        self._version = str(version)
+        self._key = _legacy_cmpkey(self._version)
+
+    def __str__(self):
+        return self._version
+
+    def __repr__(self):
+        return "<LegacyVersion({0})>".format(repr(str(self)))
+
+    @property
+    def public(self):
+        return self._version
+
+    @property
+    def base_version(self):
+        return self._version
+
+    @property
+    def local(self):
+        return None
+
+    @property
+    def is_prerelease(self):
+        return False
+
+    @property
+    def is_postrelease(self):
+        return False
+
+
+_legacy_version_component_re = re.compile(
+    r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE,
+)
+
+_legacy_version_replacement_map = {
+    "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@",
+}
+
+
+def _parse_version_parts(s):
+    for part in _legacy_version_component_re.split(s):
+        part = _legacy_version_replacement_map.get(part, part)
+
+        if not part or part == ".":
+            continue
+
+        if part[:1] in "0123456789":
+            # pad for numeric comparison
+            yield part.zfill(8)
+        else:
+            yield "*" + part
+
+    # ensure that alpha/beta/candidate are before final
+    yield "*final"
+
+
+def _legacy_cmpkey(version):
+    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
+    # epoch greater than or equal to 0. This will effectively sort the
+    # LegacyVersion, which uses the de facto standard originally implemented
+    # by setuptools, before all PEP 440 versions.
+    epoch = -1
+
+    # This scheme is taken from setuptools' pkg_resources.parse_version,
+    # prior to its adoption of the packaging library.
+    parts = []
+    for part in _parse_version_parts(version.lower()):
+        if part.startswith("*"):
+            # remove "-" before a prerelease tag
+            if part < "*final":
+                while parts and parts[-1] == "*final-":
+                    parts.pop()
+
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1] == "00000000":
+                parts.pop()
+
+        parts.append(part)
+    parts = tuple(parts)
+
+    return epoch, parts
+
+# Deliberately not anchored to the start and end of the string, to make it
+# easier for 3rd party code to reuse
+VERSION_PATTERN = r"""
+    v?
+    (?:
+        (?:(?P<epoch>[0-9]+)!)?                           # epoch
+        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
+        (?P<pre>                                          # pre-release
+            [-_\.]?
+            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
+            [-_\.]?
+            (?P<pre_n>[0-9]+)?
+        )?
+        (?P<post>                                         # post release
+            (?:-(?P<post_n1>[0-9]+))
+            |
+            (?:
+                [-_\.]?
+                (?P<post_l>post|rev|r)
+                [-_\.]?
+                (?P<post_n2>[0-9]+)?
+            )
+        )?
+        (?P<dev>                                          # dev release
+            [-_\.]?
+            (?P<dev_l>dev)
+            [-_\.]?
+            (?P<dev_n>[0-9]+)?
+        )?
+    )
+    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
+"""
+
+
+class Version(_BaseVersion):
+
+    _regex = re.compile(
+        r"^\s*" + VERSION_PATTERN + r"\s*$",
+        re.VERBOSE | re.IGNORECASE,
+    )
+
+    def __init__(self, version):
+        # Validate the version and parse it into pieces
+        match = self._regex.search(version)
+        if not match:
+            raise InvalidVersion("Invalid version: '{0}'".format(version))
+
+        # Store the parsed out pieces of the version
+        self._version = _Version(
+            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
+            release=tuple(int(i) for i in match.group("release").split(".")),
+            pre=_parse_letter_version(
+                match.group("pre_l"),
+                match.group("pre_n"),
+            ),
+            post=_parse_letter_version(
+                match.group("post_l"),
+                match.group("post_n1") or match.group("post_n2"),
+            ),
+            dev=_parse_letter_version(
+                match.group("dev_l"),
+                match.group("dev_n"),
+            ),
+            local=_parse_local_version(match.group("local")),
+        )
+
+        # Generate a key which will be used for sorting
+        self._key = _cmpkey(
+            self._version.epoch,
+            self._version.release,
+            self._version.pre,
+            self._version.post,
+            self._version.dev,
+            self._version.local,
+        )
+
+    def __repr__(self):
+        return "<Version({0})>".format(repr(str(self)))
+
+    def __str__(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        # Pre-release
+        if self._version.pre is not None:
+            parts.append("".join(str(x) for x in self._version.pre))
+
+        # Post-release
+        if self._version.post is not None:
+            parts.append(".post{0}".format(self._version.post[1]))
+
+        # Development release
+        if self._version.dev is not None:
+            parts.append(".dev{0}".format(self._version.dev[1]))
+
+        # Local version segment
+        if self._version.local is not None:
+            parts.append(
+                "+{0}".format(".".join(str(x) for x in self._version.local))
+            )
+
+        return "".join(parts)
+
+    @property
+    def public(self):
+        return str(self).split("+", 1)[0]
+
+    @property
+    def base_version(self):
+        parts = []
+
+        # Epoch
+        if self._version.epoch != 0:
+            parts.append("{0}!".format(self._version.epoch))
+
+        # Release segment
+        parts.append(".".join(str(x) for x in self._version.release))
+
+        return "".join(parts)
+
+    @property
+    def local(self):
+        version_string = str(self)
+        if "+" in version_string:
+            return version_string.split("+", 1)[1]
+
+    @property
+    def is_prerelease(self):
+        return bool(self._version.dev or self._version.pre)
+
+    @property
+    def is_postrelease(self):
+        return bool(self._version.post)
+
+
+def _parse_letter_version(letter, number):
+    if letter:
+        # We consider there to be an implicit 0 in a pre-release if there is
+        # not a numeral associated with it.
+        if number is None:
+            number = 0
+
+        # We normalize any letters to their lower case form
+        letter = letter.lower()
+
+        # We consider some words to be alternate spellings of other words and
+        # in those cases we want to normalize the spellings to our preferred
+        # spelling.
+        if letter == "alpha":
+            letter = "a"
+        elif letter == "beta":
+            letter = "b"
+        elif letter in ["c", "pre", "preview"]:
+            letter = "rc"
+        elif letter in ["rev", "r"]:
+            letter = "post"
+
+        return letter, int(number)
+    if not letter and number:
+        # We assume if we are given a number, but we are not given a letter
+        # then this is using the implicit post release syntax (e.g. 1.0-1)
+        letter = "post"
+
+        return letter, int(number)
+
+
+_local_version_seperators = re.compile(r"[\._-]")
+
+
+def _parse_local_version(local):
+    """
+    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
+    """
+    if local is not None:
+        return tuple(
+            part.lower() if not part.isdigit() else int(part)
+            for part in _local_version_seperators.split(local)
+        )
+
+
+def _cmpkey(epoch, release, pre, post, dev, local):
+    # When we compare a release version, we want to compare it with all of the
+    # trailing zeros removed. So we'll reverse the list, drop all the now
+    # leading zeros until we come to something non-zero, take the rest and
+    # re-reverse it back into the correct order, then make it a tuple and use
+    # that for our sorting key.
+    release = tuple(
+        reversed(list(
+            itertools.dropwhile(
+                lambda x: x == 0,
+                reversed(release),
+            )
+        ))
+    )
+
+    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
+    # We'll do this by abusing the pre segment, but we _only_ want to do this
+    # if there is not a pre or a post segment. If we have one of those then
+    # the normal sorting rules will handle this case correctly.
+    if pre is None and post is None and dev is not None:
+        pre = -Infinity
+    # Versions without a pre-release (except as noted above) should sort after
+    # those with one.
+    elif pre is None:
+        pre = Infinity
+
+    # Versions without a post segment should sort before those with one.
+    if post is None:
+        post = -Infinity
+
+    # Versions without a development segment should sort after those with one.
+    if dev is None:
+        dev = Infinity
+
+    if local is None:
+        # Versions without a local segment should sort before those with one.
+        local = -Infinity
+    else:
+        # Versions with a local segment need that segment parsed to implement
+        # the sorting rules in PEP440.
+        # - Alpha numeric segments sort before numeric segments
+        # - Alpha numeric segments sort lexicographically
+        # - Numeric segments sort numerically
+        # - Shorter versions sort before longer versions when the prefixes
+        #   match exactly
+        local = tuple(
+            (i, "") if isinstance(i, int) else (-Infinity, i)
+            for i in local
+        )
+
+    return epoch, release, pre, post, dev, local
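
A sketch of the ordering this key function produces, per PEP 440: development releases sort before pre-releases, which sort before the final release, which sorts before post-releases and local versions. The import path assumes the vendored copy:

    from pip._vendor.packaging.version import Version, parse

    assert Version("1.0.dev0") < Version("1.0a1") < Version("1.0")
    assert Version("1.0") < Version("1.0.post1")
    assert Version("1.0+local") > Version("1.0")
    assert Version("1.0.0") == Version("1.0")   # trailing zeros are dropped
    print(repr(parse("not pep 440")))           # <LegacyVersion('not pep 440')>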

BIN
venv/lib/python2.7/site-packages/pip/_vendor/packaging/version.pyc


+ 3107 - 0
venv/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.py

@@ -0,0 +1,3107 @@
+"""
+Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+from __future__ import absolute_import
+
+import sys
+import os
+import io
+import time
+import re
+import types
+import zipfile
+import zipimport
+import warnings
+import stat
+import functools
+import pkgutil
+import token
+import symbol
+import operator
+import platform
+import collections
+import plistlib
+import email.parser
+import tempfile
+import textwrap
+from pkgutil import get_importer
+
+try:
+    import _imp
+except ImportError:
+    # Python 3.2 compatibility
+    import imp as _imp
+
+PY3 = sys.version_info > (3,)
+PY2 = not PY3
+
+if PY3:
+    from urllib.parse import urlparse, urlunparse
+
+if PY2:
+    from urlparse import urlparse, urlunparse
+
+if PY3:
+    string_types = str,
+else:
+    string_types = str, eval('unicode')
+
+iteritems = (lambda i: i.items()) if PY3 else lambda i: i.iteritems()
+
+# capture these to bypass sandboxing
+from os import utime
+try:
+    from os import mkdir, rename, unlink
+    WRITE_SUPPORT = True
+except ImportError:
+    # no write support, probably under GAE
+    WRITE_SUPPORT = False
+
+from os import open as os_open
+from os.path import isdir, split
+
+# Avoid try/except due to potential problems with delayed import mechanisms.
+if sys.version_info >= (3, 3) and sys.implementation.name == "cpython":
+    import importlib.machinery as importlib_machinery
+else:
+    importlib_machinery = None
+
+try:
+    import parser
+except ImportError:
+    pass
+
+import pip._vendor.packaging.version
+import pip._vendor.packaging.specifiers
+packaging = pip._vendor.packaging
+
+
+# declare some globals that will be defined later to
+# satisfy the linters.
+require = None
+working_set = None
+
+
+class PEP440Warning(RuntimeWarning):
+    """
+    Used when there is an issue with a version or specifier not complying with
+    PEP 440.
+    """
+
+
+class _SetuptoolsVersionMixin(object):
+
+    def __hash__(self):
+        return super(_SetuptoolsVersionMixin, self).__hash__()
+
+    def __lt__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) < other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__lt__(other)
+
+    def __le__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) <= other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__le__(other)
+
+    def __eq__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) == other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__eq__(other)
+
+    def __ge__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) >= other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__ge__(other)
+
+    def __gt__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) > other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__gt__(other)
+
+    def __ne__(self, other):
+        if isinstance(other, tuple):
+            return tuple(self) != other
+        else:
+            return super(_SetuptoolsVersionMixin, self).__ne__(other)
+
+    def __getitem__(self, key):
+        return tuple(self)[key]
+
+    def __iter__(self):
+        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+        replace = {
+            'pre': 'c',
+            'preview': 'c',
+            '-': 'final-',
+            'rc': 'c',
+            'dev': '@',
+        }.get
+
+        def _parse_version_parts(s):
+            for part in component_re.split(s):
+                part = replace(part, part)
+                if not part or part == '.':
+                    continue
+                if part[:1] in '0123456789':
+                    # pad for numeric comparison
+                    yield part.zfill(8)
+                else:
+                    yield '*'+part
+
+            # ensure that alpha/beta/candidate are before final
+            yield '*final'
+
+        def old_parse_version(s):
+            parts = []
+            for part in _parse_version_parts(s.lower()):
+                if part.startswith('*'):
+                    # remove '-' before a prerelease tag
+                    if part < '*final':
+                        while parts and parts[-1] == '*final-':
+                            parts.pop()
+                    # remove trailing zeros from each series of numeric parts
+                    while parts and parts[-1] == '00000000':
+                        parts.pop()
+                parts.append(part)
+            return tuple(parts)
+
+        # Warn for use of this function
+        warnings.warn(
+            "You have iterated over the result of "
+            "pkg_resources.parse_version. This is a legacy behavior which is "
+            "inconsistent with the new version class introduced in setuptools "
+            "8.0. In most cases, conversion to a tuple is unnecessary. For "
+            "comparison of versions, sort the Version instances directly. If "
+            "you have another use case requiring the tuple, please file a "
+            "bug with the setuptools project describing that need.",
+            RuntimeWarning,
+            stacklevel=1,
+        )
+
+        for part in old_parse_version(str(self)):
+            yield part
+
+
+class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
+    pass
+
+
+class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
+                              packaging.version.LegacyVersion):
+    pass
+
+
+def parse_version(v):
+    try:
+        return SetuptoolsVersion(v)
+    except packaging.version.InvalidVersion:
+        return SetuptoolsLegacyVersion(v)
+
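A hedged sketch of the fallback behavior (classes as defined above):

    parse_version("1.0.post1")   # PEP 440 compliant -> SetuptoolsVersion
    parse_version("1.0 beta")    # not PEP 440       -> SetuptoolsLegacyVersion
    parse_version("1.0a1") < parse_version("1.0")    # True, per PEP 440 ordering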
+
+_state_vars = {}
+
+def _declare_state(vartype, **kw):
+    globals().update(kw)
+    _state_vars.update(dict.fromkeys(kw, vartype))
+
+def __getstate__():
+    state = {}
+    g = globals()
+    for k, v in _state_vars.items():
+        state[k] = g['_sget_'+v](g[k])
+    return state
+
+def __setstate__(state):
+    g = globals()
+    for k, v in state.items():
+        g['_sset_'+_state_vars[k]](k, g[k], v)
+    return state
+
+def _sget_dict(val):
+    return val.copy()
+
+def _sset_dict(key, ob, state):
+    ob.clear()
+    ob.update(state)
+
+def _sget_object(val):
+    return val.__getstate__()
+
+def _sset_object(key, ob, state):
+    ob.__setstate__(state)
+
+_sget_none = _sset_none = lambda *args: None
+
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of Mac OS X that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of Mac OS X that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of Mac OS X, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform()
+    m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+        except ValueError:
+            # not Mac OS X
+            pass
+    return plat
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require', 'run_script', 'get_provider',  'get_distribution',
+    'load_entry_point', 'get_entry_map', 'get_entry_info',
+    'iter_entry_points',
+    'resource_string', 'resource_stream', 'resource_filename',
+    'resource_listdir', 'resource_exists', 'resource_isdir',
+
+    # Environmental control
+    'declare_namespace', 'working_set', 'add_activation_listener',
+    'find_distributions', 'set_extraction_path', 'cleanup_resources',
+    'get_default_cache',
+
+    # Primary implementation classes
+    'Environment', 'WorkingSet', 'ResourceManager',
+    'Distribution', 'Requirement', 'EntryPoint',
+
+    # Exceptions
+    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
+    'UnknownExtra', 'ExtractionError',
+
+    # Warnings
+    'PEP440Warning',
+
+    # Parsing functions and string utilities
+    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',
+
+    # filesystem utilities
+    'ensure_directory', 'normalize_path',
+
+    # Distribution "precedence" constants
+    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+    'register_finder', 'register_namespace_handler', 'register_loader_type',
+    'fixup_namespace_packages', 'get_importer',
+
+    # Deprecated/backward compatibility only
+    'run_main', 'AvailableDistributions',
+]
+
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+    def __repr__(self):
+        return self.__class__.__name__+repr(self.args)
+
+
+class VersionConflict(ResolutionError):
+    """
+    An already-installed version conflicts with the requested version.
+
+    Should be initialized with the installed Distribution and the requested
+    Requirement.
+    """
+
+    _template = "{self.dist} is installed but {self.req} is required"
+
+    @property
+    def dist(self):
+        return self.args[0]
+
+    @property
+    def req(self):
+        return self.args[1]
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def with_context(self, required_by):
+        """
+        If required_by is non-empty, return a version of self that is a
+        ContextualVersionConflict.
+        """
+        if not required_by:
+            return self
+        args = self.args + (required_by,)
+        return ContextualVersionConflict(*args)
+
+
+class ContextualVersionConflict(VersionConflict):
+    """
+    A VersionConflict that accepts a third parameter, the set of the
+    requirements that required the installed Distribution.
+    """
+
+    _template = VersionConflict._template + ' by {self.required_by}'
+
+    @property
+    def required_by(self):
+        return self.args[2]
+
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+    _template = ("The '{self.req}' distribution was not found "
+                 "and is required by {self.requirers_str}")
+
+    @property
+    def req(self):
+        return self.args[0]
+
+    @property
+    def requirers(self):
+        return self.args[1]
+
+    @property
+    def requirers_str(self):
+        if not self.requirers:
+            return 'the application'
+        return ', '.join(self.requirers)
+
+    def report(self):
+        return self._template.format(**locals())
+
+    def __str__(self):
+        return self.report()
+
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+_provider_factories = {}
+
+PY_MAJOR = sys.version[:3]
+EGG_DIST = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
+
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq, Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+def _macosx_vers(_cache=[]):
+    if not _cache:
+        version = platform.mac_ver()[0]
+        # fallback for MacPorts
+        if version == '':
+            plist = '/System/Library/CoreServices/SystemVersion.plist'
+            if os.path.exists(plist):
+                if hasattr(plistlib, 'readPlist'):
+                    plist_content = plistlib.readPlist(plist)
+                    if 'ProductVersion' in plist_content:
+                        version = plist_content['ProductVersion']
+
+        _cache.append(version.split('.'))
+    return _cache[0]
+
+def _macosx_arch(machine):
+    return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine)
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and Mac OS X.
+    """
+    try:
+        # Python 2.7 or >=3.2
+        from sysconfig import get_platform
+    except ImportError:
+        from distutils.util import get_platform
+
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macosx_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
+                _macosx_arch(machine))
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+# XXX backward compat
+get_platform = get_build_platform
+
+
+def compatible_platforms(provided, required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided==required:
+        # easy case
+        return True
+
+    # Mac OS X special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macosx designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if dversion == 7 and macosversion >= "10.3" or \
+                        dversion == 8 and macosversion >= "10.4":
+                    return True
+            # egg isn't macosx or legacy darwin
+            return False
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or \
+                provMac.group(3) != reqMac.group(3):
+            return False
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
+
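Hedged examples of the Mac OS X rules above (the platform strings are illustrative):

    compatible_platforms('macosx-10.3-ppc', 'macosx-10.5-ppc')     # True: provided 10.3 <= required 10.5
    compatible_platforms('macosx-10.6-intel', 'macosx-10.5-intel') # False: egg needs a newer OS
    compatible_platforms('darwin-8.11.0-ppc', 'macosx-10.4-ppc')   # True: legacy darwin-8 maps to 10.4+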
+
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+# backward compatibility
+run_main = run_script
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist, string_types):
+        dist = Requirement.parse(dist)
+    if isinstance(dist, Requirement):
+        dist = get_provider(dist)
+    if not isinstance(dist, Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
+
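A hedged sketch of these helpers; the distribution and entry point names are only examples (pip 7.x does ship a `pip` console script):

    info = get_entry_info("pip", "console_scripts", "pip")    # EntryPoint or None
    main = load_entry_point("pip", "console_scripts", "pip")  # imports and returns the callable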
+
+class IMetadataProvider:
+
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+        Leading and trailing whitespace is stripped from each line, and lines
+        with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+class WorkingSet(object):
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+    @classmethod
+    def _build_master(cls):
+        """
+        Prepare the master working set.
+        """
+        ws = cls()
+        try:
+            from __main__ import __requires__
+        except ImportError:
+            # The main program does not list any requirements
+            return ws
+
+        # ensure the requirements are met
+        try:
+            ws.require(__requires__)
+        except VersionConflict:
+            return cls._build_from_requirements(__requires__)
+
+        return ws
+
+    @classmethod
+    def _build_from_requirements(cls, req_spec):
+        """
+        Build a working set from a requirement spec. Rewrites sys.path.
+        """
+        # try it without defaults already on sys.path
+        # by starting with an empty path
+        ws = cls([])
+        reqs = parse_requirements(req_spec)
+        dists = ws.resolve(reqs, Environment())
+        for dist in dists:
+            ws.add(dist)
+
+        # add any missing entries from sys.path
+        for entry in sys.path:
+            if entry not in ws.entries:
+                ws.add_entry(entry)
+
+        # then copy back to sys.path
+        sys.path[:] = ws.entries
+        return ws
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry, True)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+    def __contains__(self, dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+        if dist is not None and dist not in req:
+            # XXX add more info
+            raise VersionConflict(dist, req)
+        return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        for dist in self:
+            entries = dist.get_entry_map(group)
+            if name is None:
+                for ep in entries.values():
+                    yield ep
+            elif name in entries:
+                yield entries[name]
+
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            if item not in self.entry_keys:
+                # workaround a cache issue
+                continue
+
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key] = 1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True, replace=False):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set, unless `replace=True`.
+        If it's added, any callbacks registered with the ``subscribe()`` method
+        will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry,[])
+        keys2 = self.entry_keys.setdefault(dist.location,[])
+        if not replace and dist.key in self.by_key:
+            # ignore hidden distros
+            return
+
+        self.by_key[dist.key] = dist
+        if dist.key not in keys:
+            keys.append(dist.key)
+        if dist.key not in keys2:
+            keys2.append(dist.key)
+        self._added_new(dist)
+
+    def resolve(self, requirements, env=None, installer=None,
+            replace_conflicting=False):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+
+        Unless `replace_conflicting=True`, raises a VersionConflict exception if
+        any requirements are found on the path that have the correct name but
+        the wrong version.  Otherwise, if an `installer` is supplied it will be
+        invoked to obtain the correct version of the requirement and activate
+        it.
+        """
+
+        # set up the stack
+        requirements = list(requirements)[::-1]
+        # set of processed requirements
+        processed = {}
+        # key -> dist
+        best = {}
+        to_activate = []
+
+        # Mapping of requirement to set of distributions that required it;
+        # useful for reporting info about conflicts.
+        required_by = collections.defaultdict(set)
+
+        while requirements:
+            # process dependencies breadth-first
+            req = requirements.pop(0)
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+            dist = best.get(req.key)
+            if dist is None:
+                # Find the best distribution and add it to the map
+                dist = self.by_key.get(req.key)
+                if dist is None or (dist not in req and replace_conflicting):
+                    ws = self
+                    if env is None:
+                        if dist is None:
+                            env = Environment(self.entries)
+                        else:
+                            # Use an empty environment and workingset to avoid
+                            # any further conflicts with the conflicting
+                            # distribution
+                            env = Environment([])
+                            ws = WorkingSet([])
+                    dist = best[req.key] = env.best_match(req, ws, installer)
+                    if dist is None:
+                        requirers = required_by.get(req, None)
+                        raise DistributionNotFound(req, requirers)
+                to_activate.append(dist)
+            if dist not in req:
+                # Oops, the "best" so far conflicts with a dependency
+                dependent_req = required_by[req]
+                raise VersionConflict(dist, req).with_context(dependent_req)
+
+            # push the new requirements onto the stack
+            new_requirements = dist.requires(req.extras)[::-1]
+            requirements.extend(new_requirements)
+
+            # Register the new requirements needed by req
+            for new_requirement in new_requirements:
+                required_by[new_requirement].add(req.project_name)
+
+            processed[req] = True
+
+        # return list of distros to activate
+        return to_activate
+
+    def find_plugins(self, plugin_env, full_env=None, installer=None,
+            fallback=True):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            # add plugins+libs to sys.path
+            map(working_set.add, distributions)
+            # display errors
+            print('Could not load', errors)
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        that contains all currently-available distributions.  If `full_env` is
+        not
+        supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        # scan project names in alphabetic order
+        plugin_projects.sort()
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        # put all our entries in shadow_set
+        list(map(shadow_set.add, self))
+
+        for project_name in plugin_projects:
+
+            for dist in plugin_env[project_name]:
+
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError as v:
+                    # save error info
+                    error_info[dist] = v
+                    if fallback:
+                        # try the next older version of project
+                        continue
+                    else:
+                        # give up on this project, keep going
+                        break
+
+                else:
+                    list(map(shadow_set.add, resolvees))
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
+
+    def subscribe(self, callback):
+        """Invoke `callback` for all distributions (including existing ones)"""
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        for dist in self:
+            callback(dist)
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+    def __getstate__(self):
+        return (
+            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
+            self.callbacks[:]
+        )
+
+    def __setstate__(self, e_k_b_c):
+        entries, keys, by_key, callbacks = e_k_b_c
+        self.entries = entries[:]
+        self.entry_keys = keys.copy()
+        self.by_key = by_key.copy()
+        self.callbacks = callbacks[:]
+
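A minimal usage sketch (the requirement string is hypothetical; `parse_requirements` is defined later in this module):

    ws = WorkingSet()                       # snapshot of sys.path
    for dist in ws.resolve(parse_requirements("pip>=7")):
        ws.add(dist)                        # activate everything resolve() found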
+
+class Environment(object):
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(self, search_path=None, platform=get_supported_platform(),
+            python=PY_MAJOR):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'3.3'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and python version
+        requirements specified when this environment was created, or False
+        is returned.
+        """
+        return (self.python is None or dist.py_version is None
+            or dist.py_version==self.python) \
+            and compatible_platforms(dist.platform, self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self, project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+
+        Uses case-insensitive `project_name` comparison, assuming all the
+        project's distributions use their project's name converted to all
+        lowercase as their key.
+
+        """
+        distribution_key = project_name.lower()
+        return self._distmap.get(distribution_key, [])
+
+    def add(self, dist):
+        """Add `dist` if we ``can_add()`` it and it has not already been added
+        """
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key, [])
+            if dist not in dists:
+                dists.append(dist)
+                dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)
+
+    def best_match(self, req, working_set, installer=None):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        dist = working_set.find(req)
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        # try to download/install
+        return self.obtain(req, installer)
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]:
+                yield key
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other, Distribution):
+            self.add(other)
+        elif isinstance(other, Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
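A hedged sketch of typical Environment use (the plugin path is hypothetical; `Requirement` is defined later in this module):

    env = Environment(['/path/to/plugins'])   # snapshot just that directory
    best = env.best_match(Requirement.parse('pip'), WorkingSet([]))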
+
+# XXX backward compatibility
+AvailableDistributions = Environment
+
+
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(
+            resource_name
+        )
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(
+            resource_name
+        )
+
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        err = ExtractionError("""Can't extract file(s) to egg cache
+
+The following error occurred while trying to extract file(s) to the Python egg
+cache:
+
+  %s
+
+The Python egg cache directory is currently set to:
+
+  %s
+
+Perhaps your account does not have write access to this directory?  You can
+change the cache directory by setting the PYTHON_EGG_CACHE environment
+variable to point to an accessible directory.
+""" % (old_exc, cache_path)
+        )
+        err.manager = self
+        err.cache_path = cache_path
+        err.original_error = old_exc
+        raise err
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
+        try:
+            _bypass_ensure_directory(target_path)
+        except:
+            self.extraction_error()
+
+        self._warn_unsafe_extraction_path(extract_path)
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+    @staticmethod
+    def _warn_unsafe_extraction_path(path):
+        """
+        If the default extraction path is overridden and set to an insecure
+        location, such as /tmp, it opens up an opportunity for an attacker to
+        replace an extracted file with an unauthorized payload. Warn the user
+        if a known insecure location is used.
+
+        See Distribute #375 for more details.
+        """
+        if os.name == 'nt' and not path.startswith(os.environ['windir']):
+            # On Windows, permissions are generally restrictive by default
+            #  and temp directories are not writable by other users, so
+            #  bypass the warning.
+            return
+        mode = os.stat(path).st_mode
+        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
+            msg = ("%s is writable by group/others and vulnerable to attack "
+                "when "
+                "used with get_resource_filename. Consider a more secure "
+                "location (set with .set_extraction_path or the "
+                "PYTHON_EGG_CACHE environment variable)." % path)
+            warnings.warn(msg, UserWarning)
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+
+        if os.name == 'posix':
+            # Make the resource executable
+            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
+            os.chmod(tempname, mode)
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError(
+                "Can't change extraction path, files already extracted"
+            )
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
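In practice these methods are usually reached through module-level wrappers that pkg_resources binds to a shared ResourceManager instance later in this file; a hedged example (the resource names are hypothetical, and resource paths always use '/' separators):

    resource_string(__name__, 'templates/index.html')  # contents of the resource
    resource_isdir('pip', '_vendor')                   # True for this package layout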
+def get_default_cache():
+    """Determine the default cache location
+
+    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
+    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
+    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
+    """
+    try:
+        return os.environ['PYTHON_EGG_CACHE']
+    except KeyError:
+        pass
+
+    if os.name!='nt':
+        return os.path.expanduser('~/.python-eggs')
+
+    # XXX this may be locale-specific!
+    app_data = 'Application Data'
+    app_homes = [
+        # best option, should be locale-safe
+        (('APPDATA',), None),
+        (('USERPROFILE',), app_data),
+        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEPATH',), app_data),
+        (('HOME',), None),
+        # 95/98/ME
+        (('WINDIR',), app_data),
+    ]
+
+    for keys, subdir in app_homes:
+        dirname = ''
+        for key in keys:
+            if key in os.environ:
+                dirname = os.path.join(dirname, os.environ[key])
+            else:
+                break
+        else:
+            if subdir:
+                dirname = os.path.join(dirname, subdir)
+            return os.path.join(dirname, 'Python-Eggs')
+    else:
+        raise RuntimeError(
+            "Please set the PYTHON_EGG_CACHE enviroment variable"
+        )
+
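A hedged illustration of the lookup order above:

    # PYTHON_EGG_CACHE=/var/cache/eggs  -> '/var/cache/eggs'
    # unset, POSIX                      -> expanded '~/.python-eggs'
    # unset, Windows with APPDATA set   -> os.path.join(APPDATA, 'Python-Eggs')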
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
+        version = version.replace(' ','.')
+        return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
+
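Hedged examples of the four sanitizers above (inputs are illustrative):

    safe_name("hello_world")           # 'hello-world'
    safe_version("1.0 beta")           # '1.0.beta'
    safe_extra("Foo-Bar")              # 'foo_bar'
    to_filename("zope.interface-4.0")  # 'zope.interface_4.0'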
+
+class MarkerEvaluation(object):
+    values = {
+        'os_name': lambda: os.name,
+        'sys_platform': lambda: sys.platform,
+        'python_full_version': platform.python_version,
+        'python_version': lambda: platform.python_version()[:3],
+        'platform_version': platform.version,
+        'platform_machine': platform.machine,
+        'python_implementation': platform.python_implementation,
+    }
+
+    @classmethod
+    def is_invalid_marker(cls, text):
+        """
+        Validate text as a PEP 426 environment marker; return an exception
+        if invalid or False otherwise.
+        """
+        try:
+            cls.evaluate_marker(text)
+        except SyntaxError as e:
+            return cls.normalize_exception(e)
+        return False
+
+    @staticmethod
+    def normalize_exception(exc):
+        """
+        Given a SyntaxError from a marker evaluation, normalize the error
+        message:
+         - Remove indications of filename and line number.
+         - Replace platform-specific error messages with standard error
+           messages.
+        """
+        subs = {
+            'unexpected EOF while parsing': 'invalid syntax',
+            'parenthesis is never closed': 'invalid syntax',
+        }
+        exc.filename = None
+        exc.lineno = None
+        exc.msg = subs.get(exc.msg, exc.msg)
+        return exc
+
+    @classmethod
+    def and_test(cls, nodelist):
+        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
+        items = [
+            cls.interpret(nodelist[i])
+            for i in range(1, len(nodelist), 2)
+        ]
+        return functools.reduce(operator.and_, items)
+
+    @classmethod
+    def test(cls, nodelist):
+        # MUST NOT short-circuit evaluation, or invalid syntax can be skipped!
+        items = [
+            cls.interpret(nodelist[i])
+            for i in range(1, len(nodelist), 2)
+        ]
+        return functools.reduce(operator.or_, items)
+
+    @classmethod
+    def atom(cls, nodelist):
+        t = nodelist[1][0]
+        if t == token.LPAR:
+            if nodelist[2][0] == token.RPAR:
+                raise SyntaxError("Empty parentheses")
+            return cls.interpret(nodelist[2])
+        msg = "Language feature not supported in environment markers"
+        raise SyntaxError(msg)
+
+    @classmethod
+    def comparison(cls, nodelist):
+        if len(nodelist) > 4:
+            msg = "Chained comparison not allowed in environment markers"
+            raise SyntaxError(msg)
+        comp = nodelist[2][1]
+        cop = comp[1]
+        if comp[0] == token.NAME:
+            if len(nodelist[2]) == 3:
+                if cop == 'not':
+                    cop = 'not in'
+                else:
+                    cop = 'is not'
+        try:
+            cop = cls.get_op(cop)
+        except KeyError:
+            msg = repr(cop) + " operator not allowed in environment markers"
+            raise SyntaxError(msg)
+        return cop(cls.evaluate(nodelist[1]), cls.evaluate(nodelist[3]))
+
+    @classmethod
+    def get_op(cls, op):
+        ops = {
+            symbol.test: cls.test,
+            symbol.and_test: cls.and_test,
+            symbol.atom: cls.atom,
+            symbol.comparison: cls.comparison,
+            'not in': lambda x, y: x not in y,
+            'in': lambda x, y: x in y,
+            '==': operator.eq,
+            '!=': operator.ne,
+            '<':  operator.lt,
+            '>':  operator.gt,
+            '<=': operator.le,
+            '>=': operator.ge,
+        }
+        if hasattr(symbol, 'or_test'):
+            ops[symbol.or_test] = cls.test
+        return ops[op]
+
+    @classmethod
+    def evaluate_marker(cls, text, extra=None):
+        """
+        Evaluate a PEP 426 environment marker on CPython 2.4+.
+        Return a boolean indicating the marker result in this environment.
+        Raise SyntaxError if marker is invalid.
+
+        This implementation uses the 'parser' module, which is not implemented
+        on Jython and has been superseded by the 'ast' module in Python 2.6
+        and later.
+        """
+        return cls.interpret(parser.expr(text).totuple(1)[1])
+
+    @classmethod
+    def _markerlib_evaluate(cls, text):
+        """
+        Evaluate a PEP 426 environment marker using markerlib.
+        Return a boolean indicating the marker result in this environment.
+        Raise SyntaxError if marker is invalid.
+        """
+        from pip._vendor import _markerlib
+        # markerlib implements Metadata 1.2 (PEP 345) environment markers.
+        # Translate the variables to Metadata 2.0 (PEP 426).
+        env = _markerlib.default_environment()
+        for key in env.keys():
+            new_key = key.replace('.', '_')
+            env[new_key] = env.pop(key)
+        try:
+            result = _markerlib.interpret(text, env)
+        except NameError as e:
+            raise SyntaxError(e.args[0])
+        return result
+
+    if 'parser' not in globals():
+        # Fall back to the less-complete _markerlib implementation if the
+        # 'parser' module is not available.
+        evaluate_marker = _markerlib_evaluate
+
+    @classmethod
+    def interpret(cls, nodelist):
+        while len(nodelist) == 2:
+            nodelist = nodelist[1]
+        try:
+            op = cls.get_op(nodelist[0])
+        except KeyError:
+            raise SyntaxError("Comparison or logical expression expected")
+        return op(nodelist)
+
+    @classmethod
+    def evaluate(cls, nodelist):
+        while len(nodelist) == 2:
+            nodelist = nodelist[1]
+        kind = nodelist[0]
+        name = nodelist[1]
+        if kind==token.NAME:
+            try:
+                op = cls.values[name]
+            except KeyError:
+                raise SyntaxError("Unknown name %r" % name)
+            return op()
+        if kind==token.STRING:
+            s = nodelist[1]
+            if not cls._safe_string(s):
+                raise SyntaxError(
+                    "Only plain strings allowed in environment markers")
+            return s[1:-1]
+        msg = "Language feature not supported in environment markers"
+        raise SyntaxError(msg)
+
+    @staticmethod
+    def _safe_string(cand):
+        return (
+            cand[:1] in "'\"" and
+            not cand.startswith('"""') and
+            not cand.startswith("'''") and
+            '\\' not in cand
+        )
+
+invalid_marker = MarkerEvaluation.is_invalid_marker
+evaluate_marker = MarkerEvaluation.evaluate_marker
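A hedged sketch of the two public hooks (marker strings follow PEP 426):

    evaluate_marker("os_name == 'posix'")      # True on POSIX interpreters
    invalid_marker("os_name ==")               # a normalized SyntaxError instance
    invalid_marker("sys_platform == 'win32'")  # False: the marker is valid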
+
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return io.BytesIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def has_metadata(self, name):
+        return self.egg_info and self._has(self._fn(self.egg_info, name))
+
+    if sys.version_info <= (3,):
+        def get_metadata(self, name):
+            if not self.egg_info:
+                return ""
+            return self._get(self._fn(self.egg_info, name))
+    else:
+        def get_metadata(self, name):
+            if not self.egg_info:
+                return ""
+            return self._get(self._fn(self.egg_info, name)).decode("utf-8")
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self, resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self, name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info, name))
+
+    def resource_listdir(self, resource_name):
+        return self._listdir(self._fn(self.module_path, resource_name))
+
+    def metadata_listdir(self, name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info, name))
+        return []
+
+    def run_script(self, script_name, namespace):
+        script = 'scripts/'+script_name
+        if not self.has_metadata(script):
+            raise ResolutionError("No script named %r" % script_name)
+        script_text = self.get_metadata(script).replace('\r\n', '\n')
+        script_text = script_text.replace('\r', '\n')
+        script_filename = self._fn(self.egg_info, script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            source = open(script_filename).read()
+            code = compile(source, script_filename, 'exec')
+            exec(code, namespace, namespace)
+        else:
+            from linecache import cache
+            cache[script_filename] = (
+                len(script_text), 0, script_text.split('\n'), script_filename
+            )
+            script_code = compile(script_text, script_filename,'exec')
+            exec(script_code, namespace, namespace)
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        if resource_name:
+            return os.path.join(base, *resource_name.split('/'))
+        return base
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+register_loader_type(object, NullProvider)
+
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self, module):
+        NullProvider.__init__(self, module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # we assume here that our metadata may be nested inside a "basket"
+        # of multiple eggs; that's why we use module_path instead of .archive
+        path = self.module_path
+        old = None
+        while path!=old:
+            if path.lower().endswith('.egg'):
+                self.egg_name = os.path.basename(path)
+                self.egg_info = os.path.join(path, 'EGG-INFO')
+                self.egg_root = path
+                break
+            old = path
+            path, base = os.path.split(path)
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self, path):
+        return os.path.isdir(path)
+
+    def _listdir(self, path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        with open(path, 'rb') as stream:
+            return stream.read()
+
+register_loader_type(type(None), DefaultProvider)
+
+if importlib_machinery is not None:
+    register_loader_type(importlib_machinery.SourceFileLoader, DefaultProvider)
+
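+# Illustrative sketch of the consumer-facing resource API these providers
+# back (package and resource names here are hypothetical):
+#
+#   import pkg_resources
+#   data = pkg_resources.resource_string('mypkg', 'data/config.json')
+#   if pkg_resources.resource_isdir('mypkg', 'data'):
+#       names = pkg_resources.resource_listdir('mypkg', 'data')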
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    _isdir = _has = lambda self, path: False
+    _get = lambda self, path: ''
+    _listdir = lambda self, path: []
+    module_path = None
+
+    def __init__(self):
+        pass
+
+empty_provider = EmptyProvider()
+
+
+class ZipManifests(dict):
+    """
+    zip manifest builder
+    """
+
+    @classmethod
+    def build(cls, path):
+        """
+        Build a dictionary similar to the zipimport directory
+        caches, except instead of tuples, store ZipInfo objects.
+
+        Use a platform-specific path separator (os.sep) for the path keys
+        for compatibility with pypy on Windows.
+        """
+        with ContextualZipFile(path) as zfile:
+            items = (
+                (
+                    name.replace('/', os.sep),
+                    zfile.getinfo(name),
+                )
+                for name in zfile.namelist()
+            )
+            return dict(items)
+
+    load = build
+
+
+class MemoizedZipManifests(ZipManifests):
+    """
+    Memoized zipfile manifests.
+    """
+    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')
+
+    def load(self, path):
+        """
+        Load a manifest at path or return a suitable manifest already loaded.
+        """
+        path = os.path.normpath(path)
+        mtime = os.stat(path).st_mtime
+
+        if path not in self or self[path].mtime != mtime:
+            manifest = self.build(path)
+            self[path] = self.manifest_mod(manifest, mtime)
+
+        return self[path].manifest
+
+
+class ContextualZipFile(zipfile.ZipFile):
+    """
+    Supplement ZipFile class to support context manager for Python 2.6
+    """
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, type, value, traceback):
+        self.close()
+
+    def __new__(cls, *args, **kwargs):
+        """
+        Construct a ZipFile or ContextualZipFile as appropriate
+        """
+        if hasattr(zipfile.ZipFile, '__exit__'):
+            return zipfile.ZipFile(*args, **kwargs)
+        return super(ContextualZipFile, cls).__new__(cls)
+
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+    _zip_manifests = MemoizedZipManifests()
+
+    def __init__(self, module):
+        EggProvider.__init__(self, module)
+        self.zip_pre = self.loader.archive+os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre):]
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath, self.zip_pre)
+        )
+
+    def _parts(self, zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list.
+        # pseudo-fs path
+        fspath = self.zip_pre+zip_path
+        if fspath.startswith(self.egg_root+os.sep):
+            return fspath[len(self.egg_root)+1:].split(os.sep)
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath, self.egg_root)
+        )
+
+    @property
+    def zipinfo(self):
+        return self._zip_manifests.load(self.loader.archive)
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    @staticmethod
+    def _get_date_and_size(zip_stat):
+        size = zip_stat.file_size
+        # ymdhms+wday, yday, dst
+        date_time = zip_stat.date_time + (0, 0, -1)
+        # 1980 offset already done
+        timestamp = time.mktime(date_time)
+        return timestamp, size
+
+    def _extract_resource(self, manager, zip_path):
+
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(
+                    manager, os.path.join(zip_path, name)
+                )
+            # return the extracted directory name
+            return os.path.dirname(last)
+
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+
+        if not WRITE_SUPPORT:
+            raise IOError('"os.rename" and "os.unlink" are not supported '
+                          'on this platform')
+        try:
+
+            real_path = manager.get_cache_path(
+                self.egg_name, self._parts(zip_path)
+            )
+
+            if self._is_current(real_path, zip_path):
+                return real_path
+
+            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp, timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    if self._is_current(real_path, zip_path):
+                        # the file became current since it was checked above,
+                        #  so proceed.
+                        return real_path
+                    # Windows, del old file and retry
+                    elif os.name=='nt':
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            # report a user-friendly error
+            manager.extraction_error()
+
+        return real_path
+
+    def _is_current(self, file_path, zip_path):
+        """
+        Return True if the file_path is current for this zip_path
+        """
+        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
+        if not os.path.isfile(file_path):
+            return False
+        stat = os.stat(file_path)
+        if stat.st_size!=size or stat.st_mtime!=timestamp:
+            return False
+        # check that the contents match
+        zip_contents = self.loader.get_data(zip_path)
+        with open(file_path, 'rb') as f:
+            file_contents = f.read()
+        return zip_contents == file_contents
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self, fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self, fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
+
+    def _resource_to_zip(self, resource_name):
+        return self._zipinfo_name(self._fn(self.module_path, resource_name))
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing, and will be the contents of the file at
+    the provided location.
+    """
+
+    def __init__(self, path):
+        self.path = path
+
+    def has_metadata(self, name):
+        return name=='PKG-INFO'
+
+    def get_metadata(self, name):
+        if name=='PKG-INFO':
+            with open(self.path,'rU') as f:
+                metadata = f.read()
+            return metadata
+        raise KeyError("No metadata except PKG-INFO is available")
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(base_dir, project_name=dist_name, metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zip_pre = importer.archive+os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+_declare_state('dict', _distribution_finders = {})
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
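+
+# Sketch: enumerating distributions importable from a directory (the path
+# is hypothetical):
+#   for dist in find_distributions('/path/to/site-packages'):
+#       print(dist.project_name, dist.version)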
+
+def find_eggs_in_zip(importer, path_item, only=False):
+    """
+    Find eggs in zip files; possibly multiple nested eggs.
+    """
+    if importer.archive.endswith('.whl'):
+        # wheels are not supported with this finder
+        # they don't have PKG-INFO metadata, and won't ever contain eggs
+        return
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        # don't yield nested distros
+        return
+    for subitem in metadata.resource_listdir('/'):
+        if subitem.endswith('.egg'):
+            subpath = os.path.join(path_item, subitem)
+            for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath):
+                yield dist
+
+register_finder(zipimport.zipimporter, find_eggs_in_zip)
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+register_finder(object, find_nothing)
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
+        if path_item.lower().endswith('.egg'):
+            # unpacked egg
+            yield Distribution.from_filename(
+                path_item, metadata=PathMetadata(
+                    path_item, os.path.join(path_item,'EGG-INFO')
+                )
+            )
+        else:
+            # scan for .egg and .egg-info in directory
+            for entry in os.listdir(path_item):
+                lower = entry.lower()
+                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
+                    fullpath = os.path.join(path_item, entry)
+                    if os.path.isdir(fullpath):
+                        # egg-info directory, allow getting metadata
+                        metadata = PathMetadata(path_item, fullpath)
+                    else:
+                        metadata = FileMetadata(fullpath)
+                    yield Distribution.from_location(
+                        path_item, entry, metadata, precedence=DEVELOP_DIST
+                    )
+                elif not only and lower.endswith('.egg'):
+                    dists = find_distributions(os.path.join(path_item, entry))
+                    for dist in dists:
+                        yield dist
+                elif not only and lower.endswith('.egg-link'):
+                    with open(os.path.join(path_item, entry)) as entry_file:
+                        entry_lines = entry_file.readlines()
+                    for line in entry_lines:
+                        if not line.strip():
+                            continue
+                        path = os.path.join(path_item, line.rstrip())
+                        dists = find_distributions(path)
+                        for item in dists:
+                            yield item
+                        break
+register_finder(pkgutil.ImpImporter, find_on_path)
+
+if importlib_machinery is not None:
+    register_finder(importlib_machinery.FileFinder, find_on_path)
+
+_declare_state('dict', _namespace_handlers={})
+_declare_state('dict', _namespace_packages={})
+
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer, path_entry, moduleName, module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+    loader = importer.find_module(packageName)
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = types.ModuleType(packageName)
+        module.__path__ = []
+        _set_parent_ns(packageName)
+    elif not hasattr(module,'__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer, path_item, packageName, module)
+    if subpath is not None:
+        path = module.__path__
+        path.append(subpath)
+        loader.load_module(packageName)
+        for path_item in path:
+            if path_item not in module.__path__:
+                module.__path__.append(path_item)
+    return subpath
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    _imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path, parent = sys.path, None
+        if '.' in packageName:
+            parent = '.'.join(packageName.split('.')[:-1])
+            declare_namespace(parent)
+            if parent not in _namespace_packages:
+                __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError:
+                raise TypeError("Not a package:", parent)
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent,[]).append(packageName)
+        _namespace_packages.setdefault(packageName,[])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        _imp.release_lock()
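+
+# Sketch: the canonical namespace-package __init__.py routes through the
+# function above:
+#
+#   __import__('pkg_resources').declare_namespace(__name__)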
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    _imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent,()):
+            subpath = _handle_ns(package, path_item)
+            if subpath:
+                fixup_namespace_packages(subpath, package)
+    finally:
+        _imp.release_lock()
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item)==normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+
+if importlib_machinery is not None:
+    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+register_namespace_handler(object, null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(filename))
+
+def _normalize_cached(filename, _cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a string or sequence"""
+    if isinstance(strs, string_types):
+        for s in strs.splitlines():
+            s = s.strip()
+            # skip blank lines/comments
+            if s and not s.startswith('#'):
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
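+
+# For example (sketch):
+#   list(yield_lines("foo\n# comment\n\nbar"))  # -> ['foo', 'bar']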
+
+# whitespace and comment
+LINE_END = re.compile(r"\s*(#.*)?$").match
+# line continuation
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match
+# Distribution or extra
+DISTRO = re.compile(r"\s*((\w|[-.])+)").match
+# ver. info
+VERSION = re.compile(r"\s*(<=?|>=?|===?|!=|~=)\s*((\w|[-.*_!+])+)").match
+# comma between items
+COMMA = re.compile(r"\s*,").match
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"""
+    (?P<name>[^-]+) (
+        -(?P<ver>[^-]+) (
+            -py(?P<pyver>[^-]+) (
+                -(?P<plat>.+)
+            )?
+        )?
+    )?
+    """,
+    re.VERBOSE | re.IGNORECASE,
+).match
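+
+# Illustrative sketch of the EGG_NAME pattern:
+#   m = EGG_NAME('FooBar-1.2-py2.7-linux-x86_64')
+#   m.group('name', 'ver', 'pyver', 'plat')
+#   # -> ('FooBar', '1.2', '2.7', 'linux-x86_64')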
+
+
+class EntryPoint(object):
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, *args, **kwargs):
+        """
+        Require packages for this EntryPoint, then resolve it.
+        """
+        if not require or args or kwargs:
+            warnings.warn(
+                "Parameters to load are deprecated.  Call .resolve and "
+                ".require separately.",
+                DeprecationWarning,
+                stacklevel=2,
+            )
+        if require:
+            self.require(*args, **kwargs)
+        return self.resolve()
+
+    def resolve(self):
+        """
+        Resolve the entry point from its module and attrs.
+        """
+        module = __import__(self.module_name, fromlist=['__name__'], level=0)
+        try:
+            return functools.reduce(getattr, self.attrs, module)
+        except AttributeError as exc:
+            raise ImportError(str(exc))
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+        reqs = self.dist.requires(self.extras)
+        items = working_set.resolve(reqs, env, installer)
+        list(map(working_set.add, items))
+
+    pattern = re.compile(
+        r'\s*'
+        r'(?P<name>.+?)\s*'
+        r'=\s*'
+        r'(?P<module>[\w.]+)\s*'
+        r'(:\s*(?P<attr>[\w.]+))?\s*'
+        r'(?P<extras>\[.*\])?\s*$'
+    )
+
+    @classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1, extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional.
+        """
+        m = cls.pattern.match(src)
+        if not m:
+            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
+            raise ValueError(msg, src)
+        res = m.groupdict()
+        extras = cls._parse_extras(res['extras'])
+        attrs = res['attr'].split('.') if res['attr'] else ()
+        return cls(res['name'], res['module'], attrs, extras, dist)
+
+    @classmethod
+    def _parse_extras(cls, extras_spec):
+        if not extras_spec:
+            return ()
+        req = Requirement.parse('x' + extras_spec)
+        if req.specs:
+            raise ValueError()
+        return req.extras
+
+    @classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name] = ep
+        return this
+
+    @classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data, dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
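+
+# Illustrative sketch (names are hypothetical):
+#   ep = EntryPoint.parse("main = mypkg.cli:run [extra1]")
+#   (ep.name, ep.module_name, ep.attrs, ep.extras)
+#   # -> ('main', 'mypkg.cli', ('run',), ('extra1',))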
+
+
+def _remove_md5_fragment(location):
+    if not location:
+        return ''
+    parsed = urlparse(location)
+    if parsed[-1].startswith('md5='):
+        return urlunparse(parsed[:-1] + ('',))
+    return location
+
+
+class Distribution(object):
+    """Wrap an actual or potential sys.path entry w/metadata"""
+    PKG_INFO = 'PKG-INFO'
+
+    def __init__(self, location=None, metadata=None, project_name=None,
+            version=None, py_version=PY_MAJOR, platform=None,
+            precedence=EGG_DIST):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    @classmethod
+    def from_location(cls, location, basename, metadata=None,**kw):
+        project_name, version, py_version, platform = [None]*4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in _distributionImpl:
+            # .dist-info distributions obtain much of their metadata differently
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name','ver','pyver','plat'
+                )
+            cls = _distributionImpl[ext.lower()]
+        return cls(
+            location, metadata, project_name=project_name, version=version,
+            py_version=py_version, platform=platform, **kw
+        )
+
+    @property
+    def hashcmp(self):
+        return (
+            self.parsed_version,
+            self.precedence,
+            self.key,
+            _remove_md5_fragment(self.location),
+            self.py_version or '',
+            self.platform or '',
+        )
+
+    def __hash__(self):
+        return hash(self.hashcmp)
+
+    def __lt__(self, other):
+        return self.hashcmp < other.hashcmp
+
+    def __le__(self, other):
+        return self.hashcmp <= other.hashcmp
+
+    def __gt__(self, other):
+        return self.hashcmp > other.hashcmp
+
+    def __ge__(self, other):
+        return self.hashcmp >= other.hashcmp
+
+    def __eq__(self, other):
+        if not isinstance(other, self.__class__):
+            # It's not a Distribution, so they are not equal
+            return False
+        return self.hashcmp == other.hashcmp
+
+    def __ne__(self, other):
+        return not self == other
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    @property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+
+    @property
+    def parsed_version(self):
+        if not hasattr(self, "_parsed_version"):
+            self._parsed_version = parse_version(self.version)
+
+        return self._parsed_version
+
+    def _warn_legacy_version(self):
+        LV = packaging.version.LegacyVersion
+        is_legacy = isinstance(self._parsed_version, LV)
+        if not is_legacy:
+            return
+
+        # While an empty version is technically a legacy version and
+        # is not a valid PEP 440 version, it's also unlikely to
+        # actually come from someone and instead it is more likely that
+        # it comes from setuptools attempting to parse a filename and
+        # including it in the list. So for that we'll gate this warning
+        # on if the version is anything at all or not.
+        if not self.version:
+            return
+
+        tmpl = textwrap.dedent("""
+            '{project_name} ({version})' is being parsed as a legacy,
+            non PEP 440,
+            version. You may find odd behavior and sort order.
+            In particular it will be sorted as less than 0.0. It
+            is recommended to migrate to PEP 440 compatible
+            versions.
+            """).strip().replace('\n', ' ')
+
+        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
+
+    @property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError:
+            for line in self._get_metadata(self.PKG_INFO):
+                if line.lower().startswith('version:'):
+                    self._version = safe_version(line.split(':',1)[1].strip())
+                    return self._version
+            else:
+                tmpl = "Missing 'Version:' header and/or %s file"
+                raise ValueError(tmpl % self.PKG_INFO, self)
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            dm = self.__dep_map = {None: []}
+            for name in 'requires.txt', 'depends.txt':
+                for extra, reqs in split_sections(self._get_metadata(name)):
+                    if extra:
+                        if ':' in extra:
+                            extra, marker = extra.split(':', 1)
+                            if invalid_marker(marker):
+                                # XXX warn
+                                reqs=[]
+                            elif not evaluate_marker(marker):
+                                reqs=[]
+                        extra = safe_extra(extra) or None
+                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
+            return dm
+
+    def requires(self, extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None, ()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                )
+        return deps
+
+    def _get_metadata(self, name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def activate(self, path=None):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None:
+            path = sys.path
+        self.insert_on(path)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            for pkg in self._get_metadata('namespace_packages.txt'):
+                if pkg in sys.modules:
+                    declare_namespace(pkg)
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name), to_filename(self.version),
+            self.py_version or PY_MAJOR
+        )
+
+        if self.platform:
+            filename += '-' + self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self, self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try:
+            version = getattr(self, 'version', None)
+        except ValueError:
+            version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name, version)
+
+    def __getattr__(self, attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError(attr)
+        return getattr(self._provider, attr)
+
+    @classmethod
+    def from_filename(cls, filename, metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata,
+            **kw
+        )
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        if isinstance(self.parsed_version, packaging.version.Version):
+            spec = "%s==%s" % (self.project_name, self.parsed_version)
+        else:
+            spec = "%s===%s" % (self.project_name, self.parsed_version)
+
+        return Requirement.parse(spec)
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group, name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group, name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group,{})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
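+
+    # Sketch (group and entry name are hypothetical):
+    #   ep = dist.get_entry_info('console_scripts', 'mytool')
+    #   if ep is not None:
+    #       main = ep.resolve()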
+
+    def insert_on(self, path, loc = None):
+        """Insert self.location in path before its nearest parent directory"""
+
+        loc = loc or self.location
+        if not loc:
+            return
+
+        nloc = _normalize_cached(loc)
+        bdir = os.path.dirname(nloc)
+        npath = [(p and _normalize_cached(p) or p) for p in path]
+
+        for p, item in enumerate(npath):
+            if item == nloc:
+                break
+            elif item == bdir and self.precedence == EGG_DIST:
+                # if it's an .egg, give it precedence over its directory
+                if path is sys.path:
+                    self.check_version_conflict()
+                path.insert(p, loc)
+                npath.insert(p, nloc)
+                break
+        else:
+            if path is sys.path:
+                self.check_version_conflict()
+            path.append(loc)
+            return
+
+        # p is the spot where we found or inserted loc; now remove duplicates
+        while True:
+            try:
+                np = npath.index(nloc, p+1)
+            except ValueError:
+                break
+            else:
+                del npath[np], path[np]
+                # continue scanning after the removed duplicate
+                p = np
+
+        return
+
+    def check_version_conflict(self):
+        if self.key == 'setuptools':
+            # ignore the inevitable setuptools self-conflicts  :(
+            return
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (modname not in sys.modules or modname in nsp
+                    or modname in _namespace_packages):
+                continue
+            if modname in ('pkg_resources', 'setuptools', 'site'):
+                continue
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and (normalize_path(fn).startswith(loc) or
+                       fn.startswith(self.location)):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for " + repr(self))
+            return False
+        return True
+
+    def clone(self,**kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        names = 'project_name version py_version platform location precedence'
+        for attr in names.split():
+            kw.setdefault(attr, getattr(self, attr, None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+    @property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+
+
+class DistInfoDistribution(Distribution):
+    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
+    PKG_INFO = 'METADATA'
+    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")
+
+    @property
+    def _parsed_pkg_info(self):
+        """Parse and cache metadata"""
+        try:
+            return self._pkg_info
+        except AttributeError:
+            metadata = self.get_metadata(self.PKG_INFO)
+            self._pkg_info = email.parser.Parser().parsestr(metadata)
+            return self._pkg_info
+
+    @property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            self.__dep_map = self._compute_dependencies()
+            return self.__dep_map
+
+    def _preparse_requirement(self, requires_dist):
+        """Convert 'Foobar (1); baz' to ('Foobar ==1', 'baz')
+        Split environment marker, add == prefix to version specifiers as
+        necessary, and remove parenthesis.
+        """
+        parts = requires_dist.split(';', 1) + ['']
+        distvers = parts[0].strip()
+        mark = parts[1].strip()
+        distvers = re.sub(self.EQEQ, r"\1==\2\3", distvers)
+        distvers = distvers.replace('(', '').replace(')', '')
+        return (distvers, mark)
+
+    def _compute_dependencies(self):
+        """Recompute this distribution's dependencies."""
+        from pip._vendor._markerlib import compile as compile_marker
+        dm = self.__dep_map = {None: []}
+
+        reqs = []
+        # Including any condition expressions
+        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
+            distvers, mark = self._preparse_requirement(req)
+            parsed = next(parse_requirements(distvers))
+            parsed.marker_fn = compile_marker(mark)
+            reqs.append(parsed)
+
+        def reqs_for_extra(extra):
+            for req in reqs:
+                if req.marker_fn(override={'extra':extra}):
+                    yield req
+
+        common = frozenset(reqs_for_extra(None))
+        dm[None].extend(common)
+
+        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
+            extra = safe_extra(extra.strip())
+            dm[extra] = list(frozenset(reqs_for_extra(extra)) - common)
+
+        return dm
+
+
+_distributionImpl = {
+    '.egg': Distribution,
+    '.egg-info': Distribution,
+    '.dist-info': DistInfoDistribution,
+    }
+
+
+def issue_warning(*args,**kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    warnings.warn(stacklevel=level + 1, *args, **kw)
+
+
+class RequirementParseError(ValueError):
+    def __str__(self):
+        return ' '.join(self.args)
+
+
+def parse_requirements(strs):
+    """Yield ``Requirement`` objects for each specification in `strs`
+
+    `strs` must be a string, or a (possibly-nested) iterable thereof.
+    """
+    # create a steppable iterator, so we can handle \-continuations
+    lines = iter(yield_lines(strs))
+
+    def scan_list(ITEM, TERMINATOR, line, p, groups, item_name):
+
+        items = []
+
+        while not TERMINATOR(line, p):
+            if CONTINUE(line, p):
+                try:
+                    line = next(lines)
+                    p = 0
+                except StopIteration:
+                    msg = "\\ must not appear on the last nonblank line"
+                    raise RequirementParseError(msg)
+
+            match = ITEM(line, p)
+            if not match:
+                msg = "Expected " + item_name + " in"
+                raise RequirementParseError(msg, line, "at", line[p:])
+
+            items.append(match.group(*groups))
+            p = match.end()
+
+            match = COMMA(line, p)
+            if match:
+                # skip the comma
+                p = match.end()
+            elif not TERMINATOR(line, p):
+                msg = "Expected ',' or end-of-list in"
+                raise RequirementParseError(msg, line, "at", line[p:])
+
+        match = TERMINATOR(line, p)
+        # skip the terminator, if any
+        if match:
+            p = match.end()
+        return line, p, items
+
+    for line in lines:
+        match = DISTRO(line)
+        if not match:
+            raise RequirementParseError("Missing distribution spec", line)
+        project_name = match.group(1)
+        p = match.end()
+        extras = []
+
+        match = OBRACKET(line, p)
+        if match:
+            p = match.end()
+            line, p, extras = scan_list(
+                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
+            )
+
+        line, p, specs = scan_list(VERSION, LINE_END, line, p, (1, 2),
+            "version spec")
+        specs = [(op, val) for op, val in specs]
+        yield Requirement(project_name, specs, extras)
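+
+# Sketch: each yielded Requirement round-trips through str():
+#   reqs = list(parse_requirements("Flask[async]>=0.10\nJinja2"))
+#   [str(r) for r in reqs]  # -> ['Flask[async]>=0.10', 'Jinja2']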
+
+
+class Requirement:
+    def __init__(self, project_name, specs, extras):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        self.unsafe_name, project_name = project_name, safe_name(project_name)
+        self.project_name, self.key = project_name, project_name.lower()
+        self.specifier = packaging.specifiers.SpecifierSet(
+            ",".join(["".join([x, y]) for x, y in specs])
+        )
+        self.specs = specs
+        self.extras = tuple(map(safe_extra, extras))
+        self.hashCmp = (
+            self.key,
+            self.specifier,
+            frozenset(self.extras),
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __str__(self):
+        extras = ','.join(self.extras)
+        if extras:
+            extras = '[%s]' % extras
+        return '%s%s%s' % (self.project_name, extras, self.specifier)
+
+    def __eq__(self, other):
+        return (
+            isinstance(other, Requirement) and
+            self.hashCmp == other.hashCmp
+        )
+
+    def __ne__(self, other):
+        return not self == other
+
+    def __contains__(self, item):
+        if isinstance(item, Distribution):
+            if item.key != self.key:
+                return False
+
+            item = item.version
+
+        # Allow prereleases always in order to match the previous behavior of
+        # this method. In the future this should be smarter and follow PEP 440
+        # more accurately.
+        return self.specifier.contains(item, prereleases=True)
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self):
+        return "Requirement.parse(%r)" % str(self)
+
+    @staticmethod
+    def parse(s):
+        reqs = list(parse_requirements(s))
+        if reqs:
+            if len(reqs) == 1:
+                return reqs[0]
+            raise ValueError("Expected only one requirement", s)
+        raise ValueError("No requirements found", s)
+
+
+def _get_mro(cls):
+    """Get an mro for a type or classic class"""
+    if not isinstance(cls, type):
+        class cls(cls, object):
+            pass
+        return cls.__mro__[1:]
+    return cls.__mro__
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    for t in _get_mro(getattr(ob, '__class__', type(ob))):
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+
+def _bypass_ensure_directory(path):
+    """Sandbox-bypassing version of ensure_directory()"""
+    if not WRITE_SUPPORT:
+        raise IOError('"os.mkdir" not supported on this platform.')
+    dirname, filename = split(path)
+    if dirname and filename and not isdir(dirname):
+        _bypass_ensure_directory(dirname)
+        mkdir(dirname, 0o755)
+
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section, content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
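+
+# Sketch:
+#   list(split_sections("foo\n[bar]\nbaz"))
+#   # -> [(None, ['foo']), ('bar', ['baz'])]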
+
+def _mkstemp(*args,**kw):
+    old_open = os.open
+    try:
+        # temporarily bypass sandboxing
+        os.open = os_open
+        return tempfile.mkstemp(*args,**kw)
+    finally:
+        # and then put it back
+        os.open = old_open
+
+
+# Silence the PEP440Warning by default, so that end users don't get hit by it
+# randomly just because they use pkg_resources. We want to append the rule
+# because we want earlier uses of filterwarnings to take precedence over this
+# one.
+warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
+
+
+# from jaraco.functools 1.3
+def _call_aside(f, *args, **kwargs):
+    f(*args, **kwargs)
+    return f
+
+
+@_call_aside
+def _initialize(g=globals()):
+    "Set up global resource manager (deliberately not state-saved)"
+    manager = ResourceManager()
+    g['_manager'] = manager
+    for name in dir(manager):
+        if not name.startswith('_'):
+            g[name] = getattr(manager, name)
+
+
+@_call_aside
+def _initialize_master_working_set():
+    """
+    Prepare the master working set and make the ``require()``
+    API available.
+
+    This function has explicit effects on the global state
+    of pkg_resources. It is intended to be invoked once at
+    the initialization of this module.
+
+    Invocation by other packages is unsupported and done
+    at their own risk.
+    """
+    working_set = WorkingSet._build_master()
+    _declare_state('object', working_set=working_set)
+
+    require = working_set.require
+    iter_entry_points = working_set.iter_entry_points
+    add_activation_listener = working_set.subscribe
+    run_script = working_set.run_script
+    # backward compatibility
+    run_main = run_script
+    # Activate all distributions already on sys.path, and ensure that
+    # all distributions added to the working set in the future (e.g. by
+    # calling ``require()``) will get activated as well.
+    add_activation_listener(lambda dist: dist.activate())
+    working_set.entries = []
+    # re-add entries so their order matches sys.path
+    list(map(working_set.add_entry, sys.path))
+    globals().update(locals())

BIN
venv/lib/python2.7/site-packages/pip/_vendor/pkg_resources/__init__.pyc


+ 123 - 0
venv/lib/python2.7/site-packages/pip/_vendor/progress/__init__.py

@@ -0,0 +1,123 @@
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import division
+
+from collections import deque
+from datetime import timedelta
+from math import ceil
+from sys import stderr
+from time import time
+
+
+__version__ = '1.2'
+
+
+class Infinite(object):
+    file = stderr
+    sma_window = 10
+
+    def __init__(self, *args, **kwargs):
+        self.index = 0
+        self.start_ts = time()
+        self._ts = self.start_ts
+        self._dt = deque(maxlen=self.sma_window)
+        for key, val in kwargs.items():
+            setattr(self, key, val)
+
+    def __getitem__(self, key):
+        if key.startswith('_'):
+            return None
+        return getattr(self, key, None)
+
+    @property
+    def avg(self):
+        return sum(self._dt) / len(self._dt) if self._dt else 0
+
+    @property
+    def elapsed(self):
+        return int(time() - self.start_ts)
+
+    @property
+    def elapsed_td(self):
+        return timedelta(seconds=self.elapsed)
+
+    def update(self):
+        pass
+
+    def start(self):
+        pass
+
+    def finish(self):
+        pass
+
+    def next(self, n=1):
+        if n > 0:
+            now = time()
+            dt = (now - self._ts) / n
+            self._dt.append(dt)
+            self._ts = now
+
+        self.index = self.index + n
+        self.update()
+
+    def iter(self, it):
+        for x in it:
+            yield x
+            self.next()
+        self.finish()
+
+
+class Progress(Infinite):
+    def __init__(self, *args, **kwargs):
+        super(Progress, self).__init__(*args, **kwargs)
+        self.max = kwargs.get('max', 100)
+
+    @property
+    def eta(self):
+        return int(ceil(self.avg * self.remaining))
+
+    @property
+    def eta_td(self):
+        return timedelta(seconds=self.eta)
+
+    @property
+    def percent(self):
+        return self.progress * 100
+
+    @property
+    def progress(self):
+        return min(1, self.index / self.max)
+
+    @property
+    def remaining(self):
+        return max(self.max - self.index, 0)
+
+    def start(self):
+        self.update()
+
+    def goto(self, index):
+        incr = index - self.index
+        self.next(incr)
+
+    def iter(self, it):
+        try:
+            self.max = len(it)
+        except TypeError:
+            pass
+
+        for x in it:
+            yield x
+            self.next()
+        self.finish()
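+
+# Illustrative usage sketch for the classes above; concrete subclasses such
+# as Bar (see bar.py below) supply update(). 'items' and 'do_work' are
+# hypothetical:
+#
+#   bar = Bar('Processing', max=20)
+#   for item in items:
+#       do_work(item)
+#       bar.next()
+#   bar.finish()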

BIN
venv/lib/python2.7/site-packages/pip/_vendor/progress/__init__.pyc


+ 86 - 0
venv/lib/python2.7/site-packages/pip/_vendor/progress/bar.py

@@ -0,0 +1,86 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+
+
+from . import Progress
+from .helpers import WritelnMixin
+
+
+class Bar(WritelnMixin, Progress):
+    width = 32
+    message = ''
+    suffix = '%(index)d/%(max)d'
+    bar_prefix = ' |'
+    bar_suffix = '| '
+    empty_fill = ' '
+    fill = '#'
+    hide_cursor = True
+
+    def update(self):
+        filled_length = int(self.width * self.progress)
+        empty_length = self.width - filled_length
+
+        message = self.message % self
+        bar = self.fill * filled_length
+        empty = self.empty_fill * empty_length
+        suffix = self.suffix % self
+        line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix,
+                        suffix])
+        self.writeln(line)
+
+
+class ChargingBar(Bar):
+    suffix = '%(percent)d%%'
+    bar_prefix = ' '
+    bar_suffix = ' '
+    empty_fill = '∙'
+    fill = '█'
+
+
+class FillingSquaresBar(ChargingBar):
+    empty_fill = '▢'
+    fill = '▣'
+
+
+class FillingCirclesBar(ChargingBar):
+    empty_fill = '◯'
+    fill = '◉'
+
+
+class IncrementalBar(Bar):
+    phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')
+
+    def update(self):
+        nphases = len(self.phases)
+        expanded_length = int(nphases * self.width * self.progress)
+        filled_length = int(self.width * self.progress)
+        empty_length = self.width - filled_length
+        phase = expanded_length - (filled_length * nphases)
+
+        message = self.message % self
+        bar = self.phases[-1] * filled_length
+        current = self.phases[phase] if phase > 0 else ''
+        empty = self.empty_fill * max(0, empty_length - len(current))
+        suffix = self.suffix % self
+        line = ''.join([message, self.bar_prefix, bar, current, empty,
+                        self.bar_suffix, suffix])
+        self.writeln(line)
+
+
+class ShadyBar(IncrementalBar):
+    phases = (' ', '░', '▒', '▓', '█')
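
For reference, a usage sketch of the bars above. `IncrementalBar.update()` subdivides each of the `width` cells into `len(phases) - 1` sub-steps, so the leading cell advances through the partial-block characters before filling to `█`; the `%(...)s` placeholders in `suffix` resolve through `Infinite.__getitem__`. The import path is the vendored one assumed above; the message and suffix strings are illustrative:

    # Sketch only: rendering an IncrementalBar on a tty.
    from time import sleep

    from pip._vendor.progress.bar import IncrementalBar

    bar = IncrementalBar('Downloading', max=20,
                         suffix='%(percent)d%% eta %(eta_td)s')
    for _ in range(20):
        sleep(0.05)          # stand-in for real work
        bar.next()           # update() recomputes filled/partial/empty cells
    bar.finish()             # WritelnMixin.finish prints the trailing newline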

BIN
venv/lib/python2.7/site-packages/pip/_vendor/progress/bar.pyc


+ 49 - 0
venv/lib/python2.7/site-packages/pip/_vendor/progress/counter.py

@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import unicode_literals
+
+from . import Infinite, Progress
+from .helpers import WriteMixin
+
+
+class Counter(WriteMixin, Infinite):
+    message = ''
+    hide_cursor = True
+
+    def update(self):
+        self.write(str(self.index))
+
+
+class Countdown(WriteMixin, Progress):
+    hide_cursor = True
+
+    def update(self):
+        self.write(str(self.remaining))
+
+
+class Stack(WriteMixin, Progress):
+    phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
+    hide_cursor = True
+
+    def update(self):
+        nphases = len(self.phases)
+        i = min(nphases - 1, int(self.progress * nphases))
+        self.write(self.phases[i])
+
+
+class Pie(Stack):
+    phases = ('○', '◔', '◑', '◕', '●')
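
A sketch of the single-character widgets above: `Stack`/`Pie` map `progress` (0 to 1) onto an index into `phases`, clamped to the last phase, while `Counter` simply rewrites `index` in place. Assumes the same vendored import path as the earlier sketches:

    # Sketch only: a one-character pie that fills as progress advances.
    from time import sleep

    from pip._vendor.progress.counter import Pie

    pie = Pie(max=4)         # WriteMixin prints the message once, then overwrites
    for _ in range(4):
        sleep(0.1)
        pie.next()           # update() writes phases[min(4, int(progress * 5))]
    pie.finish()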

BIN
venv/lib/python2.7/site-packages/pip/_vendor/progress/counter.pyc


+ 92 - 0
venv/lib/python2.7/site-packages/pip/_vendor/progress/helpers.py

@@ -0,0 +1,92 @@
+# Copyright (c) 2012 Giorgos Verigakis <verigak@gmail.com>
+#
+# Permission to use, copy, modify, and distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+from __future__ import print_function
+from __future__ import unicode_literals
+
+
+HIDE_CURSOR = '\x1b[?25l'
+SHOW_CURSOR = '\x1b[?25h'
+
+
+class WriteMixin(object):
+    hide_cursor = False
+
+    def __init__(self, message=None, **kwargs):
+        super(WriteMixin, self).__init__(**kwargs)
+        self._width = 0
+        if message:
+            self.message = message
+
+        if self.file.isatty():
+            if self.hide_cursor:
+                print(HIDE_CURSOR, end='', file=self.file)
+            print(self.message, end='', file=self.file)
+            self.file.flush()
+
+    def write(self, s):
+        if self.file.isatty():
+            b = '\b' * self._width
+            c = s.ljust(self._width)
+            print(b + c, end='', file=self.file)
+            self._width = max(self._width, len(s))
+            self.file.flush()
+
+    def finish(self):
+        if self.file.isatty() and self.hide_cursor:
+            print(SHOW_CURSOR, end='', file=self.file)
+
+
+class WritelnMixin(object):
+    hide_cursor = False
+
+    def __init__(self, message=None, **kwargs):
+        super(WritelnMixin, self).__init__(**kwargs)
+        if message:
+            self.message = message
+
+        if self.file.isatty() and self.hide_cursor:
+            print(HIDE_CURSOR, end='', file=self.file)
+
+    def clearln(self):
+        if self.file.isatty():
+            print('\r\x1b[K', end='', file=self.file)
+
+    def writeln(self, line):
+        if self.file.isatty():
+            self.clearln()
+            print(line, end='', file=self.file)
+            self.file.flush()
+
+    def finish(self):
+        if self.file.isatty():
+            print(file=self.file)
+            if self.hide_cursor:
+                print(SHOW_CURSOR, end='', file=self.file)
+
+
+from signal import signal, SIGINT
+from sys import exit
+
+
+class SigIntMixin(object):
+    """Registers a signal handler that calls finish on SIGINT"""
+
+    def __init__(self, *args, **kwargs):
+        super(SigIntMixin, self).__init__(*args, **kwargs)
+        signal(SIGINT, self._sigint_handler)
+
+    def _sigint_handler(self, signum, frame):
+        self.finish()
+        exit(0)
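
The mixins above define the terminal contract: a concrete class provides `file`, `message`, and an `update()` that calls `write()` or `writeln()`. `WriteMixin.write()` repaints in place by emitting `'\b'` backspaces plus left-justified padding, while `WritelnMixin` clears the whole line with `'\r\x1b[K'` (carriage return, then erase to end of line). A hedged sketch of a custom widget built on these internals; the class name `Percent` is hypothetical, and output only appears on a tty because of the `isatty()` guards:

    # Sketch only: combining WriteMixin with Progress to repaint a percentage.
    from pip._vendor.progress import Progress
    from pip._vendor.progress.helpers import WriteMixin

    class Percent(WriteMixin, Progress):
        message = 'done: '   # printed once by WriteMixin.__init__

        def update(self):
            # write() overwrites the previous value in place on each call
            self.write('%d%%' % self.percent)

    p = Percent(max=8)
    for _ in range(8):
        p.next()
    p.finish()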

BIN
venv/lib/python2.7/site-packages/pip/_vendor/progress/helpers.pyc


Some files were not shown because the diff is too large