Repository 'genome_diversity'
hg clone https://toolshed.g2.bx.psu.edu/repos/miller-lab/genome_diversity

Changeset 32:03c22b722882 (2013-09-20)
Previous changeset 31:a631c2f6d913 (2013-09-20) Next changeset 33:5064f618ec1c (2013-09-20)
Commit message:
remove BeautifulSoup dependency
modified:
pca.xml
population_structure.xml
tool_dependencies.xml
added:
BeautifulSoup.py
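In effect, this changeset stops resolving BeautifulSoup through a Tool Shed package and instead ships BeautifulSoup.py inside the repository, next to the tool wrappers that need it. The wrappers themselves are not part of this diff, so the following Python sketch of how one of them might pick up the bundled copy is an illustration only; the path handling and fallback are assumptions, not code from this repository.

    # Hypothetical wrapper-side import (Python 2, matching the bundled module):
    # make the tool's own directory, where BeautifulSoup.py now lives,
    # searchable ahead of any system-wide copy.
    import os
    import sys

    sys.path.insert(0, os.path.dirname(os.path.abspath(__file__)))

    try:
        from BeautifulSoup import BeautifulSoup  # bundled 3.2.1 module
    except ImportError:
        BeautifulSoup = None  # let the caller report the missing parser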
diff -r a631c2f6d913 -r 03c22b722882 BeautifulSoup.py
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/BeautifulSoup.py Fri Sep 20 13:54:23 2013 -0400
@@ -0,0 +1,2017 @@
+"""Beautiful Soup
+Elixir and Tonic
+"The Screen-Scraper's Friend"
+http://www.crummy.com/software/BeautifulSoup/
+
+Beautiful Soup parses a (possibly invalid) XML or HTML document into a
+tree representation. It provides methods and Pythonic idioms that make
+it easy to navigate, search, and modify the tree.
+
+A well-formed XML/HTML document yields a well-formed data
+structure. An ill-formed XML/HTML document yields a correspondingly
+ill-formed data structure. If your document is only locally
+well-formed, you can use this library to find and process the
+well-formed part of it.
+
+Beautiful Soup works with Python 2.2 and up. It has no external
+dependencies, but you'll have more success at converting data to UTF-8
+if you also install these three packages:
+
+* chardet, for auto-detecting character encodings
+  http://chardet.feedparser.org/
+* cjkcodecs and iconv_codec, which add more encodings to the ones supported
+  by stock Python.
+  http://cjkpython.i18n.org/
+
+Beautiful Soup defines classes for two main parsing strategies:
+
+ * BeautifulStoneSoup, for parsing XML, SGML, or your domain-specific
+   language that kind of looks like XML.
+
+ * BeautifulSoup, for parsing run-of-the-mill HTML code, be it valid
+   or invalid. This class has web browser-like heuristics for
+   obtaining a sensible parse tree in the face of common HTML errors.
+
+Beautiful Soup also defines a class (UnicodeDammit) for autodetecting
+the encoding of an HTML or XML document, and converting it to
+Unicode. Much of this code is taken from Mark Pilgrim's Universal Feed Parser.
+
+For more than you ever wanted to know about Beautiful Soup, see the
+documentation:
+http://www.crummy.com/software/BeautifulSoup/documentation.html
+
+Here, have some legalese:
+
+Copyright (c) 2004-2010, Leonard Richardson
+
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+  * Redistributions of source code must retain the above copyright
+    notice, this list of conditions and the following disclaimer.
+
+  * Redistributions in binary form must reproduce the above
+    copyright notice, this list of conditions and the following
+    disclaimer in the documentation and/or other materials provided
+    with the distribution.
+
+  * Neither the name of the the Beautiful Soup Consortium and All
+    Night Kosher Bakery nor the names of its contributors may be
+    used to endorse or promote products derived from this software
+    without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE, DAMMIT.
+
+"""
+from __future__ import generators
+
+__author__ = "Leonard Richardson (leonardr@segfault.org)"
+__version__ = "3.2.1"
+__copyright__ = "Copyright (c) 2004-2012 Leonard Richardson"
+__license__ = "New-style BSD"
+
+from sgmllib import SGMLParser, SGMLParseError
+import codecs
+import markupbase
+import types
+import re
+import sgmllib
+try:
+  from htmlentitydefs import name2codepoint
+except ImportError:
+  name2codepoint = {}
+try:
+    set
+except NameError:
+    from sets import Set as set
+
+#These hacks make Beautiful Soup able to parse XML with namespaces
+sgmllib.tagfind = re.co
[... diff truncated by the changeset viewer ...]
 'utf-32', 'utf_16', 'utf_32',
+                                 'utf16', 'u16')):
+                xml_encoding = sniffed_xml_encoding
+        return xml_data, xml_encoding, sniffed_xml_encoding
+
+
+    def find_codec(self, charset):
+        return self._codec(self.CHARSET_ALIASES.get(charset, charset)) \
+               or (charset and self._codec(charset.replace("-", ""))) \
+               or (charset and self._codec(charset.replace("-", "_"))) \
+               or charset
+
+    def _codec(self, charset):
+        if not charset: return charset
+        codec = None
+        try:
+            codecs.lookup(charset)
+            codec = charset
+        except (LookupError, ValueError):
+            pass
+        return codec
+
+    EBCDIC_TO_ASCII_MAP = None
+    def _ebcdic_to_ascii(self, s):
+        c = self.__class__
+        if not c.EBCDIC_TO_ASCII_MAP:
+            emap = (0,1,2,3,156,9,134,127,151,141,142,11,12,13,14,15,
+                    16,17,18,19,157,133,8,135,24,25,146,143,28,29,30,31,
+                    128,129,130,131,132,10,23,27,136,137,138,139,140,5,6,7,
+                    144,145,22,147,148,149,150,4,152,153,154,155,20,21,158,26,
+                    32,160,161,162,163,164,165,166,167,168,91,46,60,40,43,33,
+                    38,169,170,171,172,173,174,175,176,177,93,36,42,41,59,94,
+                    45,47,178,179,180,181,182,183,184,185,124,44,37,95,62,63,
+                    186,187,188,189,190,191,192,193,194,96,58,35,64,39,61,34,
+                    195,97,98,99,100,101,102,103,104,105,196,197,198,199,200,
+                    201,202,106,107,108,109,110,111,112,113,114,203,204,205,
+                    206,207,208,209,126,115,116,117,118,119,120,121,122,210,
+                    211,212,213,214,215,216,217,218,219,220,221,222,223,224,
+                    225,226,227,228,229,230,231,123,65,66,67,68,69,70,71,72,
+                    73,232,233,234,235,236,237,125,74,75,76,77,78,79,80,81,
+                    82,238,239,240,241,242,243,92,159,83,84,85,86,87,88,89,
+                    90,244,245,246,247,248,249,48,49,50,51,52,53,54,55,56,57,
+                    250,251,252,253,254,255)
+            import string
+            c.EBCDIC_TO_ASCII_MAP = string.maketrans( \
+            ''.join(map(chr, range(256))), ''.join(map(chr, emap)))
+        return s.translate(c.EBCDIC_TO_ASCII_MAP)
+
+    MS_CHARS = { '\x80' : ('euro', '20AC'),
+                 '\x81' : ' ',
+                 '\x82' : ('sbquo', '201A'),
+                 '\x83' : ('fnof', '192'),
+                 '\x84' : ('bdquo', '201E'),
+                 '\x85' : ('hellip', '2026'),
+                 '\x86' : ('dagger', '2020'),
+                 '\x87' : ('Dagger', '2021'),
+                 '\x88' : ('circ', '2C6'),
+                 '\x89' : ('permil', '2030'),
+                 '\x8A' : ('Scaron', '160'),
+                 '\x8B' : ('lsaquo', '2039'),
+                 '\x8C' : ('OElig', '152'),
+                 '\x8D' : '?',
+                 '\x8E' : ('#x17D', '17D'),
+                 '\x8F' : '?',
+                 '\x90' : '?',
+                 '\x91' : ('lsquo', '2018'),
+                 '\x92' : ('rsquo', '2019'),
+                 '\x93' : ('ldquo', '201C'),
+                 '\x94' : ('rdquo', '201D'),
+                 '\x95' : ('bull', '2022'),
+                 '\x96' : ('ndash', '2013'),
+                 '\x97' : ('mdash', '2014'),
+                 '\x98' : ('tilde', '2DC'),
+                 '\x99' : ('trade', '2122'),
+                 '\x9a' : ('scaron', '161'),
+                 '\x9b' : ('rsaquo', '203A'),
+                 '\x9c' : ('oelig', '153'),
+                 '\x9d' : '?',
+                 '\x9e' : ('#x17E', '17E'),
+                 '\x9f' : ('Yuml', ''),}
+
+#######################################################################
+
+
+#By default, act as an HTML pretty-printer.
+if __name__ == '__main__':
+    import sys
+    soup = BeautifulSoup(sys.stdin)
+    print soup.prettify()
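The docstring above summarizes the 3.x API that the repository now bundles: BeautifulSoup for lenient HTML parsing, BeautifulStoneSoup for XML-like markup, and UnicodeDammit for guessing character encodings. The short Python 2 sketch below mirrors the pretty-printer in the file's __main__ block; the sample markup is invented for illustration.

    # Minimal use of the bundled BeautifulSoup 3.x API (Python 2).
    from BeautifulSoup import BeautifulSoup, UnicodeDammit

    html = "<table><tr><td>sample</td><td>row</td></table>"  # invented markup

    soup = BeautifulSoup(html)      # lenient parse despite the missing </tr>
    print [td.string for td in soup.findAll('td')]
    print soup.prettify()           # same idea as the module's __main__ block

    # UnicodeDammit converts byte strings of unknown encoding to unicode.
    dammit = UnicodeDammit("caf\xc3\xa9")
    print dammit.unicode, dammit.originalEncoding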
diff -r a631c2f6d913 -r 03c22b722882 pca.xml
--- a/pca.xml Fri Sep 20 13:25:27 2013 -0400
+++ b/pca.xml Fri Sep 20 13:54:23 2013 -0400
@@ -16,7 +16,9 @@
   <requirements>
     <requirement type="package" version="5.0.1">eigensoft</requirement>
     <requirement type="package" version="0.1">gd_c_tools</requirement>
+    <!--
     <requirement type="package" version="3.2.1">beautifulsoup</requirement>
+    -->
   </requirements>
 
   <!--
diff -r a631c2f6d913 -r 03c22b722882 population_structure.xml
--- a/population_structure.xml Fri Sep 20 13:25:27 2013 -0400
+++ b/population_structure.xml Fri Sep 20 13:54:23 2013 -0400
@@ -14,9 +14,11 @@
     <data name="output" format="html" />
   </outputs>
 
+  <!--
   <requirements>
     <requirement type="package" version="3.2.1">beautifulsoup</requirement>
   </requirements>
+  -->
 
   <!--
   <tests>
diff -r a631c2f6d913 -r 03c22b722882 tool_dependencies.xml
--- a/tool_dependencies.xml Fri Sep 20 13:25:27 2013 -0400
+++ b/tool_dependencies.xml Fri Sep 20 13:54:23 2013 -0400
@@ -1,8 +1,10 @@
 <?xml version="1.0"?>
 <tool_dependency>
+  <!--
   <package name="beautifulsoup" version="3.2.1">
     <repository prior_installation_required="True" toolshed="http://toolshed.g2.bx.psu.edu/" owner="miller-lab" name="package_beautifulsoup_3_2_1" changeset_revision="83c21b81ee9d" />
   </package>
+  -->
   <package name="eigensoft" version="5.0.1">
     <repository prior_installation_required="True" toolshed="http://toolshed.g2.bx.psu.edu/" owner="miller-lab" name="package_eigensoft_5_0_1" changeset_revision="02f04f3579b5" />
   </package>