├── static ├── styles │ ├── screen-switcher-default.css │ ├── print.css │ ├── netscape4.css │ ├── defaultfonts.css │ ├── largestyles.css │ └── styles.css └── images │ ├── trans.gif │ └── python-logo.gif ├── .gitignore ├── docker-compose.yml ├── code ├── README.pydotorg ├── python │ ├── opml.xml.tmpl │ ├── rss20.xml.tmpl │ ├── foafroll.xml.tmpl │ ├── rss10.xml.tmpl │ ├── config.ini │ └── index.html.tmpl ├── TODO ├── planet │ ├── atomstyler.py │ ├── cache.py │ ├── compat_logging │ │ ├── config.py │ │ └── handlers.py │ └── sanitize.py ├── planet.py └── planet-cache.py ├── Dockerfile.deploy ├── .pre-commit-config.yaml ├── .github └── workflows │ ├── lint.yml │ └── publish-docker.yml ├── config ├── opml.xml.tmpl ├── rss20.xml.tmpl ├── foafroll.xml.tmpl ├── rss10.xml.tmpl ├── sort-ini.py ├── jython.ini ├── summary.html.tmpl ├── titles_only.html.tmpl └── index.html.tmpl ├── README.rst ├── Dockerfile ├── PULL_REQUEST_TEMPLATE.md └── ISSUE_TEMPLATE.md /static/styles/screen-switcher-default.css: -------------------------------------------------------------------------------- 1 | @import url(../styles/styles.css); 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /config/*.tmplc 2 | *.pyc 3 | *.pyo 4 | /_cache 5 | /_output 6 | /.venv 7 | -------------------------------------------------------------------------------- /static/images/trans.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python/planet/main/static/images/trans.gif -------------------------------------------------------------------------------- /static/images/python-logo.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/python/planet/main/static/images/python-logo.gif 
-------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: '3.7' 2 | services: 3 | planet: 4 | build: . 5 | command: ["/start.sh"] 6 | ports: 7 | - '8080:8080' 8 | -------------------------------------------------------------------------------- /code/README.pydotorg: -------------------------------------------------------------------------------- 1 | 2 | This holds a copy of the PlanetPlanet code (www.planetplanet.org). 3 | This version is a copy of the nightly tarball, taken on 2005-05-15. 4 | 5 | --amk 6 | -------------------------------------------------------------------------------- /Dockerfile.deploy: -------------------------------------------------------------------------------- 1 | FROM python:2.7.18 2 | 3 | ENV PYTHONUNBUFFERED=1 4 | ENV PYTHONDONTWRITEBYTECODE=1 5 | 6 | RUN mkdir /planet 7 | WORKDIR /planet 8 | 9 | COPY code /planet/code 10 | COPY config /planet/config 11 | COPY static /planet/static 12 | 13 | 14 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | - repo: local 3 | hooks: 4 | - id: sort-ini 5 | name: Sort config.ini 6 | entry: python3 config/sort-ini.py config/config.ini 7 | language: python 8 | pass_filenames: false 9 | files: ^config/config.ini$ 10 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: [push, pull_request, workflow_dispatch] 4 | 5 | permissions: {} 6 | 7 | env: 8 | FORCE_COLOR: 1 9 | RUFF_OUTPUT_FORMAT: github 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | persist-credentials: false 19 | - uses: 
actions/setup-python@v5 20 | with: 21 | python-version: "3.x" 22 | - uses: tox-dev/action-pre-commit-uv@v1 23 | -------------------------------------------------------------------------------- /config/opml.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | <TMPL_VAR name> 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | " xmlUrl=""/> 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /code/python/opml.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | <TMPL_VAR name> 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | " xmlUrl=""/> 14 | 15 | 16 | 17 | -------------------------------------------------------------------------------- /static/styles/print.css: -------------------------------------------------------------------------------- 1 | #left-hand-navigation, #google, #document-navigation, #border-corner, #smallcorner, #searchcorner, #logo, #search, #utility-menu, #skiptonav 2 | { 3 | display:none; 4 | } 5 | 6 | #content-body 7 | { 8 | font-family: Georgia, "Bitstream Vera Serif", "New York", Palatino, serif; 9 | font-size:11pt; 10 | } 11 | 12 | #content-body a 13 | { 14 | color: #000000; 15 | text-decoration:none; 16 | display:inline; 17 | } 18 | 19 | pre { 20 | font-size:10pt; 21 | } 22 | 23 | #body-main a 24 | { 25 | font-weight:bold; 26 | } 27 | 28 | 29 | h1 30 | { 31 | font-size:14pt; 32 | } 33 | 34 | iframe { 35 | display:none; 36 | } 37 | 38 | #footer { 39 | display:none; 40 | } 41 | -------------------------------------------------------------------------------- /code/python/rss20.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <TMPL_VAR name> 6 | 7 | en 8 | - 9 | 10 | 11 | 12 | <TMPL_VAR channel_name><TMPL_IF title>: <TMPL_VAR title></TMPL_IF> 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 
-------------------------------------------------------------------------------- /config/rss20.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <TMPL_VAR name ESCAPE="HTML"> 6 | 7 | en 8 | - 9 | 10 | 11 | 12 | <TMPL_VAR channel_name ESCAPE="HTML"><TMPL_IF title>: <TMPL_VAR title ESCAPE="HTML"></TMPL_IF> 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | Maintaining Planet Python (https://planetpython.org): 2 | 3 | * Requests come in to the github issues (https://github.com/python/planet/issues) 4 | 5 | * Check the feed for validity using the services: https://validator.w3.org/feed/ or https://www.rssboard.org/rss-validator/ 6 | 7 | * Check the feed for: Python-specific contents (often we 8 | have to ask for a Python specific feed), and English-language 9 | content (ask for an English-language feed). 10 | 11 | * Add the feed URL to a text config file (`config/config.ini `_):: 12 | 13 | [http://example.org/feed/url/] 14 | name = Author/Group/Project Name 15 | 16 | Sort the config file:: 17 | 18 | cd config 19 | python sort-ini.py 20 | 21 | Commit the config file to the repo. 22 | 23 | The Planet code is under the code/ directory. See code/README.pydotorg 24 | for details. 
25 | -------------------------------------------------------------------------------- /config/foafroll.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 9 | 10 | 11 | 12 | " /> 13 | 14 | 15 | 16 | 17 | 18 | 19 | "> 20 | 21 | 22 | " /> 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /code/python/foafroll.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 9 | 10 | 11 | 12 | " /> 13 | 14 | 15 | 16 | 17 | 18 | 19 | "> 20 | 21 | 22 | " /> 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ubuntu:bionic 2 | 3 | RUN apt-get update \ 4 | && apt-get install -y --no-install-recommends \ 5 | python-pip python-setuptools python-wheel \ 6 | locales tzdata \ 7 | ca-certificates \ 8 | strace gdb lsof locate net-tools htop iputils-ping dnsutils \ 9 | python2.7-dbg python2.7 libpython2.7 python-dbg libpython-dbg \ 10 | curl nano vim tree less telnet patch \ 11 | graphviz sqlite3 \ 12 | dumb-init \ 13 | && rm -rf /var/lib/apt/lists/* 14 | 15 | RUN locale-gen en_US.UTF-8 16 | 17 | COPY /code /planet/code 18 | COPY /config /planet/config 19 | COPY /static /planet/static 20 | 21 | #RUN mkdir /srv/planetpython.org/ 22 | VOLUME /srv/planetpython.org/ 23 | WORKDIR /planet 24 | 25 | ENTRYPOINT ["dumb-init"] 26 | 27 | RUN echo "#!/bin/bash -eux \n\ 28 | python2.7 code/planet.py config/config.ini \n\ 29 | cd /srv/planetpython.org/ \n\ 30 | python2.7 -mSimpleHTTPServer 8080 \n\ 31 | "> /start.sh 32 | RUN chmod +x /start.sh 33 | EXPOSE 8080 34 | 35 | -------------------------------------------------------------------------------- /code/python/rss10.xml.tmpl: -------------------------------------------------------------------------------- 
1 | 2 | 9 | "> 10 | <TMPL_VAR name> 11 | 12 | - 13 | 14 | 15 | 16 | 17 | " /> 18 | 19 | 20 | 21 | 22 | 23 | 24 | "> 25 | <TMPL_VAR channel_name><TMPL_IF title>: <TMPL_VAR title></TMPL_IF> 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /config/rss10.xml.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 9 | "> 10 | <TMPL_VAR name ESCAPE="HTML"> 11 | 12 | - 13 | 14 | 15 | 16 | 17 | " /> 18 | 19 | 20 | 21 | 22 | 23 | 24 | "> 25 | <TMPL_VAR channel_name ESCAPE="HTML"><TMPL_IF title>: <TMPL_VAR title ESCAPE="HTML"></TMPL_IF> 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | -------------------------------------------------------------------------------- /PULL_REQUEST_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | !!! Please choose one template below ADD FEED, EDIT FEED or INFORM a BUG/PROBLEM. 2 | PLEASE: Remove this headline and the unused templates!!! 3 | 4 | 5 | # ADD FEED / EDIT FEED 6 | ------------------------------------------------------------------------------- 7 | 8 | Hi, I want to add my feed to the Python Planet. or I want to change my current feed url from CURRENT_URL_HERE to NEW_URL_HERE 9 | 10 | ## I checked the following required validations: (mark all 5 with [x]) 11 | 12 | 1. [ ] My feed is valid, I checked using https://validator.w3.org/feed/check.cgi?url=MY_FEED_URL and it is valid! 13 | 2. [ ] My feed is a **Python Specific** feed, e.g: I am proposing the filtered tag or categorized feed url 14 | 3. [ ] I only post content to this feed which is related to the Python language and its components and libraries. Or content that I consider interesting for the Python community. 15 | 4. [ ] I am aware that once my feed is added it can take a few hours to start being fetched (according to the server update cycle) 16 | 5. 
[ ] My feed contains only content in English language. 17 | 18 | Thanks in advance for adding my feed to the PythonPlanet! :+1: 19 | 20 | > NOTE: If you are adding a podcast feed please validate using http://castfeedvalidator.com/ 21 | 22 | 23 | -------------------------------------------------------------------------------- /config/sort-ini.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | import configparser 5 | 6 | if len(sys.argv) > 1: 7 | filename = sys.argv[1] 8 | else: 9 | filename = 'config.ini' 10 | 11 | oconfig = configparser.RawConfigParser() 12 | oconfig.read(filename) 13 | 14 | # This part will destroy the configuration if there's a crash while 15 | # writing the output. We're in an GIT-controlled directory, so 16 | # I didn't care enough to fix this. 17 | with open(filename, 'w', encoding='utf-8') as fd: 18 | # Copy of write() code that sorts output by section 19 | if oconfig._defaults: 20 | fd.write("[%s]\n" % DEFAULTSECT) 21 | for (key, value) in oconfig._defaults.items(): 22 | fd.write("%s = %s\n" % (key, str(value).replace('\n', '\n\t'))) 23 | fd.write("\n") 24 | 25 | result = {} 26 | for section in sorted(oconfig._sections): 27 | if section == 'Planet': 28 | fd.write("[%s]\n" % section) 29 | for (key, value) in oconfig._sections[section].items(): 30 | if key != "__name__": 31 | if section == 'Planet': 32 | fd.write("%s = %s\n" % 33 | (key, str(value).replace('\n', '\n\t'))) 34 | else: 35 | result[value.replace('"', '')] = section 36 | if section == 'Planet': 37 | fd.write("\n") 38 | 39 | for key, value in sorted(result.items()): 40 | fd.write("[%s]\n" % value) 41 | name = key 42 | if "'" in key: 43 | name = '"%s"' % key 44 | fd.write("name = %s\n" % name) 45 | fd.write("\n") 46 | 47 | -------------------------------------------------------------------------------- /config/jython.ini: -------------------------------------------------------------------------------- 
1 | [Planet] 2 | date_format = %B %d, %Y %I:%M %p 3 | owner_name = Python Software Foundation 4 | log_level = DEBUG 5 | name = Planet Jython 6 | encoding = utf-8 7 | days_per_page = 30 8 | owner_email = planet@python.org 9 | new_feed_items = 1 10 | items_per_page = 25 11 | output_dir = /srv/planet.jython.org 12 | new_date_format = %B %d, %Y 13 | link = http://planet.jython.org/ 14 | template_files = config/index.html.tmpl config/rss20.xml.tmpl config/rss10.xml.tmpl config/opml.xml.tmpl config/foafroll.xml.tmpl config/summary.html.tmpl 15 | cache_directory = /srv/cache 16 | 17 | [http://amitksaha.blogspot.com/feeds/posts/default/-/Netbeans] 18 | name = Amit K. Saha 19 | 20 | [http://blog.leosoto.com/feeds/posts/default/-/jython] 21 | name = Leo Soto M. 22 | 23 | [http://blogs.sun.com/sundararajan/feed/entries/rss] 24 | name = A. Sundararajan 25 | 26 | [http://dunderboss.blogspot.com/feeds/posts/default/-/jython] 27 | name = Philip Jenvey 28 | 29 | [http://eternusuk.blogspot.com/feeds/posts/default] 30 | name = James Abley 31 | 32 | [http://fwierzbicki.blogspot.com/feeds/posts/default/-/jython] 33 | name = Frank Wierzbicki 34 | 35 | [http://gushieblog.blogspot.com/feeds/posts/default] 36 | name = Paul Drummond 37 | 38 | [http://henkenotes.blogspot.com/feeds/posts/default] 39 | name = Henrik Eriksson 40 | 41 | [http://jj-blogger.blogspot.com/feeds/posts/default] 42 | name = Josh Juneau 43 | 44 | [http://journal.thobe.org/feeds/posts/default] 45 | name = Tobias Ivarsson 46 | 47 | [http://jython.xhaus.com/?feed=rss2] 48 | name = Alan Kennedy 49 | 50 | [http://www.fishandcross.com/blog/?feed=rss2&cat=4] 51 | name = Ed Taekema 52 | 53 | [http://zyasoft.com/pythoneering/atom.xml] 54 | name = Jim Baker 55 | 56 | -------------------------------------------------------------------------------- /ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | !!! 
Please choose one template below ADD FEED, EDIT FEED or INFORM a BUG/PROBLEM. 2 | PLEASE: Remove this headline and the unused templates!!! 3 | 4 | 5 | # ADD FEED / EDIT FEED 6 | ------------------------------------------------------------------------------- 7 | 8 | Hi, I want to add my feed to the Python Planet or I want to change my current feed url from CURRENT_URL_HERE to NEW_URL_HERE 9 | 10 | **My Name/Blog Name**: MY_NAME_OR_BLOG_NAME_HERE (e.g John Doe Awesome Blog) 11 | 12 | **My Blog RSS or ATOM Python specific feed url**: MY_FEED_URL 13 | 14 | ## I checked the following required validations: (mark all 5 with [x]) 15 | 16 | 1. [ ] My feed is valid, I checked using https://validator.w3.org/feed/check.cgi?url=MY_FEED_URL and it is valid! 17 | 2. [ ] My feed is a **Python Specific** feed, e.g: I am proposing the filtered tag or categorized feed url 18 | 3. [ ] I only post content to this feed which is related to the Python language and its components and libraries. Or content that I consider interesting for the Python community. 19 | 4. [ ] I am aware that once my feed is added it can take a few hours to start being fetched (according to the server update cycle) 20 | 5. [ ] My feed contains only content in English language. 21 | 22 | Thanks in advance for adding my feed to the PythonPlanet! :+1: 23 | 24 | > NOTE: If you are adding a podcast feed please validate using http://castfeedvalidator.com/ 25 | 26 | # REPORT A BUG/PROBLEM 27 | ------------------------------------------------------------------------------- 28 | 29 | Hi, I want to notify you of a bug/problem that is happening on the Planet. 30 | 31 | I am using: 32 | **O.S**: 33 | **Browser**: 34 | **Platform**: (mobile, tablet, desktop) 35 | 36 | ## Problem 37 | DESCRIBE_THE_PROBLEM_HERE 38 | 39 | ## Details 40 | INCLUDE_OUTPUTS_OR_SCREENSHOTS_HERE 41 | 42 | Thanks. 43 | 44 | --------------------------------------------------------------------------------- 45 | 46 | 47 | !!! 
PLEASE remove the unused templates above !!! 48 | -------------------------------------------------------------------------------- /.github/workflows/publish-docker.yml: -------------------------------------------------------------------------------- 1 | name: Publish 2 | on: 3 | push: 4 | branches: 5 | - 'main' 6 | permissions: 7 | contents: write 8 | packages: write 9 | jobs: 10 | release-to-ghcr: 11 | concurrency: 12 | group: release-to-ghcr 13 | cancel-in-progress: false 14 | runs-on: ${{ matrix.config.os }} 15 | strategy: 16 | matrix: 17 | config: 18 | - { os: ubuntu-24.04, arch: amd64 } 19 | - { os: ubuntu-24.04-arm, arch: arm64 } 20 | steps: 21 | # Checkout push-to-registry action github repository 22 | - name: Checkout Push to Registry action 23 | uses: actions/checkout@v4 24 | - name: Set up Docker Buildx 25 | uses: docker/setup-buildx-action@v3 26 | - name: Login To GHCR 27 | uses: docker/login-action@v3 28 | with: 29 | registry: ghcr.io 30 | username: ${{ github.actor }} 31 | password: ${{ secrets.GITHUB_TOKEN }} 32 | - name: Build and push 33 | uses: docker/build-push-action@v6 34 | with: 35 | context: . 
36 | file: Dockerfile.deploy 37 | push: true 38 | tags: | 39 | ghcr.io/python/planetpython:${{ github.sha }}-${{ matrix.config.arch }} 40 | ghcr.io/python/planetpython:latest-${{ matrix.config.arch }} 41 | 42 | create-manifests: 43 | runs-on: ubuntu-24.04-arm 44 | needs: [release-to-ghcr] 45 | 46 | steps: 47 | - name: Checkout Push to Registry action 48 | uses: actions/checkout@v4 49 | - name: Set up Docker Buildx 50 | uses: docker/setup-buildx-action@v3 51 | - name: Login To GHCR 52 | uses: docker/login-action@v3 53 | with: 54 | registry: ghcr.io 55 | username: ${{ github.actor }} 56 | password: ${{ secrets.GITHUB_TOKEN }} 57 | - name: Create SHA image and push 58 | run: | 59 | docker buildx imagetools create \ 60 | --tag ghcr.io/python/planetpython:${{ github.sha }} \ 61 | ghcr.io/python/planetpython:${{ github.sha }}-amd64 \ 62 | ghcr.io/python/planetpython:${{ github.sha }}-arm64 63 | 64 | - name: Create latest manifest and push 65 | run: | 66 | docker buildx imagetools create \ 67 | --tag ghcr.io/python/planetpython:latest \ 68 | ghcr.io/python/planetpython:latest-amd64 \ 69 | ghcr.io/python/planetpython:latest-arm64 70 | -------------------------------------------------------------------------------- /code/python/config.ini: -------------------------------------------------------------------------------- 1 | # Planet configuration file 2 | 3 | # Every planet needs a [Planet] section 4 | [Planet] 5 | # name: Your planet's name 6 | # link: Link to the main page 7 | # owner_name: Your name 8 | # owner_email: Your e-mail address 9 | name = Planet Python 10 | link = http://planet.python.org/ 11 | owner_name = PSF 12 | owner_email = webmaster@python.org 13 | 14 | # cache_directory: Where cached feeds are stored 15 | # log_level: One of DEBUG, INFO, WARNING, ERROR or CRITICAL 16 | cache_directory = /data/planet/cache 17 | log_level = DEBUG 18 | 19 | # template_files: Space-separated list of output template files 20 | template_files = examples/index.html.tmpl 
examples/rss20.xml.tmpl examples/rss10.xml.tmpl examples/opml.xml.tmpl examples/foafroll.xml.tmpl 21 | 22 | # The following provide defaults for each template: 23 | # output_dir: Directory to place output files 24 | # items_per_page: How many items to put on each page 25 | # days_per_page: How many complete days of posts to put on each page 26 | # This is the absolute, hard limit (over the item limit) 27 | # date_format: strftime format for the default 'date' template variable 28 | output_dir = /data/planet/output 29 | items_per_page = 60 30 | days_per_page = 0 31 | date_format = %B %d, %Y %I:%M %p 32 | 33 | # To define a different value for a particular template you may create 34 | # a section with the same name as the template file's filename (as given 35 | # in template_files). 36 | # 37 | # [examples/rss10.xml.tmpl] 38 | # items_per_page = 30 39 | 40 | 41 | # Any other section defines a feed to subscribe to. The section title 42 | # (in the []s) is the URI of the feed itself. A section can also be 43 | # have any of the following options: 44 | # 45 | # name: Name of the feed (defaults to the title found in the feed) 46 | # offset: Number of hours (+ or -) the feed's times tend to be out 47 | # 48 | # Additionally any other option placed here will be available in 49 | # the template (prefixed with channel_ for the Items loop). You can 50 | # define defaults for these in a [DEFAULT] section, for example 51 | # Planet Debian uses the following to define faces: 52 | # 53 | # [DEFAULT] 54 | # facewidth = 64 55 | # faceheight = 64 56 | # 57 | # [http://www.blog.com/rss] 58 | # face = foo.png 59 | # faceheight = 32 60 | # 61 | # The facewidth of the defined blog defaults to 64. 
62 | 63 | [http://www.amk.ca/diary/index.rss] 64 | name = Andrew Kuchling 65 | 66 | -------------------------------------------------------------------------------- /static/styles/netscape4.css: -------------------------------------------------------------------------------- 1 | #left-hand-navigation 2 | { 3 | /*/*//*/ 4 | position:absolute; 5 | left:5px; 6 | top:10em; 7 | z-index:1; 8 | font-family: Arial, Helvetica, sans-serif; 9 | font-size:0.75em; 10 | /* */ 11 | } 12 | 13 | #left-hand-navigation a 14 | { 15 | /*/*//*/ 16 | color: #3C4B6B; 17 | /* */ 18 | } 19 | 20 | #google 21 | { 22 | /*/*//*/ 23 | display:none; 24 | /* */ 25 | } 26 | 27 | #content-body 28 | { 29 | /*/*//*/ 30 | position:absolute; 31 | top:7em; 32 | left:9em; 33 | width:100%; 34 | z-index:1; 35 | font-family: Arial, Helvetica, sans-serif; 36 | font-size:0.82em; 37 | /* */ 38 | } 39 | 40 | #content-body a 41 | { 42 | /*/*//*/ 43 | color: #3C4B6B; 44 | /* */ 45 | } 46 | 47 | #document-navigation 48 | { 49 | /*/*//*/ 50 | position: relative; 51 | top:20px; 52 | left:0%; 53 | z-index:1; 54 | padding:10px; 55 | /* */ 56 | } 57 | 58 | #border-corner 59 | { 60 | /*/*//*/ 61 | display:none; 62 | /* */ 63 | } 64 | 65 | #smallcorner 66 | { 67 | /*/*//*/ 68 | display:none; 69 | /* */ 70 | } 71 | 72 | #searchcorner 73 | { 74 | /*/*//*/ 75 | display:none; 76 | /* */ 77 | } 78 | 79 | 80 | 81 | #logoheader 82 | { 83 | /*/*//*/ 84 | position:absolute; 85 | top:0; 86 | border:0 none; 87 | left:0; 88 | padding:10px; 89 | font-size: medium; 90 | z-index: 1 91 | /* */ 92 | } 93 | 94 | #logo 95 | { 96 | /*/*//*/ 97 | width:20px; 98 | /* */ 99 | } 100 | 101 | #logolink 102 | { 103 | /*/*//*/ 104 | border:none; 105 | /* */ 106 | } 107 | 108 | #search 109 | { 110 | /*/*//*/ 111 | border: none; 112 | padding-top:20px; 113 | font-family: Verdana, Helvetica, sans-serif; 114 | font-size:0.9em; 115 | padding-top:5em; 116 | /* */ 117 | } 118 | 119 | #skiptonav 120 | { 121 | /*/*//*/ 122 | border: 0px none; 123 | 
display:none; 124 | /* */ 125 | } 126 | 127 | #breadcrumb 128 | { 129 | /*/*//*/ 130 | position:absolute; 131 | top:0px; 132 | /* */ 133 | } 134 | 135 | #utility-menu 136 | { 137 | /*/*//*/ 138 | position:absolute; 139 | top:8em; 140 | font-family: Arial, Helvetica, sans-serif; 141 | font-size:0.75em; 142 | /* */ 143 | } 144 | 145 | #utility-menu a 146 | { 147 | /*/*//*/ 148 | color: #3C4B6B; 149 | /* */ 150 | } 151 | 152 | ul 153 | { 154 | /*/*//*/ 155 | list-style: none; 156 | padding:0; 157 | margin-left:-20px; 158 | /* */ 159 | } -------------------------------------------------------------------------------- /code/python/index.html.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | ### Planet HTML template. 5 | ### 6 | ### This is intended to demonstrate and document Planet's templating 7 | ### facilities, and at the same time provide a good base for you to 8 | ### modify into your own design. 9 | ### 10 | ### The output's a bit boring though, if you're after less documentation 11 | ### and more instant gratification, there's an example with a much 12 | ### prettier output in the fancy-examples/ directory of the Planet source. 13 | 14 | ### Lines like this are comments, and are automatically removed by the 15 | ### templating engine before processing. 16 | 17 | 18 | ### Planet makes a large number of variables available for your templates. 19 | ### See INSTALL for the complete list. The raw value can be placed in your 20 | ### output file using . We'll put the name of our 21 | ### Planet in the page title and again in an h1. 22 | 23 | 24 | <TMPL_VAR name> 25 | 26 | 27 | 28 | 29 | 30 |

31 | 32 | ### One of the two loops available is the Channels loop. This allows you 33 | ### to easily create a list of subscriptions, which is exactly what we'll do 34 | ### here. 35 | 36 | ### Note that we can also expand variables inside HTML tags, but we need 37 | ### to be cautious and HTML-escape any illegal characters using the form 38 | ### 39 | 40 |
41 |

Subscriptions

42 | 47 |
48 | 49 | ### The other loop is the Items loop, which will get iterated for each 50 | ### news item. 51 | 52 | 53 | 54 | ### Visually distinguish articles from different days by checking for 55 | ### the new_date flag. This demonstrates the ... 56 | ### check. 57 | 58 | 59 |

60 |
61 | 62 | ### Group consecutive articles by the same author together by checking 63 | ### for the new_channel flag. 64 | 65 | 66 |

" title="">

67 |
68 | 69 | 70 | 71 |

">

72 |
73 |

74 | 75 |

76 |

77 | ">by at 78 |

79 |
80 | 81 |
82 |

83 | Powered by Planet!
84 | Last updated: 85 |

86 | 87 | 88 | 89 | -------------------------------------------------------------------------------- /code/TODO: -------------------------------------------------------------------------------- 1 | Feature Goals for Planet 1.0 2 | ============================ 3 | 4 | * Store and use feed history 5 | 6 | We have a huge problem at the moment, in that we only display the current 7 | contents of the feeds we download. The old Netscape standard was 15 items 8 | in a feed, so many people still use that. Unfortunately, on sites like 9 | Slashdot, those 15 items move *very* quickly. So as they fall off the feed, 10 | they fall off your Planet aggregate. That is bad, bad, bad, and we need to 11 | solve it. Any ideas? There are potential bugs all over this. :-) 12 | 13 | * Add multiple keyword support 14 | 15 | I hadn't announced it yet, but I have added a simple 'keyword feeds' 16 | feature, which allows you to subscribe to feeds and only display items that 17 | mention your keyword in the title, summary or description. This is totally 18 | arse-kickingly rad, and demonstrated on my personal Planet. 19 | 20 | However, currently it only allows you to define one keyword phrase for each 21 | feed, which kinda sucks. If there's some quick-and-dirty boolean phrase 22 | parser, we could use it really well here, for instance: 23 | 24 | keyword = (gnome OR kde) NOT sucks 25 | 26 | Maybe that's overkill - we could just support multiple ORed keyword fields. 27 | 28 | * Fix stupid UTF-8 error with current keyword support 29 | 30 | For some reason, keyword support tweaks string encoding errors, such as: 31 | "WARNING:root:Item wasn't in UTF-8 or ISO-8859-1, replaced all non-ASCII 32 | characters." This is bad. 33 | 34 | * Allow templates to use extra feed elements 35 | 36 | See Frederic's thread about Freshmeat the other day. 
I don't want to 37 | special case everything, and I don't want to stomp on item variables we 38 | already have, but it would be nice to bring special things from feeds up 39 | into the template data structure. Should we only do this for namespaced 40 | elements (extensions)? 41 | 42 | * Fix intermittent timezone detection / arithmetic bugs 43 | 44 | From Nick Moffitt: "Right now there's something screwy in the way planet 45 | deals with time zones, so that all you Australians end up claiming the top 46 | of my planet while merkins I want to read end up shoved down a ways." 47 | 48 | * Allow display normalisation to specified timezone 49 | 50 | Some Planet admins would like their feed to be displayed in the local 51 | timezone, instead of UTC. 52 | 53 | * Add "sort by entry received date" option 54 | 55 | From Nick Moffitt: "Is there a way to get planet to not do entry-date 56 | placement at all? Like, always put new entries up in the order they were 57 | first seen by planet? I use mine to implement the "show me stuff I haven't 58 | already read yet" feature that pretty much all Web sites lack. 59 | 60 | Answer from Scott J. Remnant: "Yeah, this would be actually pretty easy ... 61 | it already has this code for entries missing times entirely, I'll add a 62 | config option to just outright ignore feed times and sort by seen order. 63 | I'll make it a per-feed option, maybe "sort_order = seen/time" and if you 64 | want it global just stick it in [DEFAULT]." 65 | 66 | * Support OPML and foaf subscriptions 67 | 68 | This might be a bit invasive, but I want to be able to subscribe to OPML 69 | and FOAF files, and see each feed as if it were subscribed individually. 70 | Perhaps we can do this with a two-pass configuration scheme, first to pull 71 | the static configs, second to go fetch and generate the dynamic configs. 72 | The more I think about it, the less invasive it sounds. Hmm. 
73 | 74 | * Provide a 'disabled' configuration option 75 | 76 | Make it easy to mark a feed as disabled, but still show it on the subs 77 | list. Perhaps disabled is the wrong word. Some people want to use Planet to 78 | do their blogroll management, but not actually show some feeds in the 79 | aggregation. If someone can think of a better word, please tell me. 80 | Perhaps 'hide'. 81 | 82 | Other Feature Goals 83 | =================== 84 | 85 | * Port to feedparser 3.0 86 | -------------------------------------------------------------------------------- /config/summary.html.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <TMPL_VAR name> 6 | 8 | 9 | 10 | 11 | 13 | 14 | 16 | 17 | 18 | 19 | 20 | 21 | 22 |

23 | 25 |

26 | 27 |
skip to navigation
28 |
skip to content
29 | 30 |
31 |
32 | 33 |

34 | 35 |

Last update: 36 | 37 | 38 | 39 | ### Visually distinguish articles from different days by checking for 40 | ### the new_date flag. This demonstrates the ... 41 | ### check. 42 | 43 | 44 |

45 | 46 | 47 | ### Group consecutive articles by the same author together by checking 48 | ### for the new_channel flag. 49 | 50 | 51 |

" title="">

52 |
53 | 54 | 55 | 56 |

">

57 |
58 |

59 | 60 |

61 |

62 | ">by at 63 |

64 | 65 | 66 |
67 |
68 | 69 |
70 | 108 |
109 | 110 | 111 | -------------------------------------------------------------------------------- /code/planet/atomstyler.py: -------------------------------------------------------------------------------- 1 | from xml.dom import minidom, Node 2 | from urlparse import urlparse, urlunparse 3 | from xml.parsers.expat import ExpatError 4 | from htmlentitydefs import name2codepoint 5 | import re 6 | 7 | # select and apply an xml:base for this entry 8 | class relativize: 9 | def __init__(self, parent): 10 | self.score = {} 11 | self.links = [] 12 | self.collect_and_tally(parent) 13 | self.base = self.select_optimal_base() 14 | if self.base: 15 | if not parent.hasAttribute('xml:base'): 16 | self.rebase(parent) 17 | parent.setAttribute('xml:base', self.base) 18 | 19 | # collect and tally cite, href and src attributes 20 | def collect_and_tally(self,parent): 21 | uri = None 22 | if parent.hasAttribute('cite'): uri=parent.getAttribute('cite') 23 | if parent.hasAttribute('href'): uri=parent.getAttribute('href') 24 | if parent.hasAttribute('src'): uri=parent.getAttribute('src') 25 | 26 | if uri: 27 | parts=urlparse(uri) 28 | if parts[0].lower() == 'http': 29 | parts = (parts[1]+parts[2]).split('/') 30 | base = None 31 | for i in range(1,len(parts)): 32 | base = tuple(parts[0:i]) 33 | self.score[base] = self.score.get(base,0) + len(base) 34 | if base and base not in self.links: self.links.append(base) 35 | 36 | for node in parent.childNodes: 37 | if node.nodeType == Node.ELEMENT_NODE: 38 | self.collect_and_tally(node) 39 | 40 | # select the xml:base with the highest score 41 | def select_optimal_base(self): 42 | if not self.score: return None 43 | for link in self.links: 44 | self.score[link] = 0 45 | winner = max(self.score.values()) 46 | if not winner: return None 47 | for key in self.score.keys(): 48 | if self.score[key] == winner: 49 | if winner == len(key): return None 50 | return urlunparse(('http', key[0], '/'.join(key[1:]), '', '', '')) + '/' 51 | 52 | # rewrite 
cite, href and src attributes using this base 53 | def rebase(self,parent): 54 | uri = None 55 | if parent.hasAttribute('cite'): uri=parent.getAttribute('cite') 56 | if parent.hasAttribute('href'): uri=parent.getAttribute('href') 57 | if parent.hasAttribute('src'): uri=parent.getAttribute('src') 58 | if uri and uri.startswith(self.base): 59 | uri = uri[len(self.base):] or '.' 60 | if parent.hasAttribute('href'): uri=parent.setAttribute('href', uri) 61 | if parent.hasAttribute('src'): uri=parent.setAttribute('src', uri) 62 | 63 | for node in parent.childNodes: 64 | if node.nodeType == Node.ELEMENT_NODE: 65 | self.rebase(node) 66 | 67 | # convert type="html" to type="plain" or type="xhtml" as appropriate 68 | def retype(parent): 69 | for node in parent.childNodes: 70 | if node.nodeType == Node.ELEMENT_NODE: 71 | 72 | if node.hasAttribute('type') and node.getAttribute('type') == 'html': 73 | if len(node.childNodes)==0: 74 | node.removeAttribute('type') 75 | elif len(node.childNodes)==1: 76 | 77 | # replace html entity defs with utf-8 78 | chunks=re.split('&(\w+);', node.childNodes[0].nodeValue) 79 | for i in range(1,len(chunks),2): 80 | if chunks[i] in ['amp', 'lt', 'gt', 'apos', 'quot']: 81 | chunks[i] ='&' + chunks[i] +';' 82 | elif chunks[i] in name2codepoint: 83 | chunks[i]=unichr(name2codepoint[chunks[i]]) 84 | else: 85 | chunks[i]='&' + chunks[i] + ';' 86 | text = u"".join(chunks) 87 | 88 | try: 89 | # see if the resulting text is a well-formed XML fragment 90 | div = '
%s
' 91 | data = minidom.parseString((div % text.encode('utf-8'))) 92 | 93 | if text.find('<') < 0: 94 | # plain text 95 | node.removeAttribute('type') 96 | text = data.documentElement.childNodes[0].nodeValue 97 | node.childNodes[0].replaceWholeText(text) 98 | 99 | elif len(text) > 80: 100 | # xhtml 101 | node.setAttribute('type', 'xhtml') 102 | node.removeChild(node.childNodes[0]) 103 | node.appendChild(data.documentElement) 104 | 105 | except ExpatError: 106 | # leave as html 107 | pass 108 | 109 | else: 110 | # recurse 111 | retype(node) 112 | 113 | if parent.nodeName == 'entry': 114 | relativize(parent) 115 | 116 | if __name__ == '__main__': 117 | 118 | # run styler on each file mention on the command line 119 | import sys 120 | for feed in sys.argv[1:]: 121 | doc = minidom.parse(feed) 122 | doc.normalize() 123 | retype(doc.documentElement) 124 | open(feed,'w').write(doc.toxml('utf-8')) 125 | -------------------------------------------------------------------------------- /code/planet.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | """The Planet aggregator. 3 | 4 | A flexible and easy-to-use aggregator for generating websites. 5 | 6 | Visit http://www.planetplanet.org/ for more information and to download 7 | the latest version. 8 | 9 | Requires Python 2.1, recommends 2.3. 
10 | """ 11 | 12 | __authors__ = [ "Scott James Remnant ", 13 | "Jeff Waugh " ] 14 | __license__ = "Python" 15 | 16 | 17 | import os 18 | import sys 19 | import time 20 | import locale 21 | import socket 22 | import urlparse 23 | 24 | import planet 25 | 26 | from ConfigParser import ConfigParser 27 | 28 | # Default configuration file path 29 | CONFIG_FILE = "config.ini" 30 | 31 | # Defaults for the [Planet] config section 32 | PLANET_NAME = "Unconfigured Planet" 33 | PLANET_LINK = "Unconfigured Planet" 34 | PLANET_FEED = None 35 | OWNER_NAME = "Anonymous Coward" 36 | OWNER_EMAIL = "" 37 | LOG_LEVEL = "WARNING" 38 | FEED_TIMEOUT = 20 # seconds 39 | 40 | # Default template file list 41 | TEMPLATE_FILES = "examples/basic/planet.html.tmpl" 42 | 43 | 44 | 45 | def config_get(config, section, option, default=None, raw=0, vars=None): 46 | """Get a value from the configuration, with a default.""" 47 | if config.has_option(section, option): 48 | return config.get(section, option, raw=raw, vars=None) 49 | else: 50 | return default 51 | 52 | def main(): 53 | config_file = CONFIG_FILE 54 | offline = 0 55 | verbose = 0 56 | 57 | for arg in sys.argv[1:]: 58 | if arg == "-h" or arg == "--help": 59 | print "Usage: planet [options] [CONFIGFILE]" 60 | print 61 | print "Options:" 62 | print " -v, --verbose DEBUG level logging during update" 63 | print " -o, --offline Update the Planet from the cache only" 64 | print " -h, --help Display this help message and exit" 65 | print 66 | sys.exit(0) 67 | elif arg == "-v" or arg == "--verbose": 68 | verbose = 1 69 | elif arg == "-o" or arg == "--offline": 70 | offline = 1 71 | elif arg.startswith("-"): 72 | print >>sys.stderr, "Unknown option:", arg 73 | sys.exit(1) 74 | else: 75 | config_file = arg 76 | 77 | # Read the configuration file 78 | config = ConfigParser() 79 | config.read(config_file) 80 | if not config.has_section("Planet"): 81 | print >>sys.stderr, "Configuration missing [Planet] section." 
82 | sys.exit(1) 83 | 84 | # Read the [Planet] config section 85 | planet_name = config_get(config, "Planet", "name", PLANET_NAME) 86 | planet_link = config_get(config, "Planet", "link", PLANET_LINK) 87 | planet_feed = config_get(config, "Planet", "feed", PLANET_FEED) 88 | owner_name = config_get(config, "Planet", "owner_name", OWNER_NAME) 89 | owner_email = config_get(config, "Planet", "owner_email", OWNER_EMAIL) 90 | if verbose: 91 | log_level = "DEBUG" 92 | else: 93 | log_level = config_get(config, "Planet", "log_level", LOG_LEVEL) 94 | feed_timeout = config_get(config, "Planet", "feed_timeout", FEED_TIMEOUT) 95 | template_files = config_get(config, "Planet", "template_files", 96 | TEMPLATE_FILES).split(" ") 97 | 98 | # Default feed to the first feed for which there is a template 99 | if not planet_feed: 100 | for template_file in template_files: 101 | name = os.path.splitext(os.path.basename(template_file))[0] 102 | if name.find('atom')>=0 or name.find('rss')>=0: 103 | planet_feed = urlparse.urljoin(planet_link, name) 104 | break 105 | 106 | # Define locale 107 | if config.has_option("Planet", "locale"): 108 | # The user can specify more than one locale (separated by ":") as 109 | # fallbacks. 110 | locale_ok = False 111 | for user_locale in config.get("Planet", "locale").split(':'): 112 | user_locale = user_locale.strip() 113 | try: 114 | locale.setlocale(locale.LC_ALL, user_locale) 115 | except locale.Error: 116 | pass 117 | else: 118 | locale_ok = True 119 | break 120 | if not locale_ok: 121 | print >>sys.stderr, "Unsupported locale setting." 
122 | sys.exit(1) 123 | 124 | # Activate logging 125 | planet.logging.basicConfig() 126 | planet.logging.getLogger().setLevel(planet.logging.getLevelName(log_level)) 127 | log = planet.logging.getLogger("planet.runner") 128 | try: 129 | log.warning 130 | except: 131 | log.warning = log.warn 132 | 133 | if feed_timeout: 134 | try: 135 | feed_timeout = float(feed_timeout) 136 | except: 137 | log.warning("Feed timeout set to invalid value '%s', skipping", feed_timeout) 138 | feed_timeout = None 139 | 140 | if feed_timeout and not offline: 141 | socket.setdefaulttimeout(feed_timeout) 142 | log.debug("Socket timeout set to %d seconds", feed_timeout) 143 | 144 | # run the planet 145 | my_planet = planet.Planet(config) 146 | my_planet.run(planet_name, planet_link, template_files, offline) 147 | 148 | my_planet.generate_all_files(template_files, planet_name, 149 | planet_link, planet_feed, owner_name, owner_email) 150 | 151 | 152 | if __name__ == "__main__": 153 | main() 154 | 155 | -------------------------------------------------------------------------------- /config/titles_only.html.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <TMPL_VAR name> 6 | 8 | 9 | 10 | 11 | 13 | 14 | 16 | 17 | 19 | 20 | 21 | 22 | 23 |

24 | 26 |

27 | 28 |
skip to navigation
29 |
skip to content
30 | 31 |
32 |
33 | 34 |

35 | 36 |

Last update: 37 | 38 | 39 | 40 | ### Visually distinguish articles from different days by checking for 41 | ### the new_date flag. This demonstrates the ... 42 | ### check. 43 | 44 | 45 |

46 | 47 | 48 | ### Group consecutive articles by the same author together by checking 49 | ### for the new_channel flag. 50 | 51 | 52 |

" title="">

53 |
54 | 55 | 56 | 57 |

">

58 |
59 |

60 | ">by at 61 |

62 | 63 | 64 |
65 |
66 | 67 |
68 | 130 |
131 | 132 | 133 | -------------------------------------------------------------------------------- /config/index.html.tmpl: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | <TMPL_VAR name> 6 | 8 | 9 | 10 | 11 | 13 | 14 | 16 | 17 | 19 | 30 | 31 | 32 | 33 | 34 |

35 | 37 |

38 | 39 |
skip to navigation
40 |
skip to content
41 | 42 |
43 |
44 | 45 |

46 | 47 |

Last update: 48 | 49 | 50 | 51 | ### Visually distinguish articles from different days by checking for 52 | ### the new_date flag. This demonstrates the ... 53 | ### check. 54 | 55 | 56 |

57 | 58 | 59 | ### Group consecutive articles by the same author together by checking 60 | ### for the new_channel flag. 61 | 62 | 63 |

" title="">

64 |
65 | 66 | 67 | 68 |

">

69 |
70 |

71 | 72 |

73 |

74 | ">by at 75 |

76 | 77 | 78 |
79 |
80 | 81 |
82 | 144 |
145 | 146 | 147 | -------------------------------------------------------------------------------- /code/planet-cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """Planet cache tool. 4 | 5 | """ 6 | 7 | __authors__ = [ "Scott James Remnant ", 8 | "Jeff Waugh " ] 9 | __license__ = "Python" 10 | 11 | 12 | import os 13 | import sys 14 | import time 15 | import dbhash 16 | import ConfigParser 17 | 18 | import planet 19 | 20 | 21 | def usage(): 22 | print "Usage: planet-cache [options] CACHEFILE [ITEMID]..." 23 | print 24 | print "Examine and modify information in the Planet cache." 25 | print 26 | print "Channel Commands:" 27 | print " -C, --channel Display known information on the channel" 28 | print " -L, --list List items in the channel" 29 | print " -K, --keys List all keys found in channel items" 30 | print 31 | print "Item Commands (need ITEMID):" 32 | print " -I, --item Display known information about the item(s)" 33 | print " -H, --hide Mark the item(s) as hidden" 34 | print " -U, --unhide Mark the item(s) as not hidden" 35 | print 36 | print "Other Options:" 37 | print " -h, --help Display this help message and exit" 38 | sys.exit(0) 39 | 40 | def usage_error(msg, *args): 41 | print >>sys.stderr, msg, " ".join(args) 42 | print >>sys.stderr, "Perhaps you need --help ?" 43 | sys.exit(1) 44 | 45 | def print_keys(item, title): 46 | keys = item.keys() 47 | keys.sort() 48 | key_len = max([ len(k) for k in keys ]) 49 | 50 | print title + ":" 51 | for key in keys: 52 | if item.key_type(key) == item.DATE: 53 | value = time.strftime(planet.TIMEFMT_ISO, item[key]) 54 | else: 55 | value = str(item[key]) 56 | print " %-*s %s" % (key_len, key, fit_str(value, 74 - key_len)) 57 | 58 | def fit_str(string, length): 59 | if len(string) <= length: 60 | return string 61 | else: 62 | return string[:length-4] + " ..." 
63 | 64 | 65 | if __name__ == "__main__": 66 | cache_file = None 67 | want_ids = 0 68 | ids = [] 69 | 70 | command = None 71 | 72 | for arg in sys.argv[1:]: 73 | if arg == "-h" or arg == "--help": 74 | usage() 75 | elif arg == "-C" or arg == "--channel": 76 | if command is not None: 77 | usage_error("Only one command option may be supplied") 78 | command = "channel" 79 | elif arg == "-L" or arg == "--list": 80 | if command is not None: 81 | usage_error("Only one command option may be supplied") 82 | command = "list" 83 | elif arg == "-K" or arg == "--keys": 84 | if command is not None: 85 | usage_error("Only one command option may be supplied") 86 | command = "keys" 87 | elif arg == "-I" or arg == "--item": 88 | if command is not None: 89 | usage_error("Only one command option may be supplied") 90 | command = "item" 91 | want_ids = 1 92 | elif arg == "-H" or arg == "--hide": 93 | if command is not None: 94 | usage_error("Only one command option may be supplied") 95 | command = "hide" 96 | want_ids = 1 97 | elif arg == "-U" or arg == "--unhide": 98 | if command is not None: 99 | usage_error("Only one command option may be supplied") 100 | command = "unhide" 101 | want_ids = 1 102 | elif arg.startswith("-"): 103 | usage_error("Unknown option:", arg) 104 | else: 105 | if cache_file is None: 106 | cache_file = arg 107 | elif want_ids: 108 | ids.append(arg) 109 | else: 110 | usage_error("Unexpected extra argument:", arg) 111 | 112 | if cache_file is None: 113 | usage_error("Missing expected cache filename") 114 | elif want_ids and not len(ids): 115 | usage_error("Missing expected entry ids") 116 | 117 | # Open the cache file directly to get the URL it represents 118 | try: 119 | db = dbhash.open(cache_file) 120 | url = db["url"] 121 | db.close() 122 | except dbhash.bsddb._db.DBError, e: 123 | print >>sys.stderr, cache_file + ":", e.args[1] 124 | sys.exit(1) 125 | except KeyError: 126 | print >>sys.stderr, cache_file + ": Probably not a cache file" 127 | sys.exit(1) 128 
| 129 | # Now do it the right way :-) 130 | my_planet = planet.Planet(ConfigParser.ConfigParser()) 131 | my_planet.cache_directory = os.path.dirname(cache_file) 132 | channel = planet.Channel(my_planet, url) 133 | 134 | for item_id in ids: 135 | if not channel.has_item(item_id): 136 | print >>sys.stderr, item_id + ": Not in channel" 137 | sys.exit(1) 138 | 139 | # Do the user's bidding 140 | if command == "channel": 141 | print_keys(channel, "Channel Keys") 142 | 143 | elif command == "item": 144 | for item_id in ids: 145 | item = channel.get_item(item_id) 146 | print_keys(item, "Item Keys for %s" % item_id) 147 | 148 | elif command == "list": 149 | print "Items in Channel:" 150 | for item in channel.items(hidden=1, sorted=1): 151 | print " " + item.id 152 | print " " + time.strftime(planet.TIMEFMT_ISO, item.date) 153 | if hasattr(item, "title"): 154 | print " " + fit_str(item.title, 70) 155 | if hasattr(item, "hidden"): 156 | print " (hidden)" 157 | 158 | elif command == "keys": 159 | keys = {} 160 | for item in channel.items(): 161 | for key in item.keys(): 162 | keys[key] = 1 163 | 164 | keys = keys.keys() 165 | keys.sort() 166 | 167 | print "Keys used in Channel:" 168 | for key in keys: 169 | print " " + key 170 | print 171 | 172 | print "Use --item to output values of particular items." 173 | 174 | elif command == "hide": 175 | for item_id in ids: 176 | item = channel.get_item(item_id) 177 | if hasattr(item, "hidden"): 178 | print item_id + ": Already hidden." 179 | else: 180 | item.hidden = "yes" 181 | 182 | channel.cache_write() 183 | print "Done." 184 | 185 | elif command == "unhide": 186 | for item_id in ids: 187 | item = channel.get_item(item_id) 188 | if hasattr(item, "hidden"): 189 | del(item.hidden) 190 | else: 191 | print item_id + ": Not hidden." 192 | 193 | channel.cache_write() 194 | print "Done." 
195 | -------------------------------------------------------------------------------- /code/planet/cache.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """Item cache. 4 | 5 | Between runs of Planet we need somewhere to store the feed information 6 | we parsed, this is so we don't lose information when a particular feed 7 | goes away or is too short to hold enough items. 8 | 9 | This module provides the code to handle this cache transparently enough 10 | that the rest of the code can take the persistance for granted. 11 | """ 12 | 13 | import os 14 | import re 15 | 16 | 17 | # Regular expressions to sanitise cache filenames 18 | re_url_scheme = re.compile(r'^[^:]*://') 19 | re_slash = re.compile(r'[?/]+') 20 | re_initial_cruft = re.compile(r'^[,.]*') 21 | re_final_cruft = re.compile(r'[,.]*$') 22 | 23 | 24 | class CachedInfo: 25 | """Cached information. 26 | 27 | This class is designed to hold information that is stored in a cache 28 | between instances. It can act both as a dictionary (c['foo']) and 29 | as an object (c.foo) to get and set values and supports both string 30 | and date values. 31 | 32 | If you wish to support special fields you can derive a class off this 33 | and implement get_FIELD and set_FIELD functions which will be 34 | automatically called. 
35 | """ 36 | STRING = "string" 37 | DATE = "date" 38 | NULL = "null" 39 | 40 | def __init__(self, cache, id_, root=0): 41 | self._type = {} 42 | self._value = {} 43 | self._cached = {} 44 | 45 | self._cache = cache 46 | self._id = id_.replace(" ", "%20") 47 | self._root = root 48 | 49 | def cache_key(self, key): 50 | """Return the cache key name for the given key.""" 51 | key = key.replace(" ", "_") 52 | if self._root: 53 | return key 54 | else: 55 | return self._id + " " + key 56 | 57 | def cache_read(self): 58 | """Read information from the cache.""" 59 | if self._root: 60 | keys_key = " keys" 61 | else: 62 | keys_key = self._id 63 | 64 | if self._cache.has_key(keys_key): 65 | keys = self._cache[keys_key].split(" ") 66 | else: 67 | return 68 | 69 | for key in keys: 70 | cache_key = self.cache_key(key) 71 | if not self._cached.has_key(key) or self._cached[key]: 72 | # Key either hasn't been loaded, or is one for the cache 73 | self._value[key] = self._cache[cache_key] 74 | self._type[key] = self._cache[cache_key + " type"] 75 | self._cached[key] = 1 76 | 77 | def cache_write(self, sync=1): 78 | """Write information to the cache.""" 79 | self.cache_clear(sync=0) 80 | 81 | keys = [] 82 | for key in self.keys(): 83 | cache_key = self.cache_key(key) 84 | if not self._cached[key]: 85 | if self._cache.has_key(cache_key): 86 | # Non-cached keys need to be cleared 87 | del(self._cache[cache_key]) 88 | del(self._cache[cache_key + " type"]) 89 | continue 90 | 91 | keys.append(key) 92 | self._cache[cache_key] = self._value[key] 93 | self._cache[cache_key + " type"] = self._type[key] 94 | 95 | if self._root: 96 | keys_key = " keys" 97 | else: 98 | keys_key = self._id 99 | 100 | self._cache[keys_key] = " ".join(keys) 101 | if sync: 102 | self._cache.sync() 103 | 104 | def cache_clear(self, sync=1): 105 | """Remove information from the cache.""" 106 | if self._root: 107 | keys_key = " keys" 108 | else: 109 | keys_key = self._id 110 | 111 | if self._cache.has_key(keys_key): 112 
| keys = self._cache[keys_key].split(" ") 113 | del(self._cache[keys_key]) 114 | else: 115 | return 116 | 117 | for key in keys: 118 | cache_key = self.cache_key(key) 119 | del(self._cache[cache_key]) 120 | del(self._cache[cache_key + " type"]) 121 | 122 | if sync: 123 | self._cache.sync() 124 | 125 | def has_key(self, key): 126 | """Check whether the key exists.""" 127 | key = key.replace(" ", "_") 128 | return self._value.has_key(key) 129 | 130 | def key_type(self, key): 131 | """Return the key type.""" 132 | key = key.replace(" ", "_") 133 | return self._type[key] 134 | 135 | def set(self, key, value, cached=1): 136 | """Set the value of the given key. 137 | 138 | If a set_KEY function exists that is called otherwise the 139 | string function is called and the date function if that fails 140 | (it nearly always will). 141 | """ 142 | key = key.replace(" ", "_") 143 | 144 | try: 145 | func = getattr(self, "set_" + key) 146 | except AttributeError: 147 | pass 148 | else: 149 | return func(key, value) 150 | 151 | if value == None: 152 | return self.set_as_null(key, value) 153 | else: 154 | try: 155 | return self.set_as_string(key, value) 156 | except TypeError: 157 | return self.set_as_date(key, value) 158 | 159 | def get(self, key): 160 | """Return the value of the given key. 161 | 162 | If a get_KEY function exists that is called otherwise the 163 | correctly typed function is called if that exists. 164 | """ 165 | key = key.replace(" ", "_") 166 | 167 | try: 168 | func = getattr(self, "get_" + key) 169 | except AttributeError: 170 | pass 171 | else: 172 | return func(key) 173 | 174 | try: 175 | func = getattr(self, "get_as_" + self._type[key]) 176 | except AttributeError: 177 | pass 178 | else: 179 | return func(key) 180 | 181 | return self._value[key] 182 | 183 | def set_as_string(self, key, value, cached=1): 184 | """Set the key to the string value. 
185 | 186 | The value is converted to UTF-8 if it is a Unicode string, otherwise 187 | it's assumed to have failed decoding (feedparser tries pretty hard) 188 | so has all non-ASCII characters stripped. 189 | """ 190 | value = utf8(value) 191 | 192 | key = key.replace(" ", "_") 193 | self._value[key] = value 194 | self._type[key] = self.STRING 195 | self._cached[key] = cached 196 | 197 | def get_as_string(self, key): 198 | """Return the key as a string value.""" 199 | key = key.replace(" ", "_") 200 | if not self.has_key(key): 201 | raise KeyError, key 202 | 203 | return self._value[key] 204 | 205 | def set_as_date(self, key, value, cached=1): 206 | """Set the key to the date value. 207 | 208 | The date should be a 9-item tuple as returned by time.gmtime(). 209 | """ 210 | value = " ".join([ str(s) for s in value ]) 211 | 212 | key = key.replace(" ", "_") 213 | self._value[key] = value 214 | self._type[key] = self.DATE 215 | self._cached[key] = cached 216 | 217 | def get_as_date(self, key): 218 | """Return the key as a date value.""" 219 | key = key.replace(" ", "_") 220 | if not self.has_key(key): 221 | raise KeyError, key 222 | 223 | value = self._value[key] 224 | return tuple([ int(i) for i in value.split(" ") ]) 225 | 226 | def set_as_null(self, key, value, cached=1): 227 | """Set the key to the null value. 228 | 229 | This only exists to make things less magic. 
230 | """ 231 | key = key.replace(" ", "_") 232 | self._value[key] = "" 233 | self._type[key] = self.NULL 234 | self._cached[key] = cached 235 | 236 | def get_as_null(self, key): 237 | """Return the key as the null value.""" 238 | key = key.replace(" ", "_") 239 | if not self.has_key(key): 240 | raise KeyError, key 241 | 242 | return None 243 | 244 | def del_key(self, key): 245 | """Delete the given key.""" 246 | key = key.replace(" ", "_") 247 | if not self.has_key(key): 248 | raise KeyError, key 249 | 250 | del(self._value[key]) 251 | del(self._type[key]) 252 | del(self._cached[key]) 253 | 254 | def keys(self): 255 | """Return the list of cached keys.""" 256 | return self._value.keys() 257 | 258 | def __iter__(self): 259 | """Iterate the cached keys.""" 260 | return iter(self._value.keys()) 261 | 262 | # Special methods 263 | __contains__ = has_key 264 | __setitem__ = set_as_string 265 | __getitem__ = get 266 | __delitem__ = del_key 267 | __delattr__ = del_key 268 | 269 | def __setattr__(self, key, value): 270 | if key.startswith("_"): 271 | self.__dict__[key] = value 272 | else: 273 | self.set(key, value) 274 | 275 | def __getattr__(self, key): 276 | if self.has_key(key): 277 | return self.get(key) 278 | else: 279 | raise AttributeError, key 280 | 281 | 282 | def filename(directory, filename): 283 | """Return a filename suitable for the cache. 284 | 285 | Strips dangerous and common characters to create a filename we 286 | can use to store the cache in. 
287 | """ 288 | filename = re_url_scheme.sub("", filename) 289 | filename = re_slash.sub(",", filename) 290 | filename = re_initial_cruft.sub("", filename) 291 | filename = re_final_cruft.sub("", filename) 292 | 293 | return os.path.join(directory, filename) 294 | 295 | def utf8(value): 296 | """Return the value as a UTF-8 string.""" 297 | if type(value) == type(u''): 298 | return value.encode("utf-8") 299 | else: 300 | try: 301 | return unicode(value, "utf-8").encode("utf-8") 302 | except UnicodeError: 303 | try: 304 | return unicode(value, "iso-8859-1").encode("utf-8") 305 | except UnicodeError: 306 | return unicode(value, "ascii", "replace").encode("utf-8") 307 | -------------------------------------------------------------------------------- /code/planet/compat_logging/config.py: -------------------------------------------------------------------------------- 1 | # Copyright 2001-2002 by Vinay Sajip. All Rights Reserved. 2 | # 3 | # Permission to use, copy, modify, and distribute this software and its 4 | # documentation for any purpose and without fee is hereby granted, 5 | # provided that the above copyright notice appear in all copies and that 6 | # both that copyright notice and this permission notice appear in 7 | # supporting documentation, and that the name of Vinay Sajip 8 | # not be used in advertising or publicity pertaining to distribution 9 | # of the software without specific, written prior permission. 10 | # VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING 11 | # ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL 12 | # VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR 13 | # ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER 14 | # IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT 15 | # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 16 | 17 | """ 18 | Logging package for Python. 
Based on PEP 282 and comments thereto in 19 | comp.lang.python, and influenced by Apache's log4j system. 20 | 21 | Should work under Python versions >= 1.5.2, except that source line 22 | information is not available unless 'inspect' is. 23 | 24 | Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved. 25 | 26 | To use, simply 'import logging' and log away! 27 | """ 28 | 29 | import sys, logging, logging.handlers, string, thread, threading, socket, struct, os 30 | 31 | from SocketServer import ThreadingTCPServer, StreamRequestHandler 32 | 33 | 34 | DEFAULT_LOGGING_CONFIG_PORT = 9030 35 | if sys.platform == "win32": 36 | RESET_ERROR = 10054 #WSAECONNRESET 37 | else: 38 | RESET_ERROR = 104 #ECONNRESET 39 | 40 | # 41 | # The following code implements a socket listener for on-the-fly 42 | # reconfiguration of logging. 43 | # 44 | # _listener holds the server object doing the listening 45 | _listener = None 46 | 47 | def fileConfig(fname, defaults=None): 48 | """ 49 | Read the logging configuration from a ConfigParser-format file. 50 | 51 | This can be called several times from an application, allowing an end user 52 | the ability to select from various pre-canned configurations (if the 53 | developer provides a mechanism to present the choices and load the chosen 54 | configuration). 55 | In versions of ConfigParser which have the readfp method [typically 56 | shipped in 2.x versions of Python], you can pass in a file-like object 57 | rather than a filename, in which case the file-like object will be read 58 | using readfp. 59 | """ 60 | import ConfigParser 61 | 62 | cp = ConfigParser.ConfigParser(defaults) 63 | if hasattr(cp, 'readfp') and hasattr(fname, 'readline'): 64 | cp.readfp(fname) 65 | else: 66 | cp.read(fname) 67 | #first, do the formatters... 
68 | flist = cp.get("formatters", "keys") 69 | if len(flist): 70 | flist = string.split(flist, ",") 71 | formatters = {} 72 | for form in flist: 73 | sectname = "formatter_%s" % form 74 | opts = cp.options(sectname) 75 | if "format" in opts: 76 | fs = cp.get(sectname, "format", 1) 77 | else: 78 | fs = None 79 | if "datefmt" in opts: 80 | dfs = cp.get(sectname, "datefmt", 1) 81 | else: 82 | dfs = None 83 | f = logging.Formatter(fs, dfs) 84 | formatters[form] = f 85 | #next, do the handlers... 86 | #critical section... 87 | logging._acquireLock() 88 | try: 89 | try: 90 | #first, lose the existing handlers... 91 | logging._handlers.clear() 92 | #now set up the new ones... 93 | hlist = cp.get("handlers", "keys") 94 | if len(hlist): 95 | hlist = string.split(hlist, ",") 96 | handlers = {} 97 | fixups = [] #for inter-handler references 98 | for hand in hlist: 99 | sectname = "handler_%s" % hand 100 | klass = cp.get(sectname, "class") 101 | opts = cp.options(sectname) 102 | if "formatter" in opts: 103 | fmt = cp.get(sectname, "formatter") 104 | else: 105 | fmt = "" 106 | klass = eval(klass, vars(logging)) 107 | args = cp.get(sectname, "args") 108 | args = eval(args, vars(logging)) 109 | h = apply(klass, args) 110 | if "level" in opts: 111 | level = cp.get(sectname, "level") 112 | h.setLevel(logging._levelNames[level]) 113 | if len(fmt): 114 | h.setFormatter(formatters[fmt]) 115 | #temporary hack for FileHandler and MemoryHandler. 116 | if klass == logging.handlers.MemoryHandler: 117 | if "target" in opts: 118 | target = cp.get(sectname,"target") 119 | else: 120 | target = "" 121 | if len(target): #the target handler may not be loaded yet, so keep for later... 122 | fixups.append((h, target)) 123 | handlers[hand] = h 124 | #now all handlers are loaded, fixup inter-handler references... 125 | for fixup in fixups: 126 | h = fixup[0] 127 | t = fixup[1] 128 | h.setTarget(handlers[t]) 129 | #at last, the loggers...first the root... 
130 | llist = cp.get("loggers", "keys") 131 | llist = string.split(llist, ",") 132 | llist.remove("root") 133 | sectname = "logger_root" 134 | root = logging.root 135 | log = root 136 | opts = cp.options(sectname) 137 | if "level" in opts: 138 | level = cp.get(sectname, "level") 139 | log.setLevel(logging._levelNames[level]) 140 | for h in root.handlers[:]: 141 | root.removeHandler(h) 142 | hlist = cp.get(sectname, "handlers") 143 | if len(hlist): 144 | hlist = string.split(hlist, ",") 145 | for hand in hlist: 146 | log.addHandler(handlers[hand]) 147 | #and now the others... 148 | #we don't want to lose the existing loggers, 149 | #since other threads may have pointers to them. 150 | #existing is set to contain all existing loggers, 151 | #and as we go through the new configuration we 152 | #remove any which are configured. At the end, 153 | #what's left in existing is the set of loggers 154 | #which were in the previous configuration but 155 | #which are not in the new configuration. 156 | existing = root.manager.loggerDict.keys() 157 | #now set up the new ones... 158 | for log in llist: 159 | sectname = "logger_%s" % log 160 | qn = cp.get(sectname, "qualname") 161 | opts = cp.options(sectname) 162 | if "propagate" in opts: 163 | propagate = cp.getint(sectname, "propagate") 164 | else: 165 | propagate = 1 166 | logger = logging.getLogger(qn) 167 | if qn in existing: 168 | existing.remove(qn) 169 | if "level" in opts: 170 | level = cp.get(sectname, "level") 171 | logger.setLevel(logging._levelNames[level]) 172 | for h in logger.handlers[:]: 173 | logger.removeHandler(h) 174 | logger.propagate = propagate 175 | logger.disabled = 0 176 | hlist = cp.get(sectname, "handlers") 177 | if len(hlist): 178 | hlist = string.split(hlist, ",") 179 | for hand in hlist: 180 | logger.addHandler(handlers[hand]) 181 | #Disable any old loggers. 
There's no point deleting 182 | #them as other threads may continue to hold references 183 | #and by disabling them, you stop them doing any logging. 184 | for log in existing: 185 | root.manager.loggerDict[log].disabled = 1 186 | except: 187 | import traceback 188 | ei = sys.exc_info() 189 | traceback.print_exception(ei[0], ei[1], ei[2], None, sys.stderr) 190 | del ei 191 | finally: 192 | logging._releaseLock() 193 | 194 | def listen(port=DEFAULT_LOGGING_CONFIG_PORT): 195 | """ 196 | Start up a socket server on the specified port, and listen for new 197 | configurations. 198 | 199 | These will be sent as a file suitable for processing by fileConfig(). 200 | Returns a Thread object on which you can call start() to start the server, 201 | and which you can join() when appropriate. To stop the server, call 202 | stopListening(). 203 | """ 204 | if not thread: 205 | raise NotImplementedError, "listen() needs threading to work" 206 | 207 | class ConfigStreamHandler(StreamRequestHandler): 208 | """ 209 | Handler for a logging configuration request. 210 | 211 | It expects a completely new logging configuration and uses fileConfig 212 | to install it. 213 | """ 214 | def handle(self): 215 | """ 216 | Handle a request. 217 | 218 | Each request is expected to be a 4-byte length, 219 | followed by the config file. Uses fileConfig() to do the 220 | grunt work. 221 | """ 222 | import tempfile 223 | try: 224 | conn = self.connection 225 | chunk = conn.recv(4) 226 | if len(chunk) == 4: 227 | slen = struct.unpack(">L", chunk)[0] 228 | chunk = self.connection.recv(slen) 229 | while len(chunk) < slen: 230 | chunk = chunk + conn.recv(slen - len(chunk)) 231 | #Apply new configuration. We'd like to be able to 232 | #create a StringIO and pass that in, but unfortunately 233 | #1.5.2 ConfigParser does not support reading file 234 | #objects, only actual files. So we create a temporary 235 | #file and remove it later. 
236 | file = tempfile.mktemp(".ini") 237 | f = open(file, "w") 238 | f.write(chunk) 239 | f.close() 240 | fileConfig(file) 241 | os.remove(file) 242 | except socket.error, e: 243 | if type(e.args) != types.TupleType: 244 | raise 245 | else: 246 | errcode = e.args[0] 247 | if errcode != RESET_ERROR: 248 | raise 249 | 250 | class ConfigSocketReceiver(ThreadingTCPServer): 251 | """ 252 | A simple TCP socket-based logging config receiver. 253 | """ 254 | 255 | allow_reuse_address = 1 256 | 257 | def __init__(self, host='localhost', port=DEFAULT_LOGGING_CONFIG_PORT, 258 | handler=None): 259 | ThreadingTCPServer.__init__(self, (host, port), handler) 260 | logging._acquireLock() 261 | self.abort = 0 262 | logging._releaseLock() 263 | self.timeout = 1 264 | 265 | def serve_until_stopped(self): 266 | import select 267 | abort = 0 268 | while not abort: 269 | rd, wr, ex = select.select([self.socket.fileno()], 270 | [], [], 271 | self.timeout) 272 | if rd: 273 | self.handle_request() 274 | logging._acquireLock() 275 | abort = self.abort 276 | logging._releaseLock() 277 | 278 | def serve(rcvr, hdlr, port): 279 | server = rcvr(port=port, handler=hdlr) 280 | global _listener 281 | logging._acquireLock() 282 | _listener = server 283 | logging._releaseLock() 284 | server.serve_until_stopped() 285 | 286 | return threading.Thread(target=serve, 287 | args=(ConfigSocketReceiver, 288 | ConfigStreamHandler, port)) 289 | 290 | def stopListening(): 291 | """ 292 | Stop the listening server which was created with a call to listen(). 
293 | """ 294 | global _listener 295 | if _listener: 296 | logging._acquireLock() 297 | _listener.abort = 1 298 | _listener = None 299 | logging._releaseLock() 300 | -------------------------------------------------------------------------------- /code/planet/sanitize.py: -------------------------------------------------------------------------------- 1 | """ 2 | sanitize: bringing sanitiy to world of messed-up data 3 | """ 4 | 5 | __author__ = ["Mark Pilgrim ", 6 | "Aaron Swartz "] 7 | __contributors__ = ["Sam Ruby "] 8 | __license__ = "BSD" 9 | __version__ = "0.25" 10 | 11 | _debug = 0 12 | 13 | # If you want sanitize to automatically run HTML markup through HTML Tidy, set 14 | # this to 1. Requires mxTidy 15 | # or utidylib . 16 | TIDY_MARKUP = 0 17 | 18 | # List of Python interfaces for HTML Tidy, in order of preference. Only useful 19 | # if TIDY_MARKUP = 1 20 | PREFERRED_TIDY_INTERFACES = ["uTidy", "mxTidy"] 21 | 22 | import sgmllib, re 23 | 24 | # chardet library auto-detects character encodings 25 | # Download from http://chardet.feedparser.org/ 26 | try: 27 | import chardet 28 | if _debug: 29 | import chardet.constants 30 | chardet.constants._debug = 1 31 | 32 | _chardet = lambda data: chardet.detect(data)['encoding'] 33 | except: 34 | chardet = None 35 | _chardet = lambda data: None 36 | 37 | class _BaseHTMLProcessor(sgmllib.SGMLParser): 38 | elements_no_end_tag = ['area', 'base', 'basefont', 'br', 'col', 'frame', 'hr', 39 | 'img', 'input', 'isindex', 'link', 'meta', 'param'] 40 | 41 | _r_barebang = re.compile(r'') 44 | 45 | def __init__(self, encoding): 46 | self.encoding = encoding 47 | if _debug: sys.stderr.write('entering BaseHTMLProcessor, encoding=%s\n' % self.encoding) 48 | sgmllib.SGMLParser.__init__(self) 49 | 50 | def reset(self): 51 | self.pieces = [] 52 | sgmllib.SGMLParser.reset(self) 53 | 54 | def _shorttag_replace(self, match): 55 | tag = match.group(1) 56 | if tag in self.elements_no_end_tag: 57 | return '<' + tag + ' />' 58 | else: 59 | 
return '<' + tag + '>' 60 | 61 | def feed(self, data): 62 | data = self._r_barebang.sub(r'<!\1', data) 63 | data = self._r_bareamp.sub("&", data) 64 | data = self._r_shorttag.sub(self._shorttag_replace, data) 65 | if self.encoding and type(data) == type(u''): 66 | data = data.encode(self.encoding) 67 | sgmllib.SGMLParser.feed(self, data) 68 | 69 | def normalize_attrs(self, attrs): 70 | # utility method to be called by descendants 71 | attrs = [(k.lower(), v) for k, v in attrs] 72 | attrs = [(k, k in ('rel', 'type') and v.lower() or v) for k, v in attrs] 73 | return attrs 74 | 75 | def unknown_starttag(self, tag, attrs): 76 | # called for each start tag 77 | # attrs is a list of (attr, value) tuples 78 | # e.g. for
, tag='pre', attrs=[('class', 'screen')]
        if _debug: sys.stderr.write('_BaseHTMLProcessor, unknown_starttag, tag=%s\n' % tag)
        uattrs = []  # (key, value) pairs with both members coerced to unicode
        # thanks to Kevin Marks for this breathtaking hack to deal with (valid) high-bit attribute values in UTF-8 feeds
        # Decode any byte-string key/value via self.encoding so the u''.join()
        # below cannot raise UnicodeDecodeError on high-bit bytes.
        # NOTE(review): assumes self.encoding names a valid codec and is not
        # None -- confirm against the chardet fallback path in this file.
        for key, value in attrs:
            if type(value) != type(u''):
                value = unicode(value, self.encoding)
            uattrs.append((unicode(key, self.encoding), value))
        # Re-serialize the attribute list as ' key="value"' pairs, then encode
        # back to the original byte encoding for the output pieces list.
        # NOTE(review): values are not escaped here, so an embedded '"' would
        # yield malformed markup -- presumably prevented upstream; verify.
        strattrs = u''.join([u' %s="%s"' % (key, value) for key, value in uattrs]).encode(self.encoding)
        # The templates below are filled from locals(), so the local names
        # 'tag' and 'strattrs' must not be renamed.
        if tag in self.elements_no_end_tag:
            # void element (per elements_no_end_tag): emit XHTML self-closing form
            self.pieces.append('<%(tag)s%(strattrs)s />' % locals())
        else:
            # ordinary element: emit start tag only; unknown_endtag emits the close
            self.pieces.append('<%(tag)s%(strattrs)s>' % locals())
 91 | 
 92 |     def unknown_endtag(self, tag):
 93 |         # called for each end tag, e.g. for 
, tag will be 'pre' 94 | # Reconstruct the original end tag. 95 | if tag not in self.elements_no_end_tag: 96 | self.pieces.append("" % locals()) 97 | 98 | def handle_charref(self, ref): 99 | # called for each character reference, e.g. for ' ', ref will be '160' 100 | # Reconstruct the original character reference. 101 | self.pieces.append('&#%(ref)s;' % locals()) 102 | 103 | def handle_entityref(self, ref): 104 | # called for each entity reference, e.g. for '©', ref will be 'copy' 105 | # Reconstruct the original entity reference. 106 | self.pieces.append('&%(ref)s;' % locals()) 107 | 108 | def handle_data(self, text): 109 | # called for each block of plain text, i.e. outside of any tag and 110 | # not containing any character or entity references 111 | # Store the original text verbatim. 112 | if _debug: sys.stderr.write('_BaseHTMLProcessor, handle_text, text=%s\n' % text) 113 | self.pieces.append(text) 114 | 115 | def handle_comment(self, text): 116 | # called for each HTML comment, e.g. 117 | # Reconstruct the original comment. 118 | self.pieces.append('' % locals()) 119 | 120 | def handle_pi(self, text): 121 | # called for each processing instruction, e.g. 122 | # Reconstruct original processing instruction. 123 | self.pieces.append('' % locals()) 124 | 125 | def handle_decl(self, text): 126 | # called for the DOCTYPE, if present, e.g. 
127 | # 129 | # Reconstruct original DOCTYPE 130 | self.pieces.append('' % locals()) 131 | 132 | _new_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9:]*\s*').match 133 | def _scan_name(self, i, declstartpos): 134 | rawdata = self.rawdata 135 | n = len(rawdata) 136 | if i == n: 137 | return None, -1 138 | m = self._new_declname_match(rawdata, i) 139 | if m: 140 | s = m.group() 141 | name = s.strip() 142 | if (i + len(s)) == n: 143 | return None, -1 # end of buffer 144 | return name.lower(), m.end() 145 | else: 146 | self.handle_data(rawdata) 147 | # self.updatepos(declstartpos, i) 148 | return None, -1 149 | 150 | def output(self): 151 | '''Return processed HTML as a single string''' 152 | return ''.join([str(p) for p in self.pieces]) 153 | 154 | class _HTMLSanitizer(_BaseHTMLProcessor): 155 | acceptable_elements = ['a', 'abbr', 'acronym', 'address', 'area', 'b', 'big', 156 | 'blockquote', 'br', 'button', 'caption', 'center', 'cite', 'code', 'col', 157 | 'colgroup', 'dd', 'del', 'dfn', 'dir', 'div', 'dl', 'dt', 'em', 'fieldset', 158 | 'font', 'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'hr', 'i', 'img', 'input', 159 | 'ins', 'kbd', 'label', 'legend', 'li', 'map', 'menu', 'ol', 'optgroup', 160 | 'option', 'p', 'pre', 'q', 's', 'samp', 'select', 'small', 'span', 'strike', 161 | 'strong', 'sub', 'sup', 'table', 'textarea', 'tbody', 'td', 'tfoot', 'th', 162 | 'thead', 'tr', 'tt', 'u', 'ul', 'var'] 163 | 164 | acceptable_attributes = ['abbr', 'accept', 'accept-charset', 'accesskey', 165 | 'action', 'align', 'alt', 'axis', 'border', 'cellpadding', 'cellspacing', 166 | 'char', 'charoff', 'charset', 'checked', 'cite', 'class', 'clear', 'cols', 167 | 'colspan', 'color', 'compact', 'coords', 'datetime', 'dir', 'disabled', 168 | 'enctype', 'for', 'frame', 'headers', 'height', 'href', 'hreflang', 'hspace', 169 | 'id', 'ismap', 'label', 'lang', 'longdesc', 'maxlength', 'media', 'method', 170 | 'multiple', 'name', 'nohref', 'noshade', 'nowrap', 'prompt', 'readonly', 171 | 
'rel', 'rev', 'rows', 'rowspan', 'rules', 'scope', 'selected', 'shape', 'size', 172 | 'span', 'src', 'start', 'summary', 'tabindex', 'target', 'title', 'type', 173 | 'usemap', 'valign', 'value', 'vspace', 'width'] 174 | 175 | ignorable_elements = ['script', 'applet', 'style'] 176 | 177 | def reset(self): 178 | _BaseHTMLProcessor.reset(self) 179 | self.tag_stack = [] 180 | self.ignore_level = 0 181 | 182 | def feed(self, data): 183 | _BaseHTMLProcessor.feed(self, data) 184 | while self.tag_stack: 185 | _BaseHTMLProcessor.unknown_endtag(self, self.tag_stack.pop()) 186 | 187 | def unknown_starttag(self, tag, attrs): 188 | if tag in self.ignorable_elements: 189 | self.ignore_level += 1 190 | return 191 | 192 | if self.ignore_level: 193 | return 194 | 195 | if tag in self.acceptable_elements: 196 | attrs = self.normalize_attrs(attrs) 197 | attrs = [(key, value) for key, value in attrs if key in self.acceptable_attributes] 198 | if tag not in self.elements_no_end_tag: 199 | self.tag_stack.append(tag) 200 | _BaseHTMLProcessor.unknown_starttag(self, tag, attrs) 201 | 202 | def unknown_endtag(self, tag): 203 | if tag in self.ignorable_elements: 204 | self.ignore_level -= 1 205 | return 206 | 207 | if self.ignore_level: 208 | return 209 | 210 | if tag in self.acceptable_elements and tag not in self.elements_no_end_tag: 211 | match = False 212 | while self.tag_stack: 213 | top = self.tag_stack.pop() 214 | if top == tag: 215 | match = True 216 | break 217 | _BaseHTMLProcessor.unknown_endtag(self, top) 218 | 219 | if match: 220 | _BaseHTMLProcessor.unknown_endtag(self, tag) 221 | 222 | def handle_pi(self, text): 223 | pass 224 | 225 | def handle_decl(self, text): 226 | pass 227 | 228 | def handle_data(self, text): 229 | if not self.ignore_level: 230 | text = text.replace('<', '') 231 | _BaseHTMLProcessor.handle_data(self, text) 232 | 233 | def HTML(htmlSource, encoding='utf8'): 234 | p = _HTMLSanitizer(encoding) 235 | p.feed(htmlSource) 236 | data = p.output() 237 | if 
TIDY_MARKUP: 238 | # loop through list of preferred Tidy interfaces looking for one that's installed, 239 | # then set up a common _tidy function to wrap the interface-specific API. 240 | _tidy = None 241 | for tidy_interface in PREFERRED_TIDY_INTERFACES: 242 | try: 243 | if tidy_interface == "uTidy": 244 | from tidy import parseString as _utidy 245 | def _tidy(data, **kwargs): 246 | return str(_utidy(data, **kwargs)) 247 | break 248 | elif tidy_interface == "mxTidy": 249 | from mx.Tidy import Tidy as _mxtidy 250 | def _tidy(data, **kwargs): 251 | nerrors, nwarnings, data, errordata = _mxtidy.tidy(data, **kwargs) 252 | return data 253 | break 254 | except: 255 | pass 256 | if _tidy: 257 | utf8 = type(data) == type(u'') 258 | if utf8: 259 | data = data.encode('utf-8') 260 | data = _tidy(data, output_xhtml=1, numeric_entities=1, wrap=0, char_encoding="utf8") 261 | if utf8: 262 | data = unicode(data, 'utf-8') 263 | if data.count(''): 266 | data = data.split('>', 1)[1] 267 | if data.count('= 1.5.2, except that source line 22 | information is not available unless 'inspect' is. 23 | 24 | Copyright (C) 2001-2002 Vinay Sajip. All Rights Reserved. 25 | 26 | To use, simply 'import logging' and log away! 27 | """ 28 | 29 | import sys, logging, socket, types, os, string, cPickle, struct, time 30 | 31 | from SocketServer import ThreadingTCPServer, StreamRequestHandler 32 | 33 | # 34 | # Some constants... 35 | # 36 | 37 | DEFAULT_TCP_LOGGING_PORT = 9020 38 | DEFAULT_UDP_LOGGING_PORT = 9021 39 | DEFAULT_HTTP_LOGGING_PORT = 9022 40 | DEFAULT_SOAP_LOGGING_PORT = 9023 41 | SYSLOG_UDP_PORT = 514 42 | 43 | 44 | class RotatingFileHandler(logging.FileHandler): 45 | def __init__(self, filename, mode="a", maxBytes=0, backupCount=0): 46 | """ 47 | Open the specified file and use it as the stream for logging. 48 | 49 | By default, the file grows indefinitely. You can specify particular 50 | values of maxBytes and backupCount to allow the file to rollover at 51 | a predetermined size. 
52 | 53 | Rollover occurs whenever the current log file is nearly maxBytes in 54 | length. If backupCount is >= 1, the system will successively create 55 | new files with the same pathname as the base file, but with extensions 56 | ".1", ".2" etc. appended to it. For example, with a backupCount of 5 57 | and a base file name of "app.log", you would get "app.log", 58 | "app.log.1", "app.log.2", ... through to "app.log.5". The file being 59 | written to is always "app.log" - when it gets filled up, it is closed 60 | and renamed to "app.log.1", and if files "app.log.1", "app.log.2" etc. 61 | exist, then they are renamed to "app.log.2", "app.log.3" etc. 62 | respectively. 63 | 64 | If maxBytes is zero, rollover never occurs. 65 | """ 66 | logging.FileHandler.__init__(self, filename, mode) 67 | self.maxBytes = maxBytes 68 | self.backupCount = backupCount 69 | if maxBytes > 0: 70 | self.mode = "a" 71 | 72 | def doRollover(self): 73 | """ 74 | Do a rollover, as described in __init__(). 75 | """ 76 | 77 | self.stream.close() 78 | if self.backupCount > 0: 79 | for i in range(self.backupCount - 1, 0, -1): 80 | sfn = "%s.%d" % (self.baseFilename, i) 81 | dfn = "%s.%d" % (self.baseFilename, i + 1) 82 | if os.path.exists(sfn): 83 | #print "%s -> %s" % (sfn, dfn) 84 | if os.path.exists(dfn): 85 | os.remove(dfn) 86 | os.rename(sfn, dfn) 87 | dfn = self.baseFilename + ".1" 88 | if os.path.exists(dfn): 89 | os.remove(dfn) 90 | os.rename(self.baseFilename, dfn) 91 | #print "%s -> %s" % (self.baseFilename, dfn) 92 | self.stream = open(self.baseFilename, "w") 93 | 94 | def emit(self, record): 95 | """ 96 | Emit a record. 97 | 98 | Output the record to the file, catering for rollover as described 99 | in doRollover(). 100 | """ 101 | if self.maxBytes > 0: # are we rolling over? 
102 | msg = "%s\n" % self.format(record) 103 | self.stream.seek(0, 2) #due to non-posix-compliant Windows feature 104 | if self.stream.tell() + len(msg) >= self.maxBytes: 105 | self.doRollover() 106 | logging.FileHandler.emit(self, record) 107 | 108 | 109 | class SocketHandler(logging.Handler): 110 | """ 111 | A handler class which writes logging records, in pickle format, to 112 | a streaming socket. The socket is kept open across logging calls. 113 | If the peer resets it, an attempt is made to reconnect on the next call. 114 | The pickle which is sent is that of the LogRecord's attribute dictionary 115 | (__dict__), so that the receiver does not need to have the logging module 116 | installed in order to process the logging event. 117 | 118 | To unpickle the record at the receiving end into a LogRecord, use the 119 | makeLogRecord function. 120 | """ 121 | 122 | def __init__(self, host, port): 123 | """ 124 | Initializes the handler with a specific host address and port. 125 | 126 | The attribute 'closeOnError' is set to 1 - which means that if 127 | a socket error occurs, the socket is silently closed and then 128 | reopened on the next logging call. 129 | """ 130 | logging.Handler.__init__(self) 131 | self.host = host 132 | self.port = port 133 | self.sock = None 134 | self.closeOnError = 0 135 | 136 | def makeSocket(self): 137 | """ 138 | A factory method which allows subclasses to define the precise 139 | type of socket they want. 140 | """ 141 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 142 | s.connect((self.host, self.port)) 143 | return s 144 | 145 | def send(self, s): 146 | """ 147 | Send a pickled string to the socket. 148 | 149 | This function allows for partial sends which can happen when the 150 | network is busy. 
151 | """ 152 | if hasattr(self.sock, "sendall"): 153 | self.sock.sendall(s) 154 | else: 155 | sentsofar = 0 156 | left = len(s) 157 | while left > 0: 158 | sent = self.sock.send(s[sentsofar:]) 159 | sentsofar = sentsofar + sent 160 | left = left - sent 161 | 162 | def makePickle(self, record): 163 | """ 164 | Pickles the record in binary format with a length prefix, and 165 | returns it ready for transmission across the socket. 166 | """ 167 | s = cPickle.dumps(record.__dict__, 1) 168 | #n = len(s) 169 | #slen = "%c%c" % ((n >> 8) & 0xFF, n & 0xFF) 170 | slen = struct.pack(">L", len(s)) 171 | return slen + s 172 | 173 | def handleError(self, record): 174 | """ 175 | Handle an error during logging. 176 | 177 | An error has occurred during logging. Most likely cause - 178 | connection lost. Close the socket so that we can retry on the 179 | next event. 180 | """ 181 | if self.closeOnError and self.sock: 182 | self.sock.close() 183 | self.sock = None #try to reconnect next time 184 | else: 185 | logging.Handler.handleError(self, record) 186 | 187 | def emit(self, record): 188 | """ 189 | Emit a record. 190 | 191 | Pickles the record and writes it to the socket in binary format. 192 | If there is an error with the socket, silently drop the packet. 193 | If there was a problem with the socket, re-establishes the 194 | socket. 195 | """ 196 | try: 197 | s = self.makePickle(record) 198 | if not self.sock: 199 | self.sock = self.makeSocket() 200 | self.send(s) 201 | except: 202 | self.handleError(record) 203 | 204 | def close(self): 205 | """ 206 | Closes the socket. 207 | """ 208 | if self.sock: 209 | self.sock.close() 210 | self.sock = None 211 | 212 | class DatagramHandler(SocketHandler): 213 | """ 214 | A handler class which writes logging records, in pickle format, to 215 | a datagram socket. 
The pickle which is sent is that of the LogRecord's 216 | attribute dictionary (__dict__), so that the receiver does not need to 217 | have the logging module installed in order to process the logging event. 218 | 219 | To unpickle the record at the receiving end into a LogRecord, use the 220 | makeLogRecord function. 221 | 222 | """ 223 | def __init__(self, host, port): 224 | """ 225 | Initializes the handler with a specific host address and port. 226 | """ 227 | SocketHandler.__init__(self, host, port) 228 | self.closeOnError = 0 229 | 230 | def makeSocket(self): 231 | """ 232 | The factory method of SocketHandler is here overridden to create 233 | a UDP socket (SOCK_DGRAM). 234 | """ 235 | s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 236 | return s 237 | 238 | def send(self, s): 239 | """ 240 | Send a pickled string to a socket. 241 | 242 | This function no longer allows for partial sends which can happen 243 | when the network is busy - UDP does not guarantee delivery and 244 | can deliver packets out of sequence. 245 | """ 246 | self.sock.sendto(s, (self.host, self.port)) 247 | 248 | class SysLogHandler(logging.Handler): 249 | """ 250 | A handler class which sends formatted logging records to a syslog 251 | server. Based on Sam Rushing's syslog module: 252 | http://www.nightmare.com/squirl/python-ext/misc/syslog.py 253 | Contributed by Nicolas Untz (after which minor refactoring changes 254 | have been made). 255 | """ 256 | 257 | # from : 258 | # ====================================================================== 259 | # priorities/facilities are encoded into a single 32-bit quantity, where 260 | # the bottom 3 bits are the priority (0-7) and the top 28 bits are the 261 | # facility (0-big number). Both the priorities and the facilities map 262 | # roughly one-to-one to strings in the syslogd(8) source code. This 263 | # mapping is included in this file. 
264 | # 265 | # priorities (these are ordered) 266 | 267 | LOG_EMERG = 0 # system is unusable 268 | LOG_ALERT = 1 # action must be taken immediately 269 | LOG_CRIT = 2 # critical conditions 270 | LOG_ERR = 3 # error conditions 271 | LOG_WARNING = 4 # warning conditions 272 | LOG_NOTICE = 5 # normal but significant condition 273 | LOG_INFO = 6 # informational 274 | LOG_DEBUG = 7 # debug-level messages 275 | 276 | # facility codes 277 | LOG_KERN = 0 # kernel messages 278 | LOG_USER = 1 # random user-level messages 279 | LOG_MAIL = 2 # mail system 280 | LOG_DAEMON = 3 # system daemons 281 | LOG_AUTH = 4 # security/authorization messages 282 | LOG_SYSLOG = 5 # messages generated internally by syslogd 283 | LOG_LPR = 6 # line printer subsystem 284 | LOG_NEWS = 7 # network news subsystem 285 | LOG_UUCP = 8 # UUCP subsystem 286 | LOG_CRON = 9 # clock daemon 287 | LOG_AUTHPRIV = 10 # security/authorization messages (private) 288 | 289 | # other codes through 15 reserved for system use 290 | LOG_LOCAL0 = 16 # reserved for local use 291 | LOG_LOCAL1 = 17 # reserved for local use 292 | LOG_LOCAL2 = 18 # reserved for local use 293 | LOG_LOCAL3 = 19 # reserved for local use 294 | LOG_LOCAL4 = 20 # reserved for local use 295 | LOG_LOCAL5 = 21 # reserved for local use 296 | LOG_LOCAL6 = 22 # reserved for local use 297 | LOG_LOCAL7 = 23 # reserved for local use 298 | 299 | priority_names = { 300 | "alert": LOG_ALERT, 301 | "crit": LOG_CRIT, 302 | "critical": LOG_CRIT, 303 | "debug": LOG_DEBUG, 304 | "emerg": LOG_EMERG, 305 | "err": LOG_ERR, 306 | "error": LOG_ERR, # DEPRECATED 307 | "info": LOG_INFO, 308 | "notice": LOG_NOTICE, 309 | "panic": LOG_EMERG, # DEPRECATED 310 | "warn": LOG_WARNING, # DEPRECATED 311 | "warning": LOG_WARNING, 312 | } 313 | 314 | facility_names = { 315 | "auth": LOG_AUTH, 316 | "authpriv": LOG_AUTHPRIV, 317 | "cron": LOG_CRON, 318 | "daemon": LOG_DAEMON, 319 | "kern": LOG_KERN, 320 | "lpr": LOG_LPR, 321 | "mail": LOG_MAIL, 322 | "news": LOG_NEWS, 323 | 
"security": LOG_AUTH, # DEPRECATED 324 | "syslog": LOG_SYSLOG, 325 | "user": LOG_USER, 326 | "uucp": LOG_UUCP, 327 | "local0": LOG_LOCAL0, 328 | "local1": LOG_LOCAL1, 329 | "local2": LOG_LOCAL2, 330 | "local3": LOG_LOCAL3, 331 | "local4": LOG_LOCAL4, 332 | "local5": LOG_LOCAL5, 333 | "local6": LOG_LOCAL6, 334 | "local7": LOG_LOCAL7, 335 | } 336 | 337 | def __init__(self, address=('localhost', SYSLOG_UDP_PORT), facility=LOG_USER): 338 | """ 339 | Initialize a handler. 340 | 341 | If address is specified as a string, UNIX socket is used. 342 | If facility is not specified, LOG_USER is used. 343 | """ 344 | logging.Handler.__init__(self) 345 | 346 | self.address = address 347 | self.facility = facility 348 | if type(address) == types.StringType: 349 | self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_DGRAM) 350 | # syslog may require either DGRAM or STREAM sockets 351 | try: 352 | self.socket.connect(address) 353 | except socket.error: 354 | self.socket.close() 355 | self.socket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM) 356 | self.socket.connect(address) 357 | self.unixsocket = 1 358 | else: 359 | self.socket = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) 360 | self.unixsocket = 0 361 | 362 | self.formatter = None 363 | 364 | # curious: when talking to the unix-domain '/dev/log' socket, a 365 | # zero-terminator seems to be required. this string is placed 366 | # into a class variable so that it can be overridden if 367 | # necessary. 368 | log_format_string = '<%d>%s\000' 369 | 370 | def encodePriority (self, facility, priority): 371 | """ 372 | Encode the facility and priority. You can pass in strings or 373 | integers - if strings are passed, the facility_names and 374 | priority_names mapping dictionaries are used to convert them to 375 | integers. 
376 | """ 377 | if type(facility) == types.StringType: 378 | facility = self.facility_names[facility] 379 | if type(priority) == types.StringType: 380 | priority = self.priority_names[priority] 381 | return (facility << 3) | priority 382 | 383 | def close (self): 384 | """ 385 | Closes the socket. 386 | """ 387 | if self.unixsocket: 388 | self.socket.close() 389 | 390 | def emit(self, record): 391 | """ 392 | Emit a record. 393 | 394 | The record is formatted, and then sent to the syslog server. If 395 | exception information is present, it is NOT sent to the server. 396 | """ 397 | msg = self.format(record) 398 | """ 399 | We need to convert record level to lowercase, maybe this will 400 | change in the future. 401 | """ 402 | msg = self.log_format_string % ( 403 | self.encodePriority(self.facility, 404 | string.lower(record.levelname)), 405 | msg) 406 | try: 407 | if self.unixsocket: 408 | self.socket.send(msg) 409 | else: 410 | self.socket.sendto(msg, self.address) 411 | except: 412 | self.handleError(record) 413 | 414 | class SMTPHandler(logging.Handler): 415 | """ 416 | A handler class which sends an SMTP email for each logging event. 417 | """ 418 | def __init__(self, mailhost, fromaddr, toaddrs, subject): 419 | """ 420 | Initialize the handler. 421 | 422 | Initialize the instance with the from and to addresses and subject 423 | line of the email. To specify a non-standard SMTP port, use the 424 | (host, port) tuple format for the mailhost argument. 425 | """ 426 | logging.Handler.__init__(self) 427 | if type(mailhost) == types.TupleType: 428 | host, port = mailhost 429 | self.mailhost = host 430 | self.mailport = port 431 | else: 432 | self.mailhost = mailhost 433 | self.mailport = None 434 | self.fromaddr = fromaddr 435 | if type(toaddrs) == types.StringType: 436 | toaddrs = [toaddrs] 437 | self.toaddrs = toaddrs 438 | self.subject = subject 439 | 440 | def getSubject(self, record): 441 | """ 442 | Determine the subject for the email. 
443 | 444 | If you want to specify a subject line which is record-dependent, 445 | override this method. 446 | """ 447 | return self.subject 448 | 449 | weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'] 450 | 451 | monthname = [None, 452 | 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 453 | 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'] 454 | 455 | def date_time(self): 456 | """Return the current date and time formatted for a MIME header.""" 457 | year, month, day, hh, mm, ss, wd, y, z = time.gmtime(time.time()) 458 | s = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % ( 459 | self.weekdayname[wd], 460 | day, self.monthname[month], year, 461 | hh, mm, ss) 462 | return s 463 | 464 | def emit(self, record): 465 | """ 466 | Emit a record. 467 | 468 | Format the record and send it to the specified addressees. 469 | """ 470 | try: 471 | import smtplib 472 | port = self.mailport 473 | if not port: 474 | port = smtplib.SMTP_PORT 475 | smtp = smtplib.SMTP(self.mailhost, port) 476 | msg = self.format(record) 477 | msg = "From: %s\r\nTo: %s\r\nSubject: %s\r\nDate: %s\r\n\r\n%s" % ( 478 | self.fromaddr, 479 | string.join(self.toaddrs, ","), 480 | self.getSubject(record), 481 | self.date_time(), msg) 482 | smtp.sendmail(self.fromaddr, self.toaddrs, msg) 483 | smtp.quit() 484 | except: 485 | self.handleError(record) 486 | 487 | class NTEventLogHandler(logging.Handler): 488 | """ 489 | A handler class which sends events to the NT Event Log. Adds a 490 | registry entry for the specified application name. If no dllname is 491 | provided, win32service.pyd (which contains some basic message 492 | placeholders) is used. Note that use of these placeholders will make 493 | your event logs big, as the entire message source is held in the log. 494 | If you want slimmer logs, you have to pass in the name of your own DLL 495 | which contains the message definitions you want to use in the event log. 
496 | """ 497 | def __init__(self, appname, dllname=None, logtype="Application"): 498 | logging.Handler.__init__(self) 499 | try: 500 | import win32evtlogutil, win32evtlog 501 | self.appname = appname 502 | self._welu = win32evtlogutil 503 | if not dllname: 504 | dllname = os.path.split(self._welu.__file__) 505 | dllname = os.path.split(dllname[0]) 506 | dllname = os.path.join(dllname[0], r'win32service.pyd') 507 | self.dllname = dllname 508 | self.logtype = logtype 509 | self._welu.AddSourceToRegistry(appname, dllname, logtype) 510 | self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE 511 | self.typemap = { 512 | logging.DEBUG : win32evtlog.EVENTLOG_INFORMATION_TYPE, 513 | logging.INFO : win32evtlog.EVENTLOG_INFORMATION_TYPE, 514 | logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE, 515 | logging.ERROR : win32evtlog.EVENTLOG_ERROR_TYPE, 516 | logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE, 517 | } 518 | except ImportError: 519 | print "The Python Win32 extensions for NT (service, event "\ 520 | "logging) appear not to be available." 521 | self._welu = None 522 | 523 | def getMessageID(self, record): 524 | """ 525 | Return the message ID for the event record. If you are using your 526 | own messages, you could do this by having the msg passed to the 527 | logger being an ID rather than a formatting string. Then, in here, 528 | you could use a dictionary lookup to get the message ID. This 529 | version returns 1, which is the base message ID in win32service.pyd. 530 | """ 531 | return 1 532 | 533 | def getEventCategory(self, record): 534 | """ 535 | Return the event category for the record. 536 | 537 | Override this if you want to specify your own categories. This version 538 | returns 0. 539 | """ 540 | return 0 541 | 542 | def getEventType(self, record): 543 | """ 544 | Return the event type for the record. 545 | 546 | Override this if you want to specify your own types. 
This version does 547 | a mapping using the handler's typemap attribute, which is set up in 548 | __init__() to a dictionary which contains mappings for DEBUG, INFO, 549 | WARNING, ERROR and CRITICAL. If you are using your own levels you will 550 | either need to override this method or place a suitable dictionary in 551 | the handler's typemap attribute. 552 | """ 553 | return self.typemap.get(record.levelno, self.deftype) 554 | 555 | def emit(self, record): 556 | """ 557 | Emit a record. 558 | 559 | Determine the message ID, event category and event type. Then 560 | log the message in the NT event log. 561 | """ 562 | if self._welu: 563 | try: 564 | id = self.getMessageID(record) 565 | cat = self.getEventCategory(record) 566 | type = self.getEventType(record) 567 | msg = self.format(record) 568 | self._welu.ReportEvent(self.appname, id, cat, type, [msg]) 569 | except: 570 | self.handleError(record) 571 | 572 | def close(self): 573 | """ 574 | Clean up this handler. 575 | 576 | You can remove the application name from the registry as a 577 | source of event log entries. However, if you do this, you will 578 | not be able to see the events as you intended in the Event Log 579 | Viewer - it needs to be able to access the registry to get the 580 | DLL name. 581 | """ 582 | #self._welu.RemoveSourceFromRegistry(self.appname, self.logtype) 583 | pass 584 | 585 | class HTTPHandler(logging.Handler): 586 | """ 587 | A class which sends records to a Web server, using either GET or 588 | POST semantics. 
589 | """ 590 | def __init__(self, host, url, method="GET"): 591 | """ 592 | Initialize the instance with the host, the request URL, and the method 593 | ("GET" or "POST") 594 | """ 595 | logging.Handler.__init__(self) 596 | method = string.upper(method) 597 | if method not in ["GET", "POST"]: 598 | raise ValueError, "method must be GET or POST" 599 | self.host = host 600 | self.url = url 601 | self.method = method 602 | 603 | def mapLogRecord(self, record): 604 | """ 605 | Default implementation of mapping the log record into a dict 606 | that is send as the CGI data. Overwrite in your class. 607 | Contributed by Franz Glasner. 608 | """ 609 | return record.__dict__ 610 | 611 | def emit(self, record): 612 | """ 613 | Emit a record. 614 | 615 | Send the record to the Web server as an URL-encoded dictionary 616 | """ 617 | try: 618 | import httplib, urllib 619 | h = httplib.HTTP(self.host) 620 | url = self.url 621 | data = urllib.urlencode(self.mapLogRecord(record)) 622 | if self.method == "GET": 623 | if (string.find(url, '?') >= 0): 624 | sep = '&' 625 | else: 626 | sep = '?' 627 | url = url + "%c%s" % (sep, data) 628 | h.putrequest(self.method, url) 629 | if self.method == "POST": 630 | h.putheader("Content-length", str(len(data))) 631 | h.endheaders() 632 | if self.method == "POST": 633 | h.send(data) 634 | h.getreply() #can't do anything with the result 635 | except: 636 | self.handleError(record) 637 | 638 | class BufferingHandler(logging.Handler): 639 | """ 640 | A handler class which buffers logging records in memory. Whenever each 641 | record is added to the buffer, a check is made to see if the buffer should 642 | be flushed. If it should, then flush() is expected to do what's needed. 643 | """ 644 | def __init__(self, capacity): 645 | """ 646 | Initialize the handler with the buffer size. 
647 | """ 648 | logging.Handler.__init__(self) 649 | self.capacity = capacity 650 | self.buffer = [] 651 | 652 | def shouldFlush(self, record): 653 | """ 654 | Should the handler flush its buffer? 655 | 656 | Returns true if the buffer is up to capacity. This method can be 657 | overridden to implement custom flushing strategies. 658 | """ 659 | return (len(self.buffer) >= self.capacity) 660 | 661 | def emit(self, record): 662 | """ 663 | Emit a record. 664 | 665 | Append the record. If shouldFlush() tells us to, call flush() to process 666 | the buffer. 667 | """ 668 | self.buffer.append(record) 669 | if self.shouldFlush(record): 670 | self.flush() 671 | 672 | def flush(self): 673 | """ 674 | Override to implement custom flushing behaviour. 675 | 676 | This version just zaps the buffer to empty. 677 | """ 678 | self.buffer = [] 679 | 680 | class MemoryHandler(BufferingHandler): 681 | """ 682 | A handler class which buffers logging records in memory, periodically 683 | flushing them to a target handler. Flushing occurs whenever the buffer 684 | is full, or when an event of a certain severity or greater is seen. 685 | """ 686 | def __init__(self, capacity, flushLevel=logging.ERROR, target=None): 687 | """ 688 | Initialize the handler with the buffer size, the level at which 689 | flushing should occur and an optional target. 690 | 691 | Note that without a target being set either here or via setTarget(), 692 | a MemoryHandler is no use to anyone! 693 | """ 694 | BufferingHandler.__init__(self, capacity) 695 | self.flushLevel = flushLevel 696 | self.target = target 697 | 698 | def shouldFlush(self, record): 699 | """ 700 | Check for buffer full or a record at the flushLevel or higher. 701 | """ 702 | return (len(self.buffer) >= self.capacity) or \ 703 | (record.levelno >= self.flushLevel) 704 | 705 | def setTarget(self, target): 706 | """ 707 | Set the target handler for this handler. 
708 | """ 709 | self.target = target 710 | 711 | def flush(self): 712 | """ 713 | For a MemoryHandler, flushing means just sending the buffered 714 | records to the target, if there is one. Override if you want 715 | different behaviour. 716 | """ 717 | if self.target: 718 | for record in self.buffer: 719 | self.target.handle(record) 720 | self.buffer = [] 721 | 722 | def close(self): 723 | """ 724 | Flush, set the target to None and lose the buffer. 725 | """ 726 | self.flush() 727 | self.target = None 728 | self.buffer = [] 729 | -------------------------------------------------------------------------------- /static/styles/styles.css: -------------------------------------------------------------------------------- 1 | /* Main Styles for HTML Elements */ 2 | HTML, BODY 3 | { 4 | margin: 0; 5 | padding: 0; 6 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 7 | font-size: 103%; 8 | /* possibly both of these should be removed */ 9 | color: #000; 10 | background-color: #FFF; 11 | } 12 | 13 | IMG 14 | { 15 | border: 0; 16 | } 17 | 18 | H1,H2,H3,H4,H5 { 19 | font-family: Georgia, "Bitstream Vera Serif", "New York", Palatino, serif; 20 | font-weight:normal; 21 | line-height: 1em; 22 | } 23 | 24 | H1 25 | { 26 | font-size: 160%; 27 | color: #234764; 28 | margin: 0.7em 0 0.7em 0; 29 | text-decoration: none; 30 | } 31 | 32 | H1 A { 33 | color: #234764; 34 | } 35 | 36 | #intro H1 { 37 | font-size:145%; 38 | } 39 | H2 40 | { 41 | font-size: 140%; 42 | 43 | color: #366D9C; 44 | margin: 0.7em 0 0.7em 0; 45 | } 46 | 47 | H3 48 | { 49 | font-size: 135%; 50 | font-style:italic; 51 | color: #366D9C; 52 | margin: 0.4em 0 0.0em 0; 53 | } 54 | 55 | H4 56 | { 57 | font-size: 125%; 58 | color: #366D9C; 59 | margin: 0.4em 0 0.0em 0; 60 | } 61 | 62 | /* Logo */ 63 | #logoheader 64 | { 65 | border: 0; 66 | margin: 0; 67 | padding: 1px; 68 | z-index: 1; 69 | background-color:#F7F7F7; 70 | background-repeat: repeat-x; 71 | border-bottom: 1px solid 
#999999; 72 | height:84px; 73 | } 74 | 75 | #logo 76 | { 77 | width: 211px; 78 | height:71px; 79 | margin-top: 10px; 80 | margin-left: 3%; 81 | } 82 | 83 | 84 | 85 | /* main content section */ 86 | #content-body 87 | { 88 | position: absolute; 89 | left: 0; 90 | top: 63px; 91 | width: 93.9%; 92 | z-index: 0; 93 | font-size:75%; 94 | margin-left:3.0%; 95 | min-width:660px; 96 | } 97 | 98 | 99 | #body-main 100 | { 101 | 102 | padding: 0 0.55em 40px 0.0em; 103 | line-height: 1.4em; 104 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 105 | margin-left: 19em; 106 | } 107 | 108 | #body-main { 109 | font-size:100%; 110 | } 111 | 112 | 113 | /* end subnav */ 114 | /* Left Hand Navigation */ 115 | #left-hand-navigation 116 | { 117 | position: absolute; 118 | left: 3%; 119 | z-index: 1; 120 | top: 110px; 121 | } 122 | 123 | #menu 124 | { 125 | 126 | padding:0; 127 | margin-bottom: 5px; 128 | width: 16em; 129 | font-size:75%; 130 | } 131 | 132 | #menu ul 133 | { 134 | list-style: none; 135 | margin: 0; 136 | padding: 0; 137 | border: 0; 138 | } 139 | 140 | #menu li 141 | { 142 | display:inline; 143 | } 144 | 145 | #menu ul.level-one a 146 | { 147 | display: block; 148 | border: 1px solid #DADADA; 149 | padding: 2px 2px 2px 4px; 150 | margin: 0 0 4px 1.4em; 151 | width: 12em !important; 152 | width /**/: 13.4em; 153 | font-family: Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 154 | color: #4B5A6A; 155 | background-image: url(../images/nav-off-bg.png); 156 | background-position: top right; 157 | background-repeat: no-repeat; 158 | background-color:#F5F5F5; 159 | text-transform: uppercase 160 | 161 | } 162 | 163 | /*\*//*/ 164 | #menu ul.level-one a 165 | { 166 | width: 13.4em !important; 167 | } 168 | /**/ 169 | 170 | #menu ul.level-one a:hover 171 | { 172 | color:black; 173 | text-decoration:underline; 174 | } 175 | 176 | #menu ul.level-one a.selected 177 | { 178 | background-image: url(../images/nav-on-bg.png); 179 | 
background-color: #FFFFFF; 180 | color:black; 181 | border-left:3px solid #FFDB4C; 182 | } 183 | 184 | #menu ul.level-two li:first-child a 185 | { 186 | border-top:0; 187 | } 188 | 189 | #menu ul.level-two a 190 | { 191 | 192 | background-image: none; 193 | background-color: transparent; 194 | display: block; 195 | border: 0; 196 | border-top: 1px solid #DDD; 197 | padding: 0.1em; 198 | margin: 0 3em 0px 1.5em; 199 | color: #3C4B7B; 200 | background: none; 201 | width: 11em !important; 202 | width /**/: 11.2em; 203 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 204 | text-transform: none; 205 | } 206 | 207 | #menu ul.level-two a:hover 208 | { 209 | text-decoration: underline; 210 | color: black; 211 | } 212 | 213 | 214 | #menu ul.level-two a:visited 215 | { 216 | color: #4C3B5B; 217 | } 218 | 219 | #menu ul.level-one ul.level-two a.selected 220 | { 221 | background-image: url(../images/blank.gif); 222 | background-color: #FFFFFF; 223 | color:#000; 224 | border-left:0; 225 | 226 | font-weight:bold; 227 | } 228 | 229 | #menu li ul 230 | { 231 | margin-bottom: 7px 232 | } 233 | 234 | #menu a 235 | { 236 | text-decoration: none; 237 | } 238 | 239 | #menu ul.level-three a 240 | { 241 | 242 | display: block; 243 | border: 0; 244 | padding: 0.1em; 245 | margin: 0 3em 0px 1.8em; 246 | padding-left:1em; 247 | color: #5E72A5; 248 | background-image: none; 249 | width: 10em !important; 250 | width /**/: 11.4em; 251 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 252 | font-size: 95%; 253 | } 254 | 255 | #menu ul.level-three a:hover 256 | { 257 | text-decoration: underline; 258 | color: black; 259 | } 260 | 261 | #menu ul.level-three li.selected a.selected 262 | { 263 | 264 | background-image: url(../images/bullet.gif); 265 | background-repeat: no-repeat; 266 | background-position: center left; 267 | color:#000; 268 | font-weight:normal; 269 | } 270 | 271 | #menu ul.level-three 272 | { 273 | 
margin-top:5px; 274 | } 275 | 276 | #left-hand-navigation h4 277 | { 278 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 279 | font-size: 77%; 280 | color: #4C5B6B; 281 | padding: 0.4em 0 0 1.5em; 282 | margin: 0.2em 0 0.3em 0; 283 | } 284 | 285 | #left-hand-navigation h4 a 286 | { 287 | color:#4C5B6B; 288 | text-decoration:none; 289 | font-weight:bold; 290 | } 291 | 292 | #left-hand-navigation h4 a:hover 293 | { 294 | color:black; 295 | text-decoration:underline; 296 | } 297 | 298 | .calendar 299 | { 300 | align:center; 301 | padding-top: 0.0em; 302 | padding-left: 0em; 303 | font-size:75%; 304 | } 305 | 306 | #body-main a:link 307 | { 308 | color: #00A; 309 | text-decoration:none; 310 | 311 | } 312 | 313 | #body-main a:visited 314 | { 315 | color: #551A8B; 316 | text-decoration:none; 317 | } 318 | 319 | #body-main ul, #body-main li { 320 | margin-left:1em; 321 | padding-left:0; 322 | } 323 | 324 | /* Breadcrumb Trail */ 325 | #breadcrumb 326 | { 327 | vertical-align: middle; 328 | color: #3A4969; 329 | font-size: 110%; 330 | margin-bottom:30px; 331 | background-image: url(../images/bullet.gif); 332 | padding-left:0.9em; 333 | background-repeat: no-repeat; 334 | background-position: left; 335 | } 336 | 337 | #breadcrumb a:link, #breadcrumb a:visited 338 | { 339 | text-decoration: none; 340 | color: #3C4B6B; 341 | } 342 | 343 | #breadcrumb a:hover 344 | { 345 | text-decoration: underline; 346 | color: black; 347 | } 348 | 349 | .breadcrumb-separator 350 | { 351 | color:#ACC6D1; 352 | } 353 | 354 | #breadcrumb-text 355 | { 356 | color: #3A4969; 357 | font-size: 0.9em; 358 | vertical-align: middle; 359 | } 360 | 361 | /* Utility Menu */ 362 | #utility-menu 363 | { 364 | position: absolute; 365 | top: 0; 366 | right: 4%; 367 | height:85px; 368 | margin:0; 369 | padding:0; 370 | width:400px; 371 | z-index: 1; 372 | font-size: 88%; 373 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 374 | } 375 | 
376 | #utility-menu a 377 | { 378 | text-decoration: none; 379 | padding: 0 0.5em 0 0.5em; 380 | color: #436A85; 381 | } 382 | 383 | #utility-menu a:hover 384 | { 385 | text-decoration: underline; 386 | } 387 | 388 | #utility-menu #screen-switcher 389 | { 390 | display:none; /* will anybody notice? */ 391 | position:absolute; 392 | bottom:6px; 393 | right:0; 394 | margin:0; 395 | padding:0; 396 | } 397 | 398 | #screen-switcher dl { 399 | margin:0; 400 | padding:0; 401 | } 402 | 403 | /* Search Box */ 404 | #searchbox { 405 | position: absolute; 406 | white-space: nowrap; 407 | top: 16px; 408 | right: 0; 409 | width: 28.1em; 410 | vertical-align: middle; 411 | font-weight: bold; 412 | text-align: right; 413 | } 414 | #searchbox form { 415 | display:inline; 416 | } 417 | #search 418 | { 419 | color: #A38E60; 420 | } 421 | 422 | #search .input-text 423 | { 424 | margin-top: 3px; 425 | border: 1px solid #C4CCCC; 426 | background-color: #FFFFFF; 427 | vertical-align: top; 428 | font-weight:normal; 429 | font-size: 116%; 430 | width: 11em; 431 | } 432 | 433 | #search .input-button 434 | { 435 | color: #223344; 436 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 437 | font-weight: bold; 438 | font-size: 103%; 439 | border-top: 1px solid #C4CCCC; 440 | border-left: 1px solid #C4CCCC; 441 | border-bottom: 1px solid #6F7777; 442 | border-right: 1px solid #6F7777; 443 | background-color: #F8F7F7; 444 | background-image: url(../images/button-on-bg.png); 445 | background-repeat: no-repeat; 446 | padding: 0px 0.2em 0px 0.2em; 447 | margin: 3px 0.4em 0px 0.4em; 448 | vertical-align: text-top; 449 | } 450 | 451 | /* Navigation Skipper */ 452 | #skiptonav 453 | { 454 | height: 1px; 455 | width: 1px; 456 | } 457 | 458 | /* Document Navigation */ 459 | .homepage-box h4, #document-navigation h4, .externallinks h4 460 | { 461 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 462 | font-size: 94%; 463 | font-weight: 
bold; 464 | padding: 3px 0 4px 12px; 465 | margin-bottom:8px; 466 | border-bottom:3px solid #FFBC29; 467 | background-color: #F6F6F6; 468 | text-align: left; 469 | z-index: 0; 470 | -moz-border-radius-topleft: 16px; 471 | } 472 | 473 | #document-navigation 474 | { 475 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 476 | font-size: 98%; 477 | text-align: left; 478 | color: #3C4B6B; 479 | background-color: #F7F6F0; 480 | width: 15em; 481 | border: 1px solid #B7BECC; 482 | z-index: 999; 483 | float:right; 484 | margin: 0 12px 12px 12px; 485 | padding-bottom: 10px; 486 | list-style:none; 487 | -moz-border-radius-topleft: 12px; 488 | -moz-border-radius-bottomright: 12px; 489 | } 490 | 491 | #document-navigation ul 492 | { 493 | list-style: none; 494 | display: block; 495 | border:0; 496 | padding: 0; 497 | margin: 0 3em 0 1.1em; 498 | color: #3C4B6B; 499 | background: none; 500 | width: auto; 501 | font-size: 103%; 502 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 503 | } 504 | 505 | #document-navigation a:link, #document-navigation a:visited 506 | { 507 | color:#3C4B6B; 508 | text-decoration: none; 509 | } 510 | 511 | #document-navigation a:hover 512 | { 513 | color: #000000; 514 | text-decoration: underline; 515 | } 516 | 517 | #document-navigation h4 a:link, #document-navigation h4 a:visited 518 | { 519 | color: #FFFFFF; 520 | } 521 | 522 | 523 | #document-navigation h4 a:hover 524 | { 525 | color: #FFFFFF; 526 | 527 | } 528 | #document-navigation form { 529 | margin:12px; 530 | } 531 | #document-navigation input { 532 | margin-top:3px; 533 | } 534 | input.formbutton { 535 | color: #223344; 536 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 537 | font-weight: bold; 538 | font-size: 103%; 539 | border-top: 1px solid #C4CCCC; 540 | border-left: 1px solid #C4CCCC; 541 | border-bottom: 1px solid #6F7777; 542 | border-right: 1px solid #6F7777; 543 | 
background-color: #F8F7F7; 544 | vertical-align: text-top; 545 | } 546 | .formtextinput { 547 | border: 1px solid #C4CCCC; 548 | background-color: #FFFFFF; 549 | vertical-align: top; 550 | font-weight:normal; 551 | font-size: 116%; 552 | } 553 | #id { 554 | width:6em; 555 | } 556 | #document-navigation .inputbox { 557 | width:6em; 558 | } 559 | 560 | /* vote box */ 561 | div.vote-box p { 562 | margin: 8px 0; 563 | } 564 | 565 | div.vote-box { 566 | padding-bottom: 0!important; 567 | } 568 | 569 | /* Add this so tables will correctly flow below document navigation div. */ 570 | div#content table { 571 | clear:right; 572 | } 573 | 574 | #selecteditem 575 | { 576 | padding-left: 12px; 577 | background-image: url(../images/bullet.gif); 578 | background-repeat: no-repeat; 579 | background-position: center left; 580 | } 581 | 582 | #selecteditem a 583 | { 584 | color: #000000; 585 | } 586 | 587 | #selecteditem a:hover 588 | { 589 | text-decoration:none; 590 | } 591 | 592 | .group { 593 | background-image: url(../images/bullet.gif); 594 | background-repeat: no-repeat; 595 | background-position: center left; 596 | padding-left: 12px; 597 | } 598 | /* subnav */ 599 | #document-navigation ul.level-one { 600 | padding:0; 601 | margin:0 0 0 1.1em; 602 | font-size:88% 603 | } 604 | #document-navigation ul.level-one li { 605 | padding:0; 606 | margin:0; 607 | } 608 | #document-navigation li ul 609 | { 610 | margin-bottom: 7px 611 | } 612 | #document-navigation ul.level-one a 613 | { 614 | border: 0; 615 | border-bottom: 1px solid #DDD; 616 | color: #3C4B6B; 617 | width: 160px; 618 | display: block; 619 | padding:0; 620 | margin:0 621 | } 622 | 623 | #document-navigation ul.level-one a:hover 624 | { 625 | text-decoration: underline; 626 | color: black; 627 | } 628 | #document-navigation ul.level-one a:link { 629 | border:0; 630 | } 631 | #document-navigation ul.level-one a:visited { 632 | border:0; 633 | } 634 | #document-navigation ul.level-one a.selected 635 | { 636 | 
color:#000; 637 | font-weight: bold; 638 | border-left:0; 639 | } 640 | #document-navigation ul.level-two { 641 | padding:0; 642 | margin:0; 643 | } 644 | #document-navigation ul.level-two li { 645 | padding:0; 646 | margin:0; 647 | } 648 | #document-navigation ul.level-two a 649 | { 650 | border: 0; 651 | margin: 0; 652 | padding:0 0 0 1em; 653 | color: #5E72A5; 654 | font-size:97%; 655 | display: block; 656 | width: 140px; 657 | 658 | } 659 | 660 | #document-navigation ul.level-two a:hover 661 | { 662 | text-decoration: underline; 663 | color: black; 664 | } 665 | 666 | #document-navigation ul.level-two li.selected a.selected 667 | { 668 | 669 | background-image: url(../images/bullet.gif); 670 | background-repeat: no-repeat; 671 | background-position: center left; 672 | color:#444; 673 | border:0; 674 | font-size:100%; 675 | font-weight:normal; 676 | font-style: italic; 677 | } 678 | #document-navigation a 679 | { 680 | text-decoration: none; 681 | } 682 | 683 | /* Quick Links */ 684 | ul.quicklinks 685 | { 686 | margin-left: 0; 687 | padding-left: 0; 688 | list-style: none; 689 | margin: 0 0 0 1.5em; 690 | line-height: 1.2em; 691 | } 692 | 693 | ul.quicklinks li 694 | { 695 | padding-left: 16px; 696 | background-image: url(../images/bullet.gif); 697 | background-repeat: no-repeat; 698 | background-position: 0 50%; 699 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 700 | font-size: 77%; 701 | } 702 | 703 | ul.quicklinks a 704 | { 705 | text-decoration:none; 706 | color: #3C4B6B; 707 | } 708 | 709 | ul.quicklinks a:hover 710 | { 711 | text-decoration:underline; 712 | color: #000000; 713 | } 714 | 715 | #sidebar-wrapper 716 | { 717 | position:relative; 718 | width:100%; 719 | } 720 | 721 | #sidebar { 722 | font-size:9px; 723 | float:left; 724 | top:35px; 725 | width:37%; 726 | height:auto; 727 | padding:0; 728 | border:0; 729 | margin:0; 730 | text-align:left; 731 | padding-bottom:10px; 732 | } 733 | 734 | #sidebar2 { 735 | 
font-size:9px; 736 | float:left; 737 | top:35px; 738 | width:25%; 739 | height:auto; 740 | padding:0; 741 | border:0; 742 | margin:0; 743 | text-align:left; 744 | padding-bottom:10px; 745 | } 746 | #sidebar3 { 747 | font-size:9px; 748 | float:left; 749 | top:35px; 750 | width:37%; 751 | border:0; 752 | height:auto; 753 | padding:0; 754 | margin:0; 755 | text-align:left; 756 | padding-bottom:10px; 757 | } 758 | 759 | .teaser { 760 | overflow:hidden; 761 | margin-top:5px; 762 | border:0; 763 | width:100%; 764 | text-align:left; 765 | } 766 | 767 | P.news 768 | { 769 | padding-left:12px; 770 | margin:0 0 5px 0; 771 | } 772 | 773 | P A:link,P A:visited 774 | { 775 | border-bottom:1px dashed #ccc; 776 | } 777 | 778 | P.news A:link { 779 | border-bottom: 1px dashed #21356F; 780 | } 781 | P.news A:visited 782 | { 783 | border-bottom:1px dashed #DDD; 784 | } 785 | 786 | #body-main h1.pageheading { 787 | margin:48px 0 10px 0; 788 | } 789 | #body-main H2.news 790 | { 791 | background-image: url(../images/bullet.gif); 792 | background-repeat: no-repeat; 793 | background-position: center left; 794 | padding-left:12px; 795 | margin:0; 796 | font-size:110%; 797 | color: #3C4B6B; 798 | font-weight:normal; 799 | font-weight:bold; 800 | } 801 | 802 | #body-main H2.news A:link, #body-main H2.news A:visited 803 | { 804 | text-decoration:none; 805 | color: #2A374B; 806 | 807 | } 808 | 809 | #body-main ul A:link,#body-main ul A:visited 810 | { 811 | border-bottom:1px dashed #ccc; 812 | } 813 | 814 | /* Document Navigation */ 815 | #document-navigation h4, .homepage-box h4, .externallinks h4 816 | { 817 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 818 | font-size: 94%; 819 | font-weight: bold; 820 | padding: 3px 0 4px 12px; 821 | margin:0 0 0 0; 822 | text-align: left; 823 | z-index: 0; 824 | color:#FFFFFF; 825 | -moz-border-radius-topleft: 11px; 826 | background-color:#376A94; 827 | 828 | } 829 | 830 | #document-navigation h4 a, .homepage-box h4 
a, .externallinks h4 a 831 | { 832 | background: #4C83A3; 833 | padding: 0 2px; 834 | color: white!important; 835 | border: none!important; 836 | } 837 | 838 | /* news specific */ 839 | .news-section { 840 | margin-top:15px; 841 | min-height:40em; 842 | _height:40em; 843 | } 844 | .news-section h1 { 845 | margin:0 0 10px 0; 846 | } 847 | .news-section p { 848 | padding-left:12px; 849 | margin:0 0 2px 0; 850 | } 851 | .news-section .pubdate { 852 | padding:0 0 0 12px; 853 | margin:0 0 5px 0; 854 | color:#696969; 855 | } 856 | /* homepage specific */ 857 | #intro { 858 | margin-top:5px; 859 | } 860 | #intro h1 { 861 | margin:0 0 10px 0; 862 | } 863 | #intro p { 864 | margin-top:10px 865 | } 866 | .content-section { 867 | margin-top:15px; 868 | } 869 | .content-section h1 { 870 | margin:0 0 10px 0; 871 | } 872 | .content-section p { 873 | padding-left:12px; 874 | margin:0 0 5px 0; 875 | } 876 | .content-section .pubdate { 877 | padding:0 0 0 12px; 878 | margin:0 0 8px 0; 879 | color:#696969; 880 | } 881 | #homepage-boxes { 882 | width: 250px; 883 | float:right; 884 | margin: 0 0 6px 30px; 885 | } 886 | 887 | #homepage-boxes .homepage-box, .externallinks 888 | { 889 | font-family: Arial, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 890 | font-size: 98%; 891 | text-align: left; 892 | color: #3C4B6B; 893 | background-color: #FBFBF7; 894 | width: 240px; 895 | border: 1px solid #B7BECC; 896 | z-index: 999; 897 | float:right; 898 | padding-bottom: 10px; 899 | list-style:none; 900 | margin: 6px; 901 | -moz-border-radius-topleft: 12px; 902 | -moz-border-radius-bottomright: 12px; 903 | background-color: #F7F6F0; 904 | } 905 | 906 | #homepage-boxes .homepage-box ul, .externallinks ul 907 | { 908 | list-style: none; 909 | display: block; 910 | border:0; 911 | padding: 0; 912 | margin: 0.5em 3em 0 1.1em; 913 | color: #3C4B6B; 914 | background: none; 915 | width: auto; 916 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 917 | 
font-size: 103%; 918 | margin-left:0; 919 | } 920 | 921 | #homepage-boxes .homepage-box li, #content-body #body-main .externallinks li { 922 | padding-left:15px; 923 | } 924 | 925 | 926 | #homepage-boxes .homepage-box a:link, .externallinks a:link 927 | { 928 | color: #00A; 929 | text-decoration: none; 930 | border-width:0; 931 | border-bottom:1px dashed #ccc; 932 | } 933 | 934 | #homepage-boxes .homepage-box a:visited, .externallinks a:visited 935 | { 936 | color: #551A8B; 937 | text-decoration: none; 938 | border-width:0; 939 | border-bottom:1px dashed #ccc; 940 | } 941 | 942 | #homepage-boxes .homepage-box a:hover, .externallinks a:hover 943 | { 944 | color: #000000; 945 | text-decoration: underline; 946 | } 947 | 948 | #homepage-boxes .items, .externallinks .items { 949 | padding-left: 12px; 950 | } 951 | 952 | #homepage-boxes .homepage-box .items a:link, .externallinks .items a:link 953 | { 954 | color: #33D; 955 | border-bottom:1px dashed #CCC; 956 | } 957 | 958 | #homepage-boxes .homepage-box .items a:visited, .externallinks .items a:visited 959 | { 960 | color: #854ABB; 961 | border-bottom:1px dashed #CCC; 962 | } 963 | 964 | .homepage-box li.more { 965 | text-align:right; 966 | } 967 | #skiptonav { 968 | position:absolute; 969 | } 970 | 971 | /* additional rest styles */ 972 | 973 | a.toc-backref { 974 | color: inherit ! 
important; 975 | } 976 | 977 | ul.auto-toc { 978 | list-style-type: none; 979 | } 980 | 981 | p.admonition-title { 982 | color:red; 983 | } 984 | 985 | pre.literal-block, pre.doctest-block { 986 | padding: 10px; 987 | font-size: 115%; 988 | background-color: #E0E0FF; 989 | } 990 | 991 | div.topic { 992 | margin: 2em; 993 | } 994 | 995 | p.topic-title { 996 | font-weight: bold; 997 | } 998 | 999 | div.admonition, div.attention, div.caution, div.danger, div.error, 1000 | div.hint, div.important, div.note, div.tip, div.warning { 1001 | margin: 2em; 1002 | border-style: solid; 1003 | border-color: #CCC; 1004 | border-width: thin 0 0 thin; 1005 | padding: 0 0.5em 0em 1em; 1006 | } 1007 | 1008 | div.admonition p.admonition-title, div.hint p.admonition-title, 1009 | div.important p.admonition-title, div.note p.admonition-title, 1010 | div.tip p.admonition-title { 1011 | font-weight: bold; 1012 | font-size: 110%; 1013 | } 1014 | 1015 | div.attention p.admonition-title, div.caution p.admonition-title, 1016 | div.danger p.admonition-title, div.error p.admonition-title, 1017 | div.warning p.admonition-title { 1018 | color: red; 1019 | font-weight: bold; 1020 | font-size: 110%; 1021 | } 1022 | 1023 | ol.arabic { 1024 | list-style: decimal } 1025 | 1026 | ol.loweralpha { 1027 | list-style: lower-alpha } 1028 | 1029 | ol.upperalpha { 1030 | list-style: upper-alpha } 1031 | 1032 | ol.lowerroman { 1033 | list-style: lower-roman } 1034 | 1035 | ol.upperroman { 1036 | list-style: upper-roman } 1037 | 1038 | /* used to remove borders from tables and images */ 1039 | .borderless, table.borderless td, table.borderless th { 1040 | border: 0 } 1041 | 1042 | table.borderless td, table.borderless th { 1043 | /* Override padding for "table.docutils td" with "! important". 1044 | The right padding separates the table cells. */ 1045 | padding: 0 0.5em 0 0 ! important } 1046 | 1047 | img.hspaced { 1048 | padding-left: 1em; 1049 | padding-right: 1em; } 1050 | 1051 | /* General Table Style ?? 
built for sigs page */ 1052 | 1053 | #sigindex { 1054 | border-collapse:collapse; 1055 | width:100%; 1056 | } 1057 | 1058 | #sigindex THEAD { 1059 | border-bottom: 2px solid #CCC; 1060 | } 1061 | 1062 | #sigindex TH { 1063 | padding: 0.5em 0.2em 0.5em 0.2em ; 1064 | text-align:left; 1065 | } 1066 | 1067 | #sigindex TD { 1068 | padding: 0.5em 0.2em 0.5em 0.2em ; 1069 | border-bottom: 1px dotted #CCC; 1070 | } 1071 | 1072 | blockquote { 1073 | margin-left:1em; 1074 | padding-left:1em; 1075 | border-left:1px solid #CCC; 1076 | } 1077 | 1078 | #sidebar-wrapper { 1079 | display:none; 1080 | } 1081 | 1082 | .more { 1083 | text-align:right; 1084 | } 1085 | 1086 | #content-body #homepage-boxes a.more { 1087 | display:block; 1088 | padding:3px 10px 0 0; 1089 | border-bottom:0; 1090 | } 1091 | 1092 | .homepage-box div { 1093 | padding: 0 15px 0 15px; 1094 | } 1095 | 1096 | /* Tweak alignment of the PyCon ad space */ 1097 | #house-ad {padding: 0 1px 0 0;} 1098 | #house-ad img {padding-left: 4px} 1099 | 1100 | #utility-menu dl { 1101 | font-size:90%; 1102 | text-align:right; 1103 | font-weight:bold; 1104 | } 1105 | 1106 | #utility-menu dd { 1107 | display:inline; 1108 | margin:0; 1109 | padding:0 0 0 8px; 1110 | font-weight:normal; 1111 | } 1112 | 1113 | #utility-menu dd a { 1114 | margin:0; 1115 | padding:0; 1116 | color:#888; 1117 | } 1118 | 1119 | #utility-menu dt { 1120 | display:inline; 1121 | margin:0; 1122 | } 1123 | 1124 | #siteinfo.homepage-box h1 { 1125 | font-size:70%; 1126 | margin:8px 0 0 0 ; 1127 | padding:0; 1128 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 1129 | font-size: 103%; 1130 | } 1131 | 1132 | #siteinfo.homepage-box div div { 1133 | margin-left:0; 1134 | padding:0; 1135 | } 1136 | #footer { 1137 | margin:3em 0 0 0; 1138 | padding:1em 0 1em 0; 1139 | border-top:1px dotted #CCC; 1140 | bottom:0; 1141 | font-size:90%; 1142 | position:relative; 1143 | clear:both; 1144 | } 1145 | #footer a:visited, #footer a:link { 
1146 | color:#666; 1147 | display:inline; 1148 | } 1149 | #footer a:hover { 1150 | color:#333; 1151 | display:inline; 1152 | } 1153 | #footer #credits { 1154 | position:absolute; 1155 | top:0; 1156 | right:0; 1157 | margin:1em 0 0 0; 1158 | } 1159 | #footer #copyright { 1160 | text-align: center; 1161 | margin: 0; 1162 | padding: 0; 1163 | } 1164 | ul#iconbar li 1165 | { 1166 | display: inline; 1167 | } 1168 | 1169 | 1170 | #searchbox a.reference, #searchbox span.reference { 1171 | position:absolute; 1172 | top:2.9em; 1173 | right:8.3em; 1174 | font-size:85%; 1175 | color:#339; 1176 | text-decoration:none; 1177 | font-weight:normal; 1178 | } 1179 | 1180 | #searchbox a.reference { 1181 | border-bottom:1px dotted #CCC; 1182 | } 1183 | 1184 | iframe { 1185 | margin-top:20px; 1186 | } 1187 | 1188 | #content { 1189 | min-height:500px; 1190 | _height:500px; 1191 | font-size:115%; 1192 | } 1193 | 1194 | #content p, #content li { 1195 | line-height: 1.5; 1196 | } 1197 | 1198 | /* For Meeting Minutes */ 1199 | .action { 1200 | color:#B00; 1201 | } 1202 | 1203 | /* For Meeting Minutes */ 1204 | .followup { 1205 | color: #00A; 1206 | } 1207 | 1208 | /* For Meeting Minutes */ 1209 | .progress { 1210 | color: #090; 1211 | } 1212 | 1213 | /* For Meeting Minutes */ 1214 | .minutes-title { 1215 | text-align: center; 1216 | font-family: Georgia, "Bitstream Vera Serif", "New York", Palatino, serif; 1217 | font-size: 160%; 1218 | font-weight: bold; 1219 | line-height: 120%; 1220 | color: #234764; 1221 | margin-bottom: 1.0em; 1222 | } 1223 | 1224 | #body-main .success p a { 1225 | display:inline; 1226 | } 1227 | 1228 | #body-main .success p { 1229 | margin:4px 8px; 1230 | } 1231 | 1232 | /* For PEP header blocks */ 1233 | .rfc2822 th.field-name { 1234 | text-align: right; 1235 | padding-right: 0.5em; 1236 | } 1237 | 1238 | /* For field lists */ 1239 | th.field-name { 1240 | text-align: left; 1241 | padding-right: 0.5em; 1242 | } 1243 | 1244 | #applications h5 { 1245 | margin:0; 1246 | 
font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 1247 | font-size: 103%; 1248 | margin: 0 0 0.2em 1em; 1249 | font-weight:bold; 1250 | } 1251 | 1252 | #content-body #applications h5 a:visited, 1253 | #content-body #applications h5 a:link { 1254 | color:#444; 1255 | border:0; 1256 | } 1257 | 1258 | #applications p { 1259 | font-size: 85%; 1260 | width:100%; 1261 | margin: 0 0 0.5em 0.5em; 1262 | font-family: Arial, Verdana, Geneva, "Bitstream Vera Sans", Helvetica, sans-serif; 1263 | padding:0; 1264 | } 1265 | 1266 | .diff-header { 1267 | font-weight: bold; 1268 | } 1269 | 1270 | .diff-title { 1271 | background-color: #C0C0C0; 1272 | } 1273 | 1274 | .diff-added { 1275 | background-color: #E0FFE0; 1276 | vertical-align: sub; 1277 | } 1278 | 1279 | .diff-removed { 1280 | background-color: #FFFFE0; 1281 | vertical-align: sub; 1282 | } 1283 | 1284 | .diff-added span { 1285 | background-color: #80FF80; 1286 | } 1287 | 1288 | .diff-removed span { 1289 | background-color: #FFFF80; 1290 | } 1291 | 1292 | /* 1293 | * This is a set of styles for the embedding of videos such as you see on 1294 | * youtube.com and such. The www.python.org/docs/av/5minutes/ page is 1295 | * collecting these and you'll see them used in the videoframe.html file 1296 | * there. 
1297 | */ 1298 | 1299 | .videoframes { 1300 | padding: 0; 1301 | border-collapse: collapse; 1302 | } 1303 | 1304 | .videoframes .title { 1305 | /* Size and Placement */ 1306 | margin: 0; 1307 | padding: 3px 6px 2px 6px; 1308 | width: 668px; 1309 | 1310 | /* Background and Color */ 1311 | background: #3775a9; /* Python Blue */ 1312 | color: white; 1313 | -moz-background-clip: -moz-initial; 1314 | -moz-background-origin: -moz-initial; 1315 | -moz-background-inline-policy: -moz-initial; 1316 | 1317 | /* Typeography */ 1318 | font-family: Verdana, sans-serif; 1319 | font-size: 120%; 1320 | font-size-adjust: none; 1321 | font-stretch: normal; 1322 | font-style: normal; 1323 | font-variant: normal; 1324 | font-weight: normal; 1325 | line-height: 1.3; 1326 | text-align: left; 1327 | text-decoration: none; 1328 | } 1329 | 1330 | .videoframes .author { 1331 | /* Size and Placement */ 1332 | width: 668px; 1333 | padding: 2px 6px 1px 6px; 1334 | margin: 0; 1335 | 1336 | /* Background and Color */ 1337 | background-color: #e0e0e0; 1338 | background-repeat: repeat-x; 1339 | color: #000000; 1340 | -moz-background-clip: -moz-initial; 1341 | -moz-background-origin: -moz-initial; 1342 | -moz-background-inline-policy: -moz-initial; 1343 | 1344 | /* Typeography */ 1345 | font-family: Verdana, sans-serif; 1346 | font-size: 100%; 1347 | font-size-adjust: none; 1348 | font-stretch: normal; 1349 | font-style: normal; 1350 | font-variant: normal; 1351 | font-weight: normal; 1352 | line-height: 1.3; 1353 | text-align: left; 1354 | } 1355 | 1356 | .videoframes .abstract { 1357 | /* Size and Placement */ 1358 | width: 654px; 1359 | padding: 2px 6px 4px 20px; 1360 | margin: 0; 1361 | 1362 | font-style: italic; 1363 | background-color: #e0e0e0; 1364 | } 1365 | 1366 | .videoframes .viewscreen { 1367 | /* Size and Placement */ 1368 | width: 640px; 1369 | padding: 4px 20px 4px 20px; 1370 | margin: 0; 1371 | 1372 | /* Background and Color */ 1373 | background-color: #e0e0e0; 1374 | } 1375 | 1376 | 
.videoframes .followup { 1377 | /* Size and Placement */ 1378 | width: 640px; 1379 | margin: 0 0 30px 0; 1380 | 1381 | font-size: 105%; 1382 | text-align: center; 1383 | color: black; 1384 | } 1385 | 1386 | .last, .with-subtitle { 1387 | margin-bottom: 0 ! important; 1388 | } 1389 | 1390 | div.sidebar { 1391 | margin-left: 1em; 1392 | border: 1px solid #999999; 1393 | padding: 1em; 1394 | background-color: #F7F7F7; 1395 | width: 40%; 1396 | float: right; 1397 | clear: right; 1398 | } 1399 | 1400 | p.sidebar-title { 1401 | font-family: sans-serif; 1402 | font-weight: bold; 1403 | font-size: larger; 1404 | text-align: center; 1405 | margin-top: 0; 1406 | margin-bottom: 10px; 1407 | } 1408 | 1409 | p.sidebar-title a:link, p.sidebar-title A:visited 1410 | { 1411 | border-bottom: none; 1412 | } 1413 | 1414 | p.sidebar-subtitle { 1415 | font-family: sans-serif; 1416 | font-weight: bold; 1417 | margin-top: 0; 1418 | text-align: center; 1419 | } 1420 | 1421 | .clear-right h2 { 1422 | clear: right; 1423 | } 1424 | 1425 | .align-right { 1426 | margin-left: 1em; 1427 | } 1428 | --------------------------------------------------------------------------------