├── .gitignore ├── .upload.py.kate-swp ├── README ├── TODO.md ├── appendix ├── about.html ├── abstracts.html ├── examples.html ├── fdl.html ├── fdl_aggregation.html ├── fdl_applicability.html ├── fdl_collections.html ├── fdl_combining.html ├── fdl_copying.html ├── fdl_copyinginquantity.html ├── fdl_future.html ├── fdl_howto.html ├── fdl_modifications.html ├── fdl_termination.html ├── fdl_translation.html ├── furtherreading.html ├── history.html ├── license.html ├── license_terms.html └── tips.html ├── css └── diveintopython.css ├── diveintopython.css ├── diveintopython.es ├── .dip.html.kate-swp ├── about.html ├── abstracts.html ├── apihelper_alltogether.html ├── apihelper_andor.html ├── apihelper_builtin.html ├── apihelper_divein.html ├── apihelper_filter.html ├── apihelper_getattr.html ├── apihelper_lambda.html ├── apihelper_optional.html ├── apihelper_summary.html ├── css │ ├── .diveintopython.css.kate-swp │ ├── diveintopython.css │ └── diveintopython.css~ ├── dip.html ├── examples.html ├── fdl.html ├── fdl_aggregation.html ├── fdl_applicability.html ├── fdl_collections.html ├── fdl_combining.html ├── fdl_copying.html ├── fdl_copyinginquantity.html ├── fdl_future.html ├── fdl_howto.html ├── fdl_modifications.html ├── fdl_termination.html ├── fdl_translation.html ├── furtherreading.html ├── history.html ├── images │ ├── callouts │ │ ├── 1.png │ │ ├── 10.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── 8.png │ │ └── 9.png │ ├── caution.png │ ├── diveintopython.png │ ├── dot.png │ ├── important.png │ ├── note.png │ ├── tip.png │ └── warning.png ├── index.html ├── index.html~ ├── license.html ├── license_terms.html ├── odbchelper_dict.html ├── odbchelper_divein.html ├── odbchelper_docstring.html ├── odbchelper_funcdef.html ├── odbchelper_indenting.html ├── odbchelper_join.html ├── odbchelper_list.html ├── odbchelper_map.html ├── odbchelper_multiassign.html ├── odbchelper_objects.html ├── odbchelper_stringformatting.html 
├── odbchelper_summary.html ├── odbchelper_testing.html ├── odbchelper_tuple.html ├── odbchelper_vardef.html ├── scrape.py ├── scrape.py~ ├── tips.html ├── toc.html └── upload.py ├── diveintopython.fr ├── .scrape.py.kate-swp ├── appendix │ ├── fdl.html │ └── license.html ├── diveintopython.adrahon.org │ └── index.html ├── file_handling │ ├── all_together.html │ ├── for_loops.html │ ├── more_on_modules.html │ └── summary.html ├── getting_to_know_python │ ├── declaring_functions.html │ ├── documenting_functions.html │ ├── everything_is_an_object.html │ ├── indenting_code.html │ ├── index.html │ └── testing_modules.html ├── html_processing │ ├── all_together.html │ ├── basehtmlprocessor.html │ ├── dictionary_based_string_formatting.html │ ├── extracting_data.html │ ├── index.html │ ├── introducing_sgmllib.html │ ├── quoting_attribute_values.html │ └── summary.html ├── index.html ├── installing_python │ ├── debian.html │ ├── index.html │ ├── macos9.html │ ├── macosx.html │ ├── redhat.html │ ├── shell.html │ ├── source.html │ ├── summary.html │ └── windows.html ├── native_data_types │ ├── declaring_variables.html │ ├── formatting_strings.html │ ├── index.html │ ├── joining_lists.html │ ├── lists.html │ ├── mapping_lists.html │ ├── summary.html │ └── tuples.html ├── object_oriented_framework │ ├── class_attributes.html │ ├── defining_classes.html │ ├── importing_modules.html │ ├── index.html │ ├── instantiating_classes.html │ ├── private_functions.html │ ├── special_class_methods.html │ ├── special_class_methods2.html │ └── summary.html ├── power_of_introspection │ ├── all_together.html │ ├── and_or.html │ ├── built_in_functions.html │ ├── filtering_lists.html │ ├── getattr.html │ ├── index.html │ ├── lambda_functions.html │ ├── optional_arguments.html │ └── summary.html ├── regular_expressions │ ├── index.html │ ├── n_m_syntax.html │ ├── roman_numerals.html │ ├── street_addresses.html │ ├── summary.html │ └── verbose.html ├── scrape.py ├── scrape.py~ ├── 
scripts_and_streams │ └── stdin_stdout_stderr.html └── toc │ ├── index.html │ └── index.html.bak ├── diveintopython.it ├── .dip.html.kate-swp ├── .scrape.py.kate-swp ├── appendix │ ├── about.html │ ├── abstracts.html │ ├── examples.html │ ├── fdl.html │ ├── fdl_aggregation.html │ ├── fdl_applicability.html │ ├── fdl_collections.html │ ├── fdl_combining.html │ ├── fdl_copying.html │ ├── fdl_copyinginquantity.html │ ├── fdl_future.html │ ├── fdl_howto.html │ ├── fdl_modifications.html │ ├── fdl_termination.html │ ├── fdl_translation.html │ ├── furtherreading.html │ ├── history.html │ ├── license.html │ ├── license_terms.html │ └── tips.html ├── css │ ├── .diveintopython.css.kate-swp │ ├── diveintopython.css │ └── diveintopython.css~ ├── dip.html ├── dip.html~ ├── download │ └── diveintopython-pdf-it-2.0.zip ├── getting_to_know_python │ ├── declaring_functions.html │ ├── declaring_variables.html │ ├── dictionaries.html │ ├── documenting_functions.html │ ├── everything_is_an_object.html │ ├── formatting_strings.html │ ├── indenting_code.html │ ├── index.html │ ├── joining_lists.html │ ├── lists.html │ ├── mapping_lists.html │ ├── multiple_values.html │ ├── summary.html │ ├── testing_modules.html │ └── tuples.html ├── html_processing │ ├── all_together.html │ ├── basehtmlprocessor.html │ ├── dialect.html │ ├── dictionary_based_string_formatting.html │ ├── extracting_data.html │ ├── index.html │ ├── introducing_sgmllib.html │ ├── locals_and_globals.html │ ├── quoting_attribute_values.html │ ├── regular_expressions.html │ └── summary.html ├── images │ ├── callouts │ │ ├── 1.png │ │ ├── 10.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── 8.png │ │ └── 9.png │ ├── caution.png │ ├── diveintopython.png │ ├── dot.png │ ├── important.png │ ├── note.png │ ├── tip.png │ └── warning.png ├── index.html ├── index.html.1 ├── installing_python │ ├── debian.html │ ├── index.html │ ├── macos9.html │ ├── macosx.html │ ├── redhat.html │ ├── 
shell.html │ ├── source.html │ ├── summary.html │ └── windows.html ├── it.diveintopython.org │ └── index.html ├── object_oriented_framework │ ├── all_together.html │ ├── class_attributes.html │ ├── defining_classes.html │ ├── file_objects.html │ ├── for_loops.html │ ├── handling_exceptions.html │ ├── importing_modules.html │ ├── index.html │ ├── instantiating_classes.html │ ├── more_on_modules.html │ ├── os_module.html │ ├── private_functions.html │ ├── special_class_methods.html │ ├── special_class_methods2.html │ ├── summary.html │ └── userdict.html ├── power_of_introspection │ ├── all_together.html │ ├── and_or.html │ ├── built_in_functions.html │ ├── filtering_lists.html │ ├── getattr.html │ ├── index.html │ ├── lambda_functions.html │ ├── optional_arguments.html │ └── summary.html ├── regression_testing │ ├── all_together_part_1.html │ ├── data_centric.html │ ├── dynamic_import.html │ ├── filtering_lists.html │ ├── finding_the_path.html │ ├── index.html │ ├── inside_pyunit.html │ └── mapping_lists.html ├── scrape.py ├── scrape.py~ ├── toc │ ├── index.html │ └── index.html.1 ├── unit_testing │ ├── handling_bugs.html │ ├── handling_changing_requirements.html │ ├── index.html │ ├── postscript.html │ ├── refactoring.html │ ├── romantest.html │ ├── stage_1.html │ ├── stage_2.html │ ├── stage_3.html │ ├── stage_4.html │ ├── stage_5.html │ ├── summary.html │ ├── testing_for_failure.html │ ├── testing_for_sanity.html │ └── testing_for_success.html ├── upload.py ├── upload.py~ └── xml_processing │ ├── all_together.html │ ├── attributes.html │ ├── caching.html │ ├── child_nodes.html │ ├── command_line_arguments.html │ ├── handlers_by_node_type.html │ ├── index.html │ ├── input_sources.html │ ├── packages.html │ ├── parsing_xml.html │ ├── searching.html │ ├── stdin_stdout_stderr.html │ ├── summary.html │ └── unicode.html ├── diveintopython.kr ├── .index.htm.kate-swp ├── chapter1.htm ├── chapter2.htm ├── chapter3.htm ├── chapter4.htm ├── chapter5.htm ├── css │ └── 
diveintopython.css ├── gnufdl.htm ├── gnugpl.htm ├── html │ ├── appendix.htm │ └── gnufdl.htm ├── images │ ├── callouts │ │ ├── 1.png │ │ ├── 10.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── 8.png │ │ └── 9.png │ ├── caution.png │ ├── diveintopython.png │ ├── dot.png │ ├── important.png │ ├── note.png │ ├── tip.png │ └── warning.png ├── index.htm ├── index.htm~ ├── preface.htm ├── upload.py └── upload.py~ ├── diveintopython.ru ├── .dip.html.kate-swp ├── .scrape.py.kate-swp ├── apihelper_alltogether.html ├── apihelper_andor.html ├── apihelper_builtin.html ├── apihelper_divein.html ├── apihelper_filter.html ├── apihelper_getattr.html ├── apihelper_lambda.html ├── apihelper_optional.html ├── apihelper_summary.html ├── css │ └── diveintopython.css ├── dip.html ├── dip.html~ ├── fdl.html ├── fdl_aggregation.html ├── fdl_applicability.html ├── fdl_collections.html ├── fdl_combining.html ├── fdl_copying.html ├── fdl_copyinginquantity.html ├── fdl_future.html ├── fdl_howto.html ├── fdl_modifications.html ├── fdl_termination.html ├── fdl_translation.html ├── fileinfo_alltogether.html ├── fileinfo_class.html ├── fileinfo_classattributes.html ├── fileinfo_divein.html ├── fileinfo_exception.html ├── fileinfo_files.html ├── fileinfo_for.html ├── fileinfo_fromimport.html ├── fileinfo_instantiation.html ├── fileinfo_modules.html ├── fileinfo_morespecial.html ├── fileinfo_os.html ├── fileinfo_private.html ├── fileinfo_specialmethods.html ├── fileinfo_summary.html ├── fileinfo_userdict.html ├── images │ ├── callouts │ │ ├── 1.png │ │ ├── 10.png │ │ ├── 2.png │ │ ├── 3.png │ │ ├── 4.png │ │ ├── 5.png │ │ ├── 6.png │ │ ├── 7.png │ │ ├── 8.png │ │ └── 9.png │ ├── caution.png │ ├── diveintopython.png │ ├── dot.png │ ├── important.png │ ├── logo.png │ ├── logo.png.crdownload │ ├── note.png │ ├── tip.png │ └── warning.png ├── index.html ├── license.html ├── license_terms.html ├── odbchelper_dict.html ├── odbchelper_divein.html ├── 
odbchelper_docstring.html ├── odbchelper_funcdef.html ├── odbchelper_indenting.html ├── odbchelper_join.html ├── odbchelper_list.html ├── odbchelper_map.html ├── odbchelper_multiassign.html ├── odbchelper_objects.html ├── odbchelper_stringformatting.html ├── odbchelper_summary.html ├── odbchelper_testing.html ├── odbchelper_tuple.html ├── odbchelper_vardef.html ├── preface.html ├── ru.diveintopython.org │ ├── apihelper_alltogether.html │ ├── apihelper_andor.html │ ├── apihelper_builtin.html │ ├── apihelper_divein.html │ ├── apihelper_filter.html │ ├── apihelper_getattr.html │ ├── apihelper_lambda.html │ ├── apihelper_optional.html │ ├── apihelper_summary.html │ ├── fdl.html │ ├── fdl_aggregation.html │ ├── fdl_applicability.html │ ├── fdl_collections.html │ ├── fdl_combining.html │ ├── fdl_copying.html │ ├── fdl_copyinginquantity.html │ ├── fdl_future.html │ ├── fdl_howto.html │ ├── fdl_modifications.html │ ├── fdl_termination.html │ ├── fdl_translation.html │ ├── fileinfo_alltogether.html │ ├── fileinfo_class.html │ ├── fileinfo_classattributes.html │ ├── fileinfo_divein.html │ ├── fileinfo_exception.html │ ├── fileinfo_files.html │ ├── fileinfo_for.html │ ├── fileinfo_fromimport.html │ ├── fileinfo_instantiation.html │ ├── fileinfo_modules.html │ ├── fileinfo_morespecial.html │ ├── fileinfo_os.html │ ├── fileinfo_private.html │ ├── fileinfo_specialmethods.html │ ├── fileinfo_summary.html │ ├── fileinfo_userdict.html │ ├── index.html │ ├── license.html │ ├── license_terms.html │ ├── odbchelper_dict.html │ ├── odbchelper_divein.html │ ├── odbchelper_docstring.html │ ├── odbchelper_funcdef.html │ ├── odbchelper_indenting.html │ ├── odbchelper_join.html │ ├── odbchelper_list.html │ ├── odbchelper_map.html │ ├── odbchelper_multiassign.html │ ├── odbchelper_objects.html │ ├── odbchelper_stringformatting.html │ ├── odbchelper_summary.html │ ├── odbchelper_testing.html │ ├── odbchelper_tuple.html │ ├── odbchelper_vardef.html │ ├── preface.html │ └── toc.html ├── 
scrape.py ├── scrape.py~ ├── toc.html ├── toc.html.1 ├── toc │ └── toc.html └── upload.py ├── download ├── diveintopython-common-5.4.zip ├── diveintopython-examples-5.4.zip ├── diveintopython-html-5.4 (3).zip ├── diveintopython-html-5.4.zip ├── diveintopython-html-flat-5.4.zip ├── diveintopython-pdf-5.4.zip ├── diveintopython-text-5.4.zip ├── diveintopython-word-5.4.zip ├── diveintopython-xml-5.4 (1).zip ├── diveintopython-xml-5.4.zip └── diveintopython.pdf ├── dynamic_functions ├── index.html ├── stage1.html ├── stage2.html ├── stage3.html ├── stage4.html ├── stage5.html ├── stage6.html └── summary.html ├── file_handling ├── all_together.html ├── file_objects.html ├── for_loops.html ├── index.html ├── more_on_modules.html ├── os_module.html └── summary.html ├── functional_programming ├── all_together.html ├── data_centric.html ├── dynamic_import.html ├── filtering_lists.html ├── finding_the_path.html ├── index.html ├── mapping_lists.html └── summary.html ├── getting_to_know_python ├── declaring_functions.html ├── documenting_functions.html ├── everything_is_an_object.html ├── indenting_code.html ├── indenting_code.html~ ├── index.html └── testing_modules.html ├── history.xml ├── html_processing ├── all_together.html ├── basehtmlprocessor.html ├── dialect.html ├── dictionary_based_string_formatting.html ├── extracting_data.html ├── index.html ├── introducing_sgmllib.html ├── locals_and_globals.html ├── quoting_attribute_values.html └── summary.html ├── http_web_services ├── alltogether.html ├── debugging.html ├── etags.html ├── gzip_compression.html ├── http_features.html ├── index.html ├── redirects.html ├── review.html ├── summary.html └── user_agent.html ├── images ├── callouts │ ├── 1.png │ ├── 10.png │ ├── 2.png │ ├── 3.png │ ├── 4.png │ ├── 5.png │ ├── 6.png │ ├── 7.png │ ├── 8.png │ └── 9.png ├── caution.png ├── diveintopython.png ├── dot.png ├── important.png ├── note.png ├── tip.png └── warning.png ├── index.html ├── installing_python ├── debian.html ├── 
index.html ├── macos9.html ├── macosx.html ├── redhat.html ├── shell.html ├── source.html ├── summary.html └── windows.html ├── native_data_types ├── declaring_variables.html ├── formatting_strings.html ├── index.html ├── joining_lists.html ├── lists.html ├── mapping_lists.html ├── summary.html └── tuples.html ├── object_oriented_framework ├── class_attributes.html ├── defining_classes.html ├── importing_modules.html ├── index.html ├── instantiating_classes.html ├── private_functions.html ├── special_class_methods.html ├── special_class_methods2.html ├── summary.html └── userdict.html ├── performance_tuning ├── dictionary_lookups.html ├── index.html ├── list_operations.html ├── regular_expressions.html ├── string_manipulation.html ├── summary.html └── timeit.html ├── power_of_introspection ├── all_together.html ├── and_or.html ├── built_in_functions.html ├── filtering_lists.html ├── getattr.html ├── index.html ├── lambda_functions.html ├── optional_arguments.html └── summary.html ├── refactoring ├── handling_changing_requirements.html ├── index.html ├── postscript.html ├── refactoring.html └── summary.html ├── regular_expressions ├── index.html ├── n_m_syntax.html ├── phone_numbers.html ├── roman_numerals.html ├── street_addresses.html ├── summary.html └── verbose.html ├── replace.log ├── replace.py ├── scrape.py ├── scrape.pyc ├── scripts_and_streams ├── all_together.html ├── caching.html ├── child_nodes.html ├── command_line_arguments.html ├── handlers_by_node_type.html ├── index.html ├── stdin_stdout_stderr.html └── summary.html ├── searcher.py ├── soap_web_services ├── debugging.html ├── first_steps.html ├── google.html ├── index.html ├── install.html ├── introspection.html ├── summary.html ├── troubleshooting.html └── wsdl.html ├── toc └── index.html ├── unit_testing ├── diving_in.html ├── index.html ├── romantest.html ├── stage_1.html ├── stage_2.html ├── stage_3.html ├── stage_4.html ├── stage_5.html ├── testing_for_failure.html ├── testing_for_sanity.html 
└── testing_for_success.html ├── upload.py ├── www.diveintopython.org └── index.html └── xml_processing ├── attributes.html ├── index.html ├── packages.html ├── parsing_xml.html ├── searching.html ├── summary.html └── unicode.html /.gitignore: -------------------------------------------------------------------------------- 1 | *.php 2 | *.swp 3 | *~ 4 | *.pyc 5 | .kate-swp 6 | .idea/ 7 | .commit* 8 | -------------------------------------------------------------------------------- /.upload.py.kate-swp: -------------------------------------------------------------------------------- 1 | Kate Swap File - Version 1.0SW T /I U ESW U I V ESI V fESR V ESI V iESI V fESI V ESR V ESI V fESR V ESR V U VE -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | Copyright Mark Pilgrim 2 | Updated by Josh Gachnang 3 | 4 | This is a clone of Mark Pilgrim's Dive Into Python. I am updating it for Python 2.7 and fixing links/other errors. 5 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | 1. In chapter 6.1: Exceptions and File Handling, 2 | it would be good to mention the *with* statement. 3 | https://docs.python.org/2/reference/compound_stmts.html#the-with-statement 4 | 2. In chapter 6.2: Working with File Objects, an example uses 5 | `f.seek(offset, whence)` where *whence* is a digit instead of 6 | [os module constant] 7 | (https://docs.python.org/2/library/stdtypes.html?highlight=seek#file.seek). 8 | It should be `os.SEEK_END` in this particular case. 9 | It has to be corrected in the chapter and the examples source files. 10 | -------------------------------------------------------------------------------- /appendix/about.html: -------------------------------------------------------------------------------- 1 | 2 | 4 | 5 |
6 | 7 |You are here: Home > Dive Into Python > About the book | 35 |<< >> | 36 |||||
39 | Dive Into Python40 |Python from novice to pro 41 | |
42 | 43 | 46 | | 47 |
This book was written in DocBook XML using Emacs, and converted to HTML using the SAXON XSLT processor from Michael Kay with a customized version of Norman Walsh's XSL stylesheets. From there, it was converted to PDF using HTMLDoc, and to plain text using w3m. Program listings and examples were colorized using an updated version of Just van Rossum's pyfontify.py, which is included in the example scripts. 60 |
61 |If you're interested in learning more about DocBook for technical writing, you can download the XML source and the build scripts, which include the customized XSL stylesheets used to create all the different formats of the book. You should also read the canonical book, DocBook: The Definitive Guide. If you're going to do any serious writing in DocBook, I would recommend subscribing to the DocBook mailing lists. 62 |
63 |<< Revision history |
67 | | | 68 | |
69 | GNU Free Documentation License >> |
70 |
You are here: Home > Dive Into Python > GNU Free Documentation License > Termination | 35 |<< >> | 36 |||||
39 | Dive Into Python40 |Python from novice to pro 41 | |
42 | 43 | 46 | | 47 |
You may not copy, modify, sublicense, or distribute the 60 | Document except as expressly provided for under this License. Any 61 | other attempt to copy, modify, sublicense or distribute the 62 | Document is void, and will automatically terminate your rights 63 | under this License. However, parties who have received copies, or 64 | rights, from you under this License will not have their licenses 65 | terminated so long as such parties remain in full 66 | compliance. 67 |
68 |<< Translation |
72 | | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 73 | |
74 | Future revisions of this license >> |
75 |
Inicio > Inmersión en Python > Sobre este libro | << >> | ||||
diveintopython.org Python para programadores con experiencia |
Este libro se escribió en DocBook XML y se transformó en 21 | HTML utilizando el procesador XSLT SAXON de Michael Kay con una versión adaptada de las hojas de 22 | estilo XSL de Norman Walsh. A partir de ahí, se realizó la 23 | conversión a PDF con HTMLDoc, y a texto llano con w3m. Los listados de programas y los 24 | ejemplos se colorearon con una versión actualizada de pyfontify.py, de Just van Rossum, que se 25 | incluye entre los scripts de 26 | ejemplo.
Si está usted interesado en aprender más sobre DocBook para 27 | escribir textos técnicos, puede descargar los ficheros 28 | fuente XML, que incluyen también las hojas de estilo XSL 29 | adaptadas utilizadas para crear los distintos formatos. Debería leer 30 | también el libro canónico, DocBook: The 31 | Definitive Guide. Si desea hacer algo serio con 32 | DocBook, le recomiendo que se suscriba a las listas de correo de DocBook.
Historial de revisiones | GNU Free Documentation License | |
Inicio > Inmersión en Python > GNU Free Documentation License > Collections of documents | << >> | ||||
diveintopython.org Python para programadores con experiencia |
You may make a collection consisting of the Document and 21 | other documents released under this License, and replace the 22 | individual copies of this License in the various documents with a 23 | single copy that is included in the collection, provided that you 24 | follow the rules of this License for verbatim copying of each of 25 | the documents in all other respects.
You may extract a single document from such a collection, 26 | and distribute it individually under this License, provided you 27 | insert a copy of this License into the extracted document, and 28 | follow this License in all other respects regarding verbatim 29 | copying of that document.
Combining documents | 1 2 3 4 5 6 7 8 9 10 11 12 | Aggregation with independent works |
Inicio > Inmersión en Python > GNU Free Documentation License > Verbatim copying | << >> | ||||
diveintopython.org Python para programadores con experiencia |
You may copy and distribute the Document in any medium, 21 | either commercially or noncommercially, provided that this 22 | License, the copyright notices, and the license notice saying this 23 | License applies to the Document are reproduced in all copies, and 24 | that you add no other conditions whatsoever to those of this 25 | License. You may not use technical measures to obstruct or 26 | control the reading or further copying of the copies you make or 27 | distribute. However, you may accept compensation in exchange for 28 | copies. If you distribute a large enough number of copies you 29 | must also follow the conditions in section 3.
You may also lend copies, under the same conditions stated 30 | above, and you may publicly display copies.
Applicability and definitions | 1 2 3 4 5 6 7 8 9 10 11 12 | Copying in quantity |
Inicio > Inmersión en Python > GNU Free Documentation License > Termination | << >> | ||||
diveintopython.org Python para programadores con experiencia |
You may not copy, modify, sublicense, or distribute the 21 | Document except as expressly provided for under this License. Any 22 | other attempt to copy, modify, sublicense or distribute the 23 | Document is void, and will automatically terminate your rights 24 | under this License. However, parties who have received copies, or 25 | rights, from you under this License will not have their licenses 26 | terminated so long as such parties remain in full 27 | compliance.
Translation | 1 2 3 4 5 6 7 8 9 10 11 12 | Future revisions of this license |
Inicio > Inmersión en Python > GNU Free Documentation License > Translation | << >> | ||||
diveintopython.org Python para programadores con experiencia |
Translation is considered a kind of modification, so you may 21 | distribute translations of the Document under the terms of section 22 | 4. Replacing Invariant Sections with translations requires 23 | special permission from their copyright holders, but you may 24 | include translations of some or all Invariant Sections in addition 25 | to the original versions of these Invariant Sections. You may 26 | include a translation of this License provided that you also 27 | include the original English version of this License. In case of 28 | a disagreement between the translation and the original English 29 | version of this License, the original English version will 30 | prevail.
Aggregation with independent works | 1 2 3 4 5 6 7 8 9 10 11 12 | Termination |
Inicio > Inmersión en Python > Historial de revisiones | << >> | ||||
diveintopython.org Python para programadores con experiencia |
30 de septiembre de 2001 (1.0)
21 |Lista de ejemplos | Sobre este libro | |
The requested URL /odbchelper_stringformatting.html was not found on this server.
20 | 21 | -------------------------------------------------------------------------------- /diveintopython.es/scrape.py: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://es.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(url): 13 | try: 14 | p = open(url, 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % (filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = "/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = URL + 
'/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 | if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 | 117 | #scrape('dip.html') 118 | 119 | for file in os.listdir('/home/josh/programming/diveintopython.es'): 120 | if 
os.path.isdir(file): 121 | directory = file 122 | for f in os.listdir(file): 123 | if 'html' in f: 124 | purify(directory + '/' + f) 125 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /diveintopython.es/scrape.py~: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://es.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(url): 13 | try: 14 | p = open(url, 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % (filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = 
"/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = URL + '/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 | if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 
| 117 | #scrape('dip.html') 118 | 119 | for file in os.listdir('/home/josh/programming/diveintopython.sp'): 120 | if os.path.isdir(file): 121 | directory = file 122 | for f in os.listdir(file): 123 | if 'html' in f: 124 | purify(directory + '/' + f) 125 | 126 | 127 | 128 | -------------------------------------------------------------------------------- /diveintopython.es/upload.py: -------------------------------------------------------------------------------- 1 | import boto 2 | import os 3 | import sys 4 | 5 | BUCKET_NAME = 'es.diveintopython.net' 6 | ignored_folders = ('save', 'www.diveintopython.org', 'diveintopythonbak', '.git') 7 | ignored_files = ('scrape.py', 'upload.py', 'scrape.py~', 'upload.py~', '.gitignore') 8 | 9 | conn = boto.connect_s3() 10 | bucket = conn.get_bucket(BUCKET_NAME) 11 | sys.stdout.write("Beginning upload to %s" % BUCKET_NAME) 12 | 13 | def check_ignore(dir, file): 14 | for fol in ignored_folders: 15 | if fol in dir: 16 | return True 17 | for f in ignored_files: 18 | if f == file: 19 | return True 20 | return False 21 | 22 | def upload_file(arg, dirname, names): 23 | #'/'.join(a['href'].split('/')[8:]) 24 | if len(dirname.split('/')) == 5: 25 | dir = '/'.join(dirname.split('/')[5:]) 26 | else: 27 | dir = '/'.join(dirname.split('/')[5:]) + '/' 28 | print "dir is: %s" % dir 29 | 30 | #print "dirname is %s, dir is %s" % (dirname, dir) 31 | for file in names: 32 | 33 | #print "full path is %s" % (dir + file) 34 | if os.path.isdir(dir + file): 35 | continue 36 | if check_ignore(dir, file) == True: 37 | continue 38 | sys.stdout.write("uploading ") 39 | sys.stdout.write(dir + file) 40 | sys.stdout.write('\n') 41 | key = boto.s3.key.Key(bucket=bucket, name=(dir + file)) 42 | key.set_contents_from_filename((dir + file), cb=status, num_cb=10, policy="public-read") 43 | 44 | 45 | #if dirname == "": 46 | #key = boto.s3.key.Key(bucket=bucket, name=(name)) 47 | #key.set_contents_from_filename((name), cb=status, num_cb=10, policy="public-read") 48 | 
#else: 49 | #key = boto.s3.key.Key(bucket=bucket, name=(dirname + '/' + name)) 50 | #key.set_contents_from_filename((dirname + '/' + name), cb=status, num_cb=10, policy="public-read") 51 | #sys.stdout.write('\n') 52 | 53 | def upload(directory): 54 | os.path.walk(directory, upload_file, 'arg') 55 | 56 | def status(complete, total): 57 | sys.stdout.write('.') 58 | sys.stdout.flush() 59 | 60 | 61 | if __name__ == '__main__': 62 | upload('/home/josh/programming/diveintopython.es') 63 | -------------------------------------------------------------------------------- /diveintopython.fr/.scrape.py.kate-swp: -------------------------------------------------------------------------------- 1 | Kate Swap File - Version 1.0SI w #E -------------------------------------------------------------------------------- /diveintopython.fr/scrape.py: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://fr.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(): 13 | try: 14 | p = open('toc/index.html', 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % 
(filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = "/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = URL + '/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 
| if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 | #for f in os.listdir('/home/josh/programming/diveintopython/'): 117 | #if ".html" in f.name: 118 | #purify(f) 119 | 120 | #purify('toc/index.html') 121 | scrape() 122 | #for file in os.listdir('/home/josh/programming/diveintopython.fr'): 123 | #if os.path.isdir(file): 124 | #directory = file 125 | #for f in os.listdir(file): 126 | #if 'html' in f: 127 | #purify(directory + '/' + f) 128 | 129 | #replace_url(None, None) 130 | 131 | -------------------------------------------------------------------------------- /diveintopython.fr/scrape.py~: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://fr.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(): 13 | try: 14 | p = open('toc/index.html', 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | 
os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % (filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = "/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = URL + '/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async 
Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 | if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 | #for f in os.listdir('/home/josh/programming/diveintopython/'): 117 | #if ".html" in f.name: 118 | #purify(f) 119 | 120 | purify('toc/index.html') 121 | 122 | #for file in os.listdir('/home/josh/programming/diveintopython.fr'): 123 | #if os.path.isdir(file): 124 | #directory = file 125 | #for f in os.listdir(file): 126 | #if 'html' in f: 127 | #purify(directory + '/' + f) 128 | 129 | #replace_url(None, None) 130 | 131 | -------------------------------------------------------------------------------- /diveintopython.fr/scripts_and_streams/stdin_stdout_stderr.html: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.fr/scripts_and_streams/stdin_stdout_stderr.html -------------------------------------------------------------------------------- /diveintopython.it/.dip.html.kate-swp: -------------------------------------------------------------------------------- 1 | Kate Swap File - Version 1.0SR EU E 
-------------------------------------------------------------------------------- /diveintopython.it/.scrape.py.kate-swp: -------------------------------------------------------------------------------- 1 | Kate Swap File - Version 1.0SR R ~ R } R | R { R z ESI y #E -------------------------------------------------------------------------------- /diveintopython.it/css/.diveintopython.css.kate-swp: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/css/.diveintopython.css.kate-swp -------------------------------------------------------------------------------- /diveintopython.it/css/diveintopython.css: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | /* 7 | FILE ARCHIVED ON 0:05:32 Jul 26, 2011 AND RETRIEVED FROM THE 8 | INTERNET ARCHIVE ON 20:15:07 Nov 4, 2011. 9 | JAVASCRIPT APPENDED BY WAYBACK MACHINE, COPYRIGHT INTERNET ARCHIVE. 10 | 11 | ALL OTHER CONTENT MAY ALSO BE PROTECTED BY COPYRIGHT (17 U.S.C. 12 | SECTION 108(a)(3)). 
13 | */ 14 | body { 15 | background-color: white; 16 | color: #222; 17 | font-family: "Book Antiqua", Georgia, Palatino, Times, "Times New Roman", serif; 18 | line-height: 145%; 19 | } 20 | 21 | h1, h2, h3, h4, h5, h6, p#tagline, #menu, .Footer, #breadcrumb { 22 | margin: 0; 23 | padding: 0; 24 | font-family: "Lucida Grande", Tahoma, "Trebuchet MS", Verdana, Lucida, Geneva, Helvetica, sans-serif; 25 | } 26 | 27 | .titlepage h1 { 28 | margin-top: 1em; 29 | } 30 | 31 | h1 { 32 | font-size: 180%; 33 | background-color: white; 34 | color: maroon; 35 | } 36 | 37 | h1 a { 38 | background-color: white; 39 | color: maroon; 40 | text-decoration: none; 41 | } 42 | 43 | h2 { 44 | margin-top: 1em; 45 | font-size: 140%; 46 | } 47 | 48 | h3 { 49 | margin-top: 1em; 50 | font-size: 100%; 51 | } 52 | 53 | label, .divider { 54 | display: none; 55 | } 56 | 57 | #Header { 58 | border-bottom: 1px solid maroon; 59 | } 60 | 61 | p#tagline { 62 | margin: 3px 0 6px 2px; 63 | font-size: 90%; 64 | font-weight: bold; 65 | background-color: white; 66 | color: maroon; 67 | } 68 | 69 | #breadcrumb { 70 | font-size: 90%; 71 | } 72 | 73 | #navigation { 74 | font-size: 90%; 75 | } 76 | 77 | .thispage { 78 | font-weight: bold; 79 | } 80 | 81 | .Footer { 82 | font-size: 90%; 83 | border-top: 1px solid maroon; 84 | background-color: white; 85 | color: maroon; 86 | width: 85%; 87 | } 88 | 89 | table.Footer { 90 | margin-top: 1em; 91 | } 92 | 93 | #breadcrumb { 94 | padding: 0 0 1em 2px; 95 | } 96 | 97 | .tip, .note, .warning, .caution, .important, .footnote { 98 | margin: 1em 2em 1em 2em; 99 | } 100 | 101 | .furtherreading { 102 | margin-top: 1em; 103 | } 104 | 105 | .screen, .programlisting, .example table { 106 | margin: 1em 1em 0 1em; 107 | padding: 0; 108 | } 109 | 110 | .example table { 111 | margin-bottom: 1em; 112 | } 113 | 114 | .toc li { 115 | list-style: none; 116 | } 117 | 118 | /* ----- Python code syntax coloring ----- */ 119 | .computeroutput, .traceback, .pykeyword, .pystring, 
.pycomment, .pyfunction, .pyclass { 120 | background-color: white; 121 | } 122 | 123 | .pykeyword, .pyfunction, .pyclass { 124 | font-weight: bold; 125 | } 126 | 127 | .computeroutput { 128 | color: teal; 129 | } 130 | 131 | .traceback { 132 | color: red; 133 | } 134 | 135 | .pykeyword { 136 | color: navy; 137 | } 138 | 139 | .pystring { 140 | color: olive; 141 | } 142 | 143 | .pycomment { 144 | color: green; 145 | font-style: italic; 146 | } 147 | 148 | .pyfunction { 149 | color: teal; 150 | } 151 | 152 | .pyclass { 153 | color: blue; 154 | } 155 | 156 | /* ----- home page ----- */ 157 | #wrapper { 158 | float: left; 159 | width: 66%; 160 | border: 0; 161 | background-color: white; 162 | color: #222; 163 | margin: 1em 2em 0 0; 164 | } 165 | 166 | #menu { 167 | font-size: 90%; 168 | margin-top: 1.5em; 169 | } 170 | 171 | #menu h2 { 172 | background-color: white; 173 | color: maroon; 174 | } 175 | 176 | #menu ul { 177 | list-style: none; 178 | } 179 | 180 | .selfad { 181 | padding: 5px; 182 | background-color: #ccc; 183 | color: #222; 184 | border: 1px solid #222; 185 | } 186 | 187 | .selfad p { 188 | font-family: Tahoma, sans-serif; 189 | font-size: 90%; 190 | margin: 10px; 191 | line-height: 140%; 192 | } 193 | 194 | #sponsoredlinks { 195 | float: right; 196 | margin-top: 20px; 197 | margin-left: 20px; 198 | } 199 | -------------------------------------------------------------------------------- /diveintopython.it/css/diveintopython.css~: -------------------------------------------------------------------------------- 1 | 2 | -------------------------------------------------------------------------------- /diveintopython.it/download/diveintopython-pdf-it-2.0.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/download/diveintopython-pdf-it-2.0.zip 
-------------------------------------------------------------------------------- /diveintopython.it/images/callouts/1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/1.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/10.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/10.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/2.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/3.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/4.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/5.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/5.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/6.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/7.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/8.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/8.png -------------------------------------------------------------------------------- /diveintopython.it/images/callouts/9.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/callouts/9.png -------------------------------------------------------------------------------- /diveintopython.it/images/caution.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/caution.png -------------------------------------------------------------------------------- /diveintopython.it/images/diveintopython.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/diveintopython.png -------------------------------------------------------------------------------- /diveintopython.it/images/dot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/dot.png -------------------------------------------------------------------------------- /diveintopython.it/images/important.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/important.png -------------------------------------------------------------------------------- /diveintopython.it/images/note.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/note.png -------------------------------------------------------------------------------- /diveintopython.it/images/tip.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/tip.png -------------------------------------------------------------------------------- /diveintopython.it/images/warning.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/joshgachnang/diveintopython/b9e739bf083b952c2dbd970d470aac67a85c0b06/diveintopython.it/images/warning.png -------------------------------------------------------------------------------- 
/diveintopython.it/scrape.py: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://it.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(): 13 | try: 14 | p = open('dip.html', 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % (filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = "/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = URL + '/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 
'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 | if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 | 117 | 118 | #scrape() 119 | 120 | 121 | for file in os.listdir('/home/josh/programming/diveintopython.it'): 122 | if os.path.isdir(file): 123 | directory = file 124 | for f in os.listdir(file): 125 | if 'html' in f: 126 | purify(directory + '/' 
+ f) 127 | 128 | 129 | 130 | 131 | -------------------------------------------------------------------------------- /diveintopython.it/scrape.py~: -------------------------------------------------------------------------------- 1 | from BeautifulSoup import BeautifulStoneSoup, BeautifulSoup, Comment 2 | import urllib 3 | import os 4 | import shutil 5 | import string 6 | import re 7 | 8 | URL='http://it.diveintopython.net/' 9 | 10 | GOOGLE_ANALYTICS_KEY = 'UA-9740779-18' 11 | 12 | def scrape(): 13 | try: 14 | p = open('dip.html', 'r') 15 | soup = BeautifulSoup(p.read()) 16 | except IOError, e: 17 | print "io error code: %d msg: %s" % (e.returncode, e.message) 18 | return None 19 | 20 | for i in soup.findAll('a'): 21 | if i.has_key('href'): 22 | if i['href'][0:4] == 'http' and '#' not in i['href']: 23 | try: 24 | filename = i['href'].split('/')[-2] + '/' + i['href'].split('/')[-1] 25 | print "saving %s into %s" % (i['href'], filename, ) 26 | if not os.path.exists(i['href'].split('/')[-2]): 27 | os.mkdir(i['href'].split('/')[-2]) 28 | with open(filename, 'w') as out: 29 | out.write(urllib.urlopen(i['href']).read()) 30 | except IOError, e: 31 | pass 32 | def purify(filename): 33 | 34 | with open(filename, 'r') as f: 35 | 36 | soup = BeautifulSoup(f) 37 | print "working on %s" % (filename, ) 38 | for div in soup.findAll('div'): 39 | if div.has_key('id'): 40 | if div['id'] == 'wm-ipp': 41 | div.extract() 42 | for script in soup.findAll('script'): 43 | script.extract() 44 | for comment in soup.findAll(text=lambda text:isinstance(text, Comment)): 45 | comment.extract() 46 | for link in soup.findAll('link'): 47 | if link.has_key('rev'): 48 | if link['rev'] == 'made': 49 | link['href'] = 'josh@servercobra.com' 50 | if link.has_key('rel'): 51 | if link['rel'] == "home": 52 | link['href'] = URL 53 | if link['rel'] == "stylesheet": 54 | link['href'] = "/css/diveintopython.css" 55 | if link['rel'] == "next" or link['rel'] == "up" or link['rel'] == "previous": 56 | link['href'] = 
URL + '/'.join(link['href'].split('/')[8:]) 57 | 58 | for a in soup.findAll('a'): 59 | if a.has_key('href'): 60 | if 'http://web.archive.org/' in a['href']: 61 | print "print cleaning up link: %s" % (a['href']) 62 | a['href'] = URL + '/'.join(a['href'].split('/')[8:]) 63 | if 'mailto:' in a['href']: 64 | a['href'] = 'mailto:josh@servercobra.com' 65 | 66 | #a['href'] = 'http://www.diveintopython.net/' a['href'].split('/')[8:] 67 | #if 'http://diveintopython.net/' in a['href']: 68 | for form in soup.findAll('form'): 69 | if form.has_key('action'): 70 | if 'http://web.archive.org/' in form['action']: 71 | form['action'] = 'http://www.google.com/' + '/'.join(form['action'].split('/')[8:]) 72 | for img in soup.findAll('img'): 73 | if img.has_key('src'): 74 | if 'http://web.archive.org/' in img['src']: 75 | img['src'] = URL + '/'.join(img['src'].split('/')[8:]) 76 | 77 | #TODO: insert Google Analytics 78 | #soup.head.insert(len(a.head.contents), '') 79 | 80 | # Insert Google Analytics Async Tracking Code 81 | code = '''''' % (GOOGLE_ANALYTICS_KEY, ) 94 | if GOOGLE_ANALYTICS_KEY not in soup.head.contents: 95 | soup.head.insert(len(soup.head.contents), code) 96 | 97 | new_soup = BeautifulSoup(soup.renderContents()) 98 | for i in new_soup.findAll('a'): 99 | if i.has_key('href'): 100 | if i['href'][0:4] == 'http': 101 | #print i['href'] 102 | pass 103 | with open(filename, 'w') as out: 104 | out.write(new_soup.renderContents()) 105 | 106 | #def replace_url(old, new): 107 | #for file in os.listdir('/home/josh/programming/diveintopython'): 108 | #if os.path.isdir(file): 109 | #directory = file 110 | #for f in os.listdir(file): 111 | #if 'html' in f: 112 | #with open(directory + '/' + f, 'w+') as f2: 113 | #text = f2.read() 114 | #f2.write(re.sub('http://diveintopython.net', 'http://www.diveintopython.net', text)) 115 | if __name__ == '__main__': 116 | #for f in os.listdir('/home/josh/programming/diveintopython/'): 117 | #if ".html" in f.name: 118 | #purify(f) 119 | 120 | 
#purify('save/redhat.html') 121 | 122 | scrape() 123 | #for file in os.listdir('/home/josh/programming/diveintopython.it'): 124 | #if os.path.isdir(file): 125 | #directory = file 126 | #for f in os.listdir(file): 127 | #if 'html' in f: 128 | #purify(directory + '/' + f) 129 | 130 | #replace_url(None, None) 131 | 132 | 133 | -------------------------------------------------------------------------------- /diveintopython.it/upload.py: -------------------------------------------------------------------------------- 1 | import boto 2 | import os 3 | import sys 4 | 5 | BUCKET_NAME = 'it.diveintopython.net' 6 | ignored_folders = ('save', 'www.diveintopython.org', 'diveintopythonbak', '.git') 7 | ignored_files = ('scrape.py', 'upload.py', 'scrape.py~', 'upload.py~', '.gitignore') 8 | 9 | conn = boto.connect_s3() 10 | bucket = conn.get_bucket(BUCKET_NAME) 11 | sys.stdout.write("Beginning upload to %s" % BUCKET_NAME) 12 | 13 | def check_ignore(dir, file): 14 | for fol in ignored_folders: 15 | if fol in dir: 16 | return True 17 | for f in ignored_files: 18 | if f == file: 19 | return True 20 | return False 21 | 22 | def upload_file(arg, dirname, names): 23 | #'/'.join(a['href'].split('/')[8:]) 24 | if len(dirname.split('/')) == 5: 25 | dir = '/'.join(dirname.split('/')[5:]) 26 | else: 27 | dir = '/'.join(dirname.split('/')[5:]) + '/' 28 | print "dir is: %s" % dir 29 | 30 | #print "dirname is %s, dir is %s" % (dirname, dir) 31 | for file in names: 32 | 33 | #print "full path is %s" % (dir + file) 34 | if os.path.isdir(dir + file): 35 | continue 36 | if check_ignore(dir, file) == True: 37 | continue 38 | sys.stdout.write("uploading ") 39 | sys.stdout.write(dir + file) 40 | sys.stdout.write('\n') 41 | key = boto.s3.key.Key(bucket=bucket, name=(dir + file)) 42 | key.set_contents_from_filename((dir + file), cb=status, num_cb=10, policy="public-read") 43 | 44 | 45 | #if dirname == "": 46 | #key = boto.s3.key.Key(bucket=bucket, name=(name)) 47 | 
#key.set_contents_from_filename((name), cb=status, num_cb=10, policy="public-read") 48 | #else: 49 | #key = boto.s3.key.Key(bucket=bucket, name=(dirname + '/' + name)) 50 | #key.set_contents_from_filename((dirname + '/' + name), cb=status, num_cb=10, policy="public-read") 51 | #sys.stdout.write('\n') 52 | 53 | def upload(directory): 54 | os.path.walk(directory, upload_file, 'arg') 55 | 56 | def status(complete, total): 57 | sys.stdout.write('.') 58 | sys.stdout.flush() 59 | 60 | 61 | if __name__ == '__main__': 62 | upload('/home/josh/programming/diveintopython.it') -------------------------------------------------------------------------------- /diveintopython.it/upload.py~: -------------------------------------------------------------------------------- 1 | import boto 2 | import os 3 | import sys 4 | 5 | BUCKET_NAME = 'it.diveintopython.net' 6 | ignored_folders = ('save', 'www.diveintopython.org', 'diveintopythonbak', '.git') 7 | ignored_files = ('scrape.py', 'upload.py', 'scrape.py~', 'upload.py~', '.gitignore') 8 | 9 | conn = boto.connect_s3() 10 | bucket = conn.get_bucket(BUCKET_NAME) 11 | sys.stdout.write("Beginning upload to %s" % BUCKET_NAME) 12 | 13 | def check_ignore(dir, file): 14 | for fol in ignored_folders: 15 | if fol in dir: 16 | return True 17 | for f in ignored_files: 18 | if f == file: 19 | return True 20 | return False 21 | 22 | def upload_file(arg, dirname, names): 23 | #'/'.join(a['href'].split('/')[8:]) 24 | if len(dirname.split('/')) == 5: 25 | dir = '/'.join(dirname.split('/')[5:]) 26 | else: 27 | dir = '/'.join(dirname.split('/')[5:]) + '/' 28 | print "dir is: %s" % dir 29 | 30 | #print "dirname is %s, dir is %s" % (dirname, dir) 31 | for file in names: 32 | 33 | #print "full path is %s" % (dir + file) 34 | if os.path.isdir(dir + file): 35 | continue 36 | if check_ignore(dir, file) == True: 37 | continue 38 | sys.stdout.write("uploading ") 39 | sys.stdout.write(dir + file) 40 | sys.stdout.write('\n') 41 | key = 
boto.s3.key.Key(bucket=bucket, name=(dir + file)) 42 | key.set_contents_from_filename((dir + file), cb=status, num_cb=10, policy="public-read") 43 | 44 | 45 | #if dirname == "": 46 | #key = boto.s3.key.Key(bucket=bucket, name=(name)) 47 | #key.set_contents_from_filename((name), cb=status, num_cb=10, policy="public-read") 48 | #else: 49 | #key = boto.s3.key.Key(bucket=bucket, name=(dirname + '/' + name)) 50 | #key.set_contents_from_filename((dirname + '/' + name), cb=status, num_cb=10, policy="public-read") 51 | #sys.stdout.write('\n') 52 | 53 | def upload(directory): 54 | os.path.walk(directory, upload_file, 'arg') 55 | 56 | def status(complete, total): 57 | sys.stdout.write('.') 58 | sys.stdout.flush() 59 | 60 | 61 | if __name__ == '__main__': 62 | upload('/home/josh/programming/diveintopython') -------------------------------------------------------------------------------- /diveintopython.kr/.index.htm.kate-swp: -------------------------------------------------------------------------------- 1 | Kate Swap File - Version 1.0SE -------------------------------------------------------------------------------- /diveintopython.kr/css/diveintopython.css: -------------------------------------------------------------------------------- 1 | body { 2 | background-color: white; 3 | color: #222; 4 | font-family: "Book Antiqua", Georgia, Palatino, Times, "Times New Roman", serif; 5 | line-height: 145%; 6 | } 7 | 8 | h1, h2, h3, h4, h5, h6, p#tagline, #menu, .Footer, #breadcrumb { 9 | margin: 0; 10 | padding: 0; 11 | font-family: "Lucida Grande", Tahoma, "Trebuchet MS", Verdana, Lucida, Geneva, Helvetica, sans-serif; 12 | } 13 | 14 | .titlepage h1 { 15 | margin-top: 1em; 16 | } 17 | 18 | h1 { 19 | font-size: 180%; 20 | background-color: white; 21 | color: maroon; 22 | text-shadow: 2px 2px 2px #990000; 23 | } 24 | 25 | h1 a { 26 | background-color: white; 27 | color: maroon; 28 | text-decoration: none; 29 | } 30 | 31 | h2 { 32 | margin-top: 1em; 33 | font-size: 140%; 34 | 
text-shadow: 2px 2px 2px #999999; 35 | } 36 | 37 | h3 { 38 | margin-top: 1em; 39 | font-size: 100%; 40 | } 41 | 42 | label, .divider { 43 | display: none; 44 | } 45 | 46 | #Header { 47 | border-bottom: 1px solid maroon; 48 | } 49 | 50 | p#tagline { 51 | margin: 3px 0 6px 2px; 52 | font-size: 90%; 53 | font-weight: bold; 54 | background-color: white; 55 | color: maroon; 56 | } 57 | 58 | #breadcrumb { 59 | font-size: 90%; 60 | } 61 | 62 | #navigation { 63 | font-size: 90%; 64 | } 65 | 66 | .thispage { 67 | font-weight: bold; 68 | } 69 | 70 | .Footer { 71 | font-size: 90%; 72 | border-top: 1px solid maroon; 73 | background-color: white; 74 | color: maroon; 75 | width: 85%; 76 | } 77 | 78 | table.Footer { 79 | margin-top: 1em; 80 | } 81 | 82 | #breadcrumb { 83 | padding: 0 0 1em 2px; 84 | } 85 | 86 | .tip, .note, .warning, .caution, .important, .footnote { 87 | margin: 1em 2em 1em 2em; 88 | } 89 | 90 | .furtherreading { 91 | margin-top: 1em; 92 | } 93 | 94 | .screen, .programlisting, .example table { 95 | margin: 1em 1em 0 1em; 96 | padding: 0; 97 | } 98 | 99 | .example table { 100 | margin-bottom: 1em; 101 | } 102 | 103 | .toc li { 104 | list-style: none; 105 | } 106 | 107 | /* ----- Python code syntax coloring ----- */ 108 | .computeroutput, .traceback, .pykeyword, .pystring, .pycomment, .pyfunction, .pyclass { 109 | background-color: white; 110 | } 111 | 112 | .pykeyword, .pyfunction, .pyclass { 113 | font-weight: bold; 114 | } 115 | 116 | .computeroutput { 117 | color: teal; 118 | } 119 | 120 | .traceback { 121 | color: red; 122 | } 123 | 124 | .pykeyword { 125 | color: navy; 126 | } 127 | 128 | .pystring { 129 | color: olive; 130 | } 131 | 132 | .pycomment { 133 | color: green; 134 | font-style: italic; 135 | } 136 | 137 | .pyfunction { 138 | color: teal; 139 | } 140 | 141 | .pyclass { 142 | color: blue; 143 | } 144 | 145 | /* ----- home page ----- */ 146 | #wrapper { 147 | float: left; 148 | width: 66%; 149 | border: 0; 150 | background-color: white; 151 | color: 
#222; 152 | margin: 1em 2em 0 0; 153 | } 154 | 155 | #menu { 156 | font-size: 90%; 157 | margin-top: 1.5em; 158 | } 159 | 160 | #menu h2 { 161 | background-color: white; 162 | color: maroon; 163 | } 164 | 165 | #menu ul { 166 | list-style: none; 167 | } 168 | 169 | .selfad { 170 | padding: 5px; 171 | background-color: #ccc; 172 | color: #222; 173 | border: 1px solid #222; 174 | } 175 | 176 | .selfad p { 177 | font-family: Tahoma, sans-serif; 178 | font-size: 90%; 179 | margin: 10px; 180 | line-height: 140%; 181 | } 182 | 183 | #sponsoredlinks { 184 | float: right; 185 | margin-top: 20px; 186 | margin-left: 20px; 187 | } 188 | -------------------------------------------------------------------------------- /diveintopython.kr/gnugpl.htm: -------------------------------------------------------------------------------- 1 |<< GNU Free Document License | � 量 | � 量 >> |
GNU General Public License
健 珞밀ュ GNU General Public License≠ 蘆��� �ュ 繇��乾 腱傑잴 ×傑� 穢�槪 숩ヴ 皇鋼 �┶�읒∽ 꿱靴� 胛� �淮읒밀 虔뭡���梟 腱傑 뱉��잘� ��� ×� 珞밀頌 �급� �ュ 꼬ヴ. 健ュ 읏甦� 珞밀≠ �도�� �ュ 蘆�健 ���� 꿩� 輛��劍頌 傑湧�� 胛�밀 묽���꺌 � �균� �� 輛�� 꿇�� ��뭡 윈쇰읒 �� �珞≠�� ↑� 虔꾑읒 皇鋼 ��健 ���� ┐珞健ヴ. ��밀, 腱傑 뱉��잘� ��鋼 읫을健寧 �묠�� 祿瑩劍頌 乾�밀 烙휸� �도 匯況 솥�� ≠�뭡槪 藜읓읒 籃��� 珞밀≠ �� �ュ 蘆�� ��況 숩ヴ 皇鋼 �┶�읒∽ 茸숩�淞ュ ��들 ��槪 �균� 纜을珞槪 ×��劍頌 멨乾�� 꿩舡劍頌� �潢맸� � �槪 ↔健ヴ. 腱傑 뱉��잘� ��鋼 GNU General Public License況 �朗읒 ��� ±�, 읫曳 윳珞� GNU General Public License읒 ��밀梟 그 輛�� 湧悚健 이力花∽ 烙휸� � �舡槪 ���� �ヴ. 健 纜을珞鋼 輛�� ↑�잴 珞밀。� 돛件뭡 윈쇰읒 �� ↑�槪 ��� 꿩鋼 ↔健� 健頌 乾�밀 꺌�� � �槪 �도 匯況 輛��乾 珞靈읒 ��밀 �─� ��� 숩�도 �� 꿩ュヴ. Original Copy: GNU
General Public License |
<< � 量 | � 量 | 靈 1 � �健�槪 健��� >> |
健 甄鋼 름競숩況 胛� ↔도 껍レ�, ≫�健寧, 宖京健況 胛� ↔도 껍レヴ™. 健↔鋼 윈壽솥읒 ��윈 皇鋼 ↔槪 ≠穢�ヴ.
윈壽솥健 健靈 �� �頌그甦�槪 맸虔�ヴ�, 그↔鋼 윈壽솥鋼 �健�槪 ��� 꼬ヴュ ↔槪 ┥�ュ ↔健 껍レヴ. �健�鋼 ���읒 �� 맞�寧, 윈壽솥鋼 �뒵읒묽≠ 그↔槪 ���꺌 �ヴ. 寧ュ Learning to Program � How to Think Like a Computer Scientist 況 �굄 偈絹��, 그靴� Python.org 읒ュ �-�頌그甦蛔�槪 胛� �健� �頌그甦�읒 劍頌 ≠ュ 뱉⌒읒 �� ヴ患 換��健 皇健 �ヴ.
健靈 ヴ健�� ��≠ 숩腱
<< � 量 | � 量 | 靈 1 � �健�槪 健��� >> |
Начало > В глубь языка Python > GNU Free Documentation License > Collections of documents | << >> | ||||
![]() | В глубь языка Python Для программистов |
You may make a collection consisting of the Document and 21 | other documents released under this License, and replace the 22 | individual copies of this License in the various documents with a 23 | single copy that is included in the collection, provided that you 24 | follow the rules of this License for verbatim copying of each of 25 | the documents in all other respects.
You may extract a single document from such a collection, 26 | and distribute it individually under this License, provided you 27 | insert a copy of this License into the extracted document, and 28 | follow this License in all other respects regarding verbatim 29 | copying of that document.
Combining documents | 1 2 3 4 5 6 7 8 9 10 11 12 | Aggregation with independent works |
Copyright © 2000, 2001, 2002 Марк Пилгрим Copyright © 2001, 2002, 2003 Перевод, Денис Откидач |
Начало > В глубь языка Python > GNU Free Documentation License > Verbatim copying | << >> | ||||
![]() | В глубь языка Python Для программистов |
You may copy and distribute the Document in any medium, 21 | either commercially or noncommercially, provided that this 22 | License, the copyright notices, and the license notice saying this 23 | License applies to the Document are reproduced in all copies, and 24 | that you add no other conditions whatsoever to those of this 25 | License. You may not use technical measures to obstruct or 26 | control the reading or further copying of the copies you make or 27 | distribute. However, you may accept compensation in exchange for 28 | copies. If you distribute a large enough number of copies you 29 | must also follow the conditions in section 3.
You may also lend copies, under the same conditions stated 30 | above, and you may publicly display copies.
Applicability and definitions | 1 2 3 4 5 6 7 8 9 10 11 12 | Copying in quantity |
Copyright © 2000, 2001, 2002 Марк Пилгрим Copyright © 2001, 2002, 2003 Перевод, Денис Откидач |
Начало > В глубь языка Python > GNU Free Documentation License > Termination | << >> | ||||
![]() | В глубь языка Python Для программистов |
You may not copy, modify, sublicense, or distribute the 21 | Document except as expressly provided for under this License. Any 22 | other attempt to copy, modify, sublicense or distribute the 23 | Document is void, and will automatically terminate your rights 24 | under this License. However, parties who have received copies, or 25 | rights, from you under this License will not have their licenses 26 | terminated so long as such parties remain in full 27 | compliance.
Translation | 1 2 3 4 5 6 7 8 9 10 11 12 | Future revisions of this license |
Copyright © 2000, 2001, 2002 Марк Пилгрим Copyright © 2001, 2002, 2003 Перевод, Денис Откидач |
Начало > В глубь языка Python > GNU Free Documentation License > Translation | << >> | ||||
![]() | В глубь языка Python Для программистов |
Translation is considered a kind of modification, so you may 21 | distribute translations of the Document under the terms of section 22 | 4. Replacing Invariant Sections with translations requires 23 | special permission from their copyright holders, but you may 24 | include translations of some or all Invariant Sections in addition 25 | to the original versions of these Invariant Sections. You may 26 | include a translation of this License provided that you also 27 | include the original English version of this License. In case of 28 | a disagreement between the translation and the original English 29 | version of this License, the original English version will 30 | prevail.
Aggregation with independent works | 1 2 3 4 5 6 7 8 9 10 11 12 | Termination |
Copyright © 2000, 2001, 2002 Марк Пилгрим Copyright © 2001, 2002, 2003 Перевод, Денис Откидач |
![]() | В глубь языка Python Для программистов |
В глубь языка Python — свободная книга о языке программирования Python для 21 | опытных программистов. Вы можете читать эту книгу 22 | online или загрузить ее в одном из 23 | предлагаемых форматов.
Copyright © 2000, 2001, 2002 Марк Пилгрим Copyright © 2001, 2002, 2003 Перевод, Денис Откидач |
You are here: Home > Dive Into Python > Installing Python > Summary | 35 |<< >> | 36 |||||
39 | Dive Into Python40 |Python from novice to pro 41 | |
42 | 43 | 46 | | 47 |
You should now have a version of Python installed that works for you. 61 |
 62 |Depending on your platform, you may have more than one version of Python installed. If so, you need to be aware of your paths. If simply typing python on the command line doesn't run the version of Python that you want to use, you may need to enter the full pathname of your preferred version. 64 |
65 |Congratulations, and welcome to Python. 66 |
67 |<< The Interactive Shell |
71 | | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 72 | |
73 | Your First Python Program >> |
74 |