# -*- coding: utf-8 -*-

#-------------------------------------------------------------------------
# drawElements Quality Program utilities
# --------------------------------------
#
# Copyright 2015 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#-------------------------------------------------------------------------

import os
import sys
import shutil
import tarfile
import urllib2
import hashlib
import argparse
import subprocess

sys.path.append(os.path.join(os.path.dirname(__file__), "..", "scripts"))

from build.common import *

EXTERNAL_DIR	= os.path.realpath(os.path.normpath(os.path.dirname(__file__)))

def computeChecksum (data):
	return hashlib.sha256(data).hexdigest()

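# Base class for an external dependency; clean() removes the extracted
# sources under EXTERNAL_DIR/<baseDir>/<extractDir>.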
class Source:
	def __init__(self, baseDir, extractDir):
		self.baseDir		= baseDir
		self.extractDir		= extractDir

	def clean (self):
		fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)
		if os.path.exists(fullDstPath):
			shutil.rmtree(fullDstPath, ignore_errors=False)

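# Dependency distributed as a tarball: downloaded into <baseDir>/packages,
# verified against a SHA-256 checksum and then extracted.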
class SourcePackage (Source):
	def __init__(self, url, filename, checksum, baseDir, extractDir = "src", postExtract=None):
		Source.__init__(self, baseDir, extractDir)
		self.url			= url
		self.filename		= filename
		self.checksum		= checksum
		self.archiveDir		= "packages"
		self.postExtract	= postExtract

	def clean (self):
		Source.clean(self)
		self.removeArchives()

	def update (self):
		if not self.isArchiveUpToDate():
			self.fetchAndVerifyArchive()

		# \note No way to verify that extracted contents match archive, re-extract
		Source.clean(self)
		self.extract()

	def removeArchives (self):
		archiveDir = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir)
		if os.path.exists(archiveDir):
			shutil.rmtree(archiveDir, ignore_errors=False)

	def isArchiveUpToDate (self):
		archiveFile = os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)
		if os.path.exists(archiveFile):
			return computeChecksum(readFile(archiveFile)) == self.checksum
		else:
			return False

	def fetchAndVerifyArchive (self):
		print "Fetching %s" % self.url

		req			= urllib2.urlopen(self.url)
		data		= req.read()
		checksum	= computeChecksum(data)
		dstPath		= os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)

		if checksum != self.checksum:
			raise Exception("Checksum mismatch for %s, expected %s, got %s" % (self.filename, self.checksum, checksum))

		if not os.path.exists(os.path.dirname(dstPath)):
			os.mkdir(os.path.dirname(dstPath))

		writeFile(dstPath, data)

	def extract (self):
		print "Extracting %s to %s/%s" % (self.filename, self.baseDir, self.extractDir)

		srcPath	= os.path.join(EXTERNAL_DIR, self.baseDir, self.archiveDir, self.filename)
		tmpPath	= os.path.join(EXTERNAL_DIR, ".extract-tmp-%s" % self.baseDir)
		dstPath	= os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)
		archive	= tarfile.open(srcPath)

		if os.path.exists(tmpPath):
			shutil.rmtree(tmpPath, ignore_errors=False)

		os.mkdir(tmpPath)

		archive.extractall(tmpPath)
		archive.close()

		extractedEntries = os.listdir(tmpPath)
		if len(extractedEntries) != 1 or not os.path.isdir(os.path.join(tmpPath, extractedEntries[0])):
			raise Exception("%s doesn't contain a single top-level directory" % self.filename)

		topLevelPath = os.path.join(tmpPath, extractedEntries[0])

		if not os.path.exists(dstPath):
			os.mkdir(dstPath)

		for entry in os.listdir(topLevelPath):
			if os.path.exists(os.path.join(dstPath, entry)):
				raise Exception("%s exists already" % entry)

			shutil.move(os.path.join(topLevelPath, entry), dstPath)

		shutil.rmtree(tmpPath, ignore_errors=True)

		if self.postExtract != None:
			self.postExtract(dstPath)

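# Dependency fetched from a git repository and checked out at a pinned revision.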
class GitRepo (Source):
	def __init__(self, url, revision, baseDir, extractDir = "src"):
		Source.__init__(self, baseDir, extractDir)
		self.url		= url
		self.revision	= revision

	def update (self):
		fullDstPath = os.path.join(EXTERNAL_DIR, self.baseDir, self.extractDir)

		if not os.path.exists(fullDstPath):
			execute(["git", "clone", "--no-checkout", self.url, fullDstPath])

		pushWorkingDir(fullDstPath)
		try:
			execute(["git", "fetch", self.url, "+refs/heads/*:refs/remotes/origin/*"])
			execute(["git", "checkout", self.revision])
		finally:
			popWorkingDir()

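# libpng post-extract step: copy the prebuilt pnglibconf.h from scripts/
# into the source root.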
def postExtractLibpng (path):
	shutil.copy(os.path.join(path, "scripts", "pnglibconf.h.prebuilt"),
				os.path.join(path, "pnglibconf.h"))

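# External dependencies to fetch; each ends up under EXTERNAL_DIR/<baseDir>/<extractDir>.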
PACKAGES = [
	SourcePackage(
		"http://zlib.net/zlib-1.2.8.tar.gz",
		"zlib-1.2.8.tar.gz",
		"36658cb768a54c1d4dec43c3116c27ed893e88b02ecfcb44f2166f9c0b7f2a0d",
		"zlib"),
	SourcePackage(
		"http://prdownloads.sourceforge.net/libpng/libpng-1.6.17.tar.gz",
		"libpng-1.6.17.tar.gz",
		"a18233c99e1dc59a256180e6871d9305a42e91b3f98799b3ceb98e87e9ec5e31",
		"libpng",
		postExtract = postExtractLibpng),
	GitRepo(
		"https://github.com/KhronosGroup/SPIRV-Tools.git",
		"f7e63786a919040cb2e0e572d960a0650f2c2881",
		"spirv-tools"),
	GitRepo(
		"https://github.com/KhronosGroup/glslang.git",
		"5639f3aca5b75cbe5419a623eecf5e3794fab917",
		"glslang"),
]

def parseArgs ():
	parser = argparse.ArgumentParser(description = "Fetch external sources")
	parser.add_argument('--clean', dest='clean', action='store_true', default=False,
						help='Remove sources instead of fetching')
	return parser.parse_args()

if __name__ == "__main__":
	args = parseArgs()

	for pkg in PACKAGES:
		if args.clean:
			pkg.clean()
		else:
			pkg.update()