Merge pull request #6238 from scivision/wrap_sec

wrap: working to improve security
pull/6311/head
Jussi Pakkanen 5 years ago committed by GitHub
commit 4487c66507
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 2
      manual tests/1 wrap/main.c
  2. 2
      manual tests/10 svn wrap/prog.c
  3. 8
      manual tests/11 wrap imposter/meson.build
  4. 10
      manual tests/11 wrap imposter/subprojects/zlib.wrap
  5. 4
      manual tests/12 wrap mirror/meson.build
  6. 10
      manual tests/12 wrap mirror/subprojects/zlib.wrap
  7. 2
      manual tests/3 git wrap/prog.c
  8. 2
      manual tests/4 standalone binaries/myapp.cpp
  9. 2
      manual tests/5 rpm/main.c
  10. 2
      manual tests/6 hg wrap/prog.c
  11. 2
      manual tests/8 timeout/sleepprog.c
  12. 2
      mesonbuild/modules/rpm.py
  13. 113
      mesonbuild/wrap/wrap.py

@ -1,7 +1,7 @@
#include<sqlite3.h>
#include<stdio.h>
int main(int argc, char **argv) {
int main(void) {
sqlite3 *db;
if(sqlite3_open(":memory:", &db) != SQLITE_OK) {
printf("Sqlite failed.\n");

@ -1,6 +1,6 @@
#include"subproj.h"
int main(int argc, char **argv) {
int main(void) {
subproj_function();
return 0;
}

@ -0,0 +1,8 @@
project('evil URL')
# showing that the new Meson wrap.py code tries to stop imposter WrapDB URLs;
# a WrapException is raised.
#
# ERROR: https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip may be a WrapDB-impersonating URL
#
subproject('zlib')

@ -0,0 +1,10 @@
[wrap-file]
directory = zlib-1.2.8
source_url = https://zlib.net/zlib-1.2.11.tar.gz
source_filename = zlib-1.2.11.tar.gz
source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
patch_url = https://wrapdb.mesonbuild.com.invalid/v1/projects/zlib/1.2.11/4/get_zip
patch_filename = zlib-1.2.11-4-wrap.zip
patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4

@ -0,0 +1,4 @@
project('downloader')
# this test will timeout, showing that a subdomain isn't caught as a masquerading URL
subproject('zlib')

@ -0,0 +1,10 @@
[wrap-file]
directory = zlib-1.2.8
source_url = https://zlib.net/zlib-1.2.11.tar.gz
source_filename = zlib-1.2.11.tar.gz
source_hash = c3e5e9fdd5004dcb542feda5ee4f0ff0744628baf8ed2dd5d66f8ca1197cb1a1
patch_url = https://mirror1.wrapdb.mesonbuild.com/v1/projects/zlib/1.2.11/4/get_zip
patch_filename = zlib-1.2.11-4-wrap.zip
patch_hash = 886b67480dbe73b406ad83a1dd6d9596f93089d90c220ccfc91944c95f1c68c4

@ -1,6 +1,6 @@
#include"subproj.h"
int main(int argc, char **argv) {
int main(void) {
subproj_function();
return 0;
}

@ -3,7 +3,7 @@
#include<iostream>
#include<string>
int main(int argc, char *argv[]) {
int main(void) {
SDL_Surface *screenSurface;
SDL_Event e;
int keepGoing = 1;

@ -1,6 +1,6 @@
#include<lib.h>
#include<stdio.h>
int main(int argc, char **argv)
int main(void)
{
char *t = meson_print();
printf("%s", t);

@ -1,6 +1,6 @@
#include"subproj.h"
int main(int argc, char **argv) {
int main(void) {
subproj_function();
return 0;
}

@ -1,6 +1,6 @@
#include<unistd.h>
int main(int argc, char **argv) {
int main(void) {
sleep(1000);
return 0;
}

@ -151,7 +151,7 @@ class RPMModule(ExtensionModule):
def __get_required_compilers(self):
required_compilers = set()
for compiler in self.coredata.compilers.values():
for compiler in self.coredata.environment.coredata.compilers.host.values():
# Elbrus has one 'lcc' package for every compiler
if isinstance(compiler, compilers.GnuCCompiler):
required_compilers.add('gcc')

@ -45,37 +45,49 @@ except ImportError:
req_timeout = 600.0
ssl_warning_printed = False
whitelist_subdomain = 'wrapdb.mesonbuild.com'
def quiet_git(cmd: typing.List[str], workingdir: str) -> typing.Tuple[bool, str]:
try:
pc = subprocess.run(['git', '-C', workingdir] + cmd, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except FileNotFoundError as e:
return False, str(e)
git = shutil.which('git')
if not git:
return False, 'Git program not found.'
pc = subprocess.run([git, '-C', workingdir] + cmd, universal_newlines=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
if pc.returncode != 0:
return False, pc.stderr
return True, pc.stdout
def whitelist_wrapdb(urlstr: str) -> urllib.parse.ParseResult:
""" raises WrapException if not whitelisted subdomain """
url = urllib.parse.urlparse(urlstr)
if not url.hostname:
raise WrapException('{} is not a valid URL'.format(urlstr))
if not url.hostname.endswith(whitelist_subdomain):
raise WrapException('{} is not a whitelisted WrapDB URL'.format(urlstr))
if has_ssl and not url.scheme == 'https':
raise WrapException('WrapDB did not have expected SSL https url, instead got {}'.format(urlstr))
return url
def open_wrapdburl(urlstring: str) -> 'http.client.HTTPResponse':
global ssl_warning_printed
url = whitelist_wrapdb(urlstring)
if has_ssl:
try:
return urllib.request.urlopen(urlstring, timeout=req_timeout) # , context=ssl.create_default_context())
except urllib.error.URLError:
if not ssl_warning_printed:
print('SSL connection failed. Falling back to unencrypted connections.', file=sys.stderr)
ssl_warning_printed = True
return urllib.request.urlopen(urllib.parse.urlunparse(url), timeout=req_timeout)
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
# following code is only for those without Python SSL
nossl_url = url._replace(scheme='http')
if not ssl_warning_printed:
print('Warning: SSL not available, traffic not authenticated.', file=sys.stderr)
mlog.warning('SSL module not available in {}: WrapDB traffic not authenticated.'.format(sys.executable))
ssl_warning_printed = True
# Trying to open SSL connection to wrapdb fails because the
# certificate is not known.
if urlstring.startswith('https'):
urlstring = 'http' + urlstring[5:]
try:
return urllib.request.urlopen(urlstring, timeout=req_timeout)
except urllib.error.URLError:
raise WrapException('failed to get {} is the internet available?'.format(urlstring))
return urllib.request.urlopen(urllib.parse.urlunparse(nossl_url), timeout=req_timeout)
except urllib.error.URLError as excp:
raise WrapException('WrapDB connection failed to {} with error {}'.format(urlstring, excp))
class WrapException(MesonException):
@ -189,6 +201,9 @@ class Resolver:
raise WrapException(m)
def resolve_git_submodule(self) -> bool:
git = shutil.which('git')
if not git:
raise WrapException('Git program not found.')
# Are we in a git repository?
ret, out = quiet_git(['rev-parse'], self.subdir_root)
if not ret:
@ -205,12 +220,13 @@ class Resolver:
raise WrapException('git submodule has merge conflicts')
# Submodule exists, but is deinitialized or wasn't initialized
elif out.startswith('-'):
if subprocess.call(['git', '-C', self.subdir_root, 'submodule', 'update', '--init', self.dirname]) == 0:
if subprocess.run([git, '-C', self.subdir_root,
'submodule', 'update', '--init', self.dirname]).returncode == 0:
return True
raise WrapException('git submodule failed to init')
# Submodule looks fine, but maybe it wasn't populated properly. Do a checkout.
elif out.startswith(' '):
subprocess.call(['git', 'checkout', '.'], cwd=self.dirname)
subprocess.run([git, 'checkout', '.'], cwd=self.dirname)
# Even if checkout failed, try building it anyway and let the user
# handle any problems manually.
return True
@ -233,6 +249,9 @@ class Resolver:
self.apply_patch()
def get_git(self) -> None:
git = shutil.which('git')
if not git:
raise WrapException('Git program not found.')
revno = self.wrap.get('revision')
is_shallow = False
depth_option = [] # type: typing.List[str]
@ -243,42 +262,42 @@ class Resolver:
if is_shallow and self.is_git_full_commit_id(revno):
# git doesn't support directly cloning shallowly for commits,
# so we follow https://stackoverflow.com/a/43136160
subprocess.check_call(['git', 'init', self.directory], cwd=self.subdir_root)
subprocess.check_call(['git', 'remote', 'add', 'origin', self.wrap.get('url')],
subprocess.check_call([git, 'init', self.directory], cwd=self.subdir_root)
subprocess.check_call([git, 'remote', 'add', 'origin', self.wrap.get('url')],
cwd=self.dirname)
revno = self.wrap.get('revision')
subprocess.check_call(['git', 'fetch', *depth_option, 'origin', revno],
subprocess.check_call([git, 'fetch', *depth_option, 'origin', revno],
cwd=self.dirname)
subprocess.check_call(['git', 'checkout', revno], cwd=self.dirname)
subprocess.check_call([git, 'checkout', revno], cwd=self.dirname)
if self.wrap.values.get('clone-recursive', '').lower() == 'true':
subprocess.check_call(['git', 'submodule', 'update',
subprocess.check_call([git, 'submodule', 'update',
'--init', '--checkout', '--recursive', *depth_option],
cwd=self.dirname)
push_url = self.wrap.values.get('push-url')
if push_url:
subprocess.check_call(['git', 'remote', 'set-url',
subprocess.check_call([git, 'remote', 'set-url',
'--push', 'origin', push_url],
cwd=self.dirname)
else:
if not is_shallow:
subprocess.check_call(['git', 'clone', self.wrap.get('url'),
subprocess.check_call([git, 'clone', self.wrap.get('url'),
self.directory], cwd=self.subdir_root)
if revno.lower() != 'head':
if subprocess.call(['git', 'checkout', revno], cwd=self.dirname) != 0:
subprocess.check_call(['git', 'fetch', self.wrap.get('url'), revno], cwd=self.dirname)
subprocess.check_call(['git', 'checkout', revno], cwd=self.dirname)
if subprocess.run([git, 'checkout', revno], cwd=self.dirname).returncode != 0:
subprocess.check_call([git, 'fetch', self.wrap.get('url'), revno], cwd=self.dirname)
subprocess.check_call([git, 'checkout', revno], cwd=self.dirname)
else:
subprocess.check_call(['git', 'clone', *depth_option,
subprocess.check_call([git, 'clone', *depth_option,
'--branch', revno,
self.wrap.get('url'),
self.directory], cwd=self.subdir_root)
if self.wrap.values.get('clone-recursive', '').lower() == 'true':
subprocess.check_call(['git', 'submodule', 'update',
subprocess.check_call([git, 'submodule', 'update',
'--init', '--checkout', '--recursive', *depth_option],
cwd=self.dirname)
push_url = self.wrap.values.get('push-url')
if push_url:
subprocess.check_call(['git', 'remote', 'set-url',
subprocess.check_call([git, 'remote', 'set-url',
'--push', 'origin', push_url],
cwd=self.dirname)
@ -290,29 +309,39 @@ class Resolver:
def get_hg(self) -> None:
revno = self.wrap.get('revision')
subprocess.check_call(['hg', 'clone', self.wrap.get('url'),
hg = shutil.which('hg')
if not hg:
raise WrapException('Mercurial program not found.')
subprocess.check_call([hg, 'clone', self.wrap.get('url'),
self.directory], cwd=self.subdir_root)
if revno.lower() != 'tip':
subprocess.check_call(['hg', 'checkout', revno],
subprocess.check_call([hg, 'checkout', revno],
cwd=self.dirname)
def get_svn(self) -> None:
revno = self.wrap.get('revision')
subprocess.check_call(['svn', 'checkout', '-r', revno, self.wrap.get('url'),
svn = shutil.which('svn')
if not svn:
raise WrapException('SVN program not found.')
subprocess.check_call([svn, 'checkout', '-r', revno, self.wrap.get('url'),
self.directory], cwd=self.subdir_root)
def get_data(self, url: str) -> typing.Tuple[str, str]:
def get_data(self, urlstring: str) -> typing.Tuple[str, str]:
blocksize = 10 * 1024
h = hashlib.sha256()
tmpfile = tempfile.NamedTemporaryFile(mode='wb', dir=self.cachedir, delete=False)
hostname = urllib.parse.urlparse(url).hostname
if hostname == 'wrapdb.mesonbuild.com' or hostname.endswith('.wrapdb.mesonbuild.com'):
resp = open_wrapdburl(url)
url = urllib.parse.urlparse(urlstring)
if not url.hostname:
raise WrapException('{} is not a valid URL'.format(urlstring))
if url.hostname.endswith(whitelist_subdomain):
resp = open_wrapdburl(urlstring)
elif whitelist_subdomain in urlstring:
raise WrapException('{} may be a WrapDB-impersonating URL'.format(urlstring))
else:
try:
resp = urllib.request.urlopen(url, timeout=req_timeout)
resp = urllib.request.urlopen(urlstring, timeout=req_timeout)
except urllib.error.URLError:
raise WrapException('could not get {} is the internet available?'.format(url))
raise WrapException('could not get {} is the internet available?'.format(urlstring))
with contextlib.closing(resp) as resp:
try:
dlsize = int(resp.info()['Content-Length'])

Loading…
Cancel
Save