From cf1a99b4e21e735bdc9357e3333e270720595be1 Mon Sep 17 00:00:00 2001
From: Koichiro IWAO
Date: Thu, 20 Oct 2016 18:49:11 +0900
Subject: [PATCH 1/2] buildx.sh: simplify counting lines
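Reading the file on stdin with wc -l < $data_file prints the count
alone (no file name), exactly like the old cat $data_file | wc -l,
while avoiding the extra cat process. Illustrative comparison, with
<count> standing in for the actual number of lines:

  cat $data_file | wc -l   # <count>, spawns an extra cat
  wc -l < $data_file       # <count>, no extra process
  wc -l $data_file         # <count> x11_file_list.txt (name would need trimming)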
---
xorg/X11R7.6/buildx.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/xorg/X11R7.6/buildx.sh b/xorg/X11R7.6/buildx.sh
index ca0b29a9..c51c2b04 100755
--- a/xorg/X11R7.6/buildx.sh
+++ b/xorg/X11R7.6/buildx.sh
@@ -180,7 +180,7 @@ data_file=x11_file_list.txt
# was www.x.org/releases/X11R7.6/src/everything
download_url=http://server1.xrdp.org/xrdp/X11R7.6
-num_modules=`cat $data_file | wc -l`
+num_modules=`wc -l < $data_file`
count=0
##########################
From 6b589569ee2aeb20f157ef73b2ca240166fef6a4 Mon Sep 17 00:00:00 2001
From: Koichiro IWAO
Date: Thu, 20 Oct 2016 18:33:57 +0900
Subject: [PATCH 2/2] buildx.sh: download tarballs using keepalive
Executing wget with multiple URLs, e.g.
  wget http://example.com/file1 http://example.com/file2 ...
lets it reuse one HTTP connection (keepalive). Stop calling wget once
per file: downloading all files over a single connection is a little
faster than running wget more than 100 times. In addition, run two
wget instances in parallel to increase download speed.
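A minimal sketch of the pattern used in the diff below, with urls.txt
as a hypothetical stand-in for the URL list generated from
x11_file_list.txt:

  # split the URL list into two halves and hand each half to its own
  # wget, so each wget fetches all of its files over one connection
  n=`wc -l < urls.txt`
  < urls.txt xargs -P2 -n $(expr $n / 2 + 1) wget --no-verbose --continue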
---
xorg/X11R7.6/buildx.sh | 41 ++++++++++++++++++-----------------------
1 file changed, 18 insertions(+), 23 deletions(-)
diff --git a/xorg/X11R7.6/buildx.sh b/xorg/X11R7.6/buildx.sh
index c51c2b04..4245c955 100755
--- a/xorg/X11R7.6/buildx.sh
+++ b/xorg/X11R7.6/buildx.sh
@@ -23,23 +23,19 @@
# debian packages needed
# flex bison libxml2-dev intltool xsltproc xutils-dev python-libxml2 g++ xutils
-download_file()
+download_all_files()
{
- local file url status
- file=$1
+    # download all files in parallel using HTTP keepalive;
+    # a little faster than calling wget once per file (100+ times)
+ < x11_file_list.txt cut -f1 -d: | sed -e "s|^|${download_url}/|" | \
+ xargs -P2 -n $(expr $num_modules / 2 + 1) \
+ wget \
+ --directory-prefix=downloads \
+ --no-verbose \
+ --timestamping \
+ --continue
- # if we already have the file, don't download it
- if [ -r downloads/$file ]; then
- return 0
- fi
-
- echo "downloading file $download_url/$file"
-
- cd downloads
-
- wget -cq $download_url/$file
status=$?
- cd ..
return $status
}
@@ -74,15 +70,6 @@ extract_it()
return 0
fi
- # download file
- if ! download_file $mod_file
- then
- echo ""
- echo "failed to download $mod_file - aborting build"
- echo ""
- exit 1
- fi
-
cd build_dir
# if pkg has not yet been extracted, do so now
@@ -263,6 +250,14 @@ if ! NPROC=`nproc`; then
NPROC=1
fi
+if ! download_all_files; then
+ echo ""
+ echo "download failed - aborting build"
+ echo "rerun this script to resume download/build"
+ echo ""
+ exit 1
+fi
+
while IFS=: read mod_file mod_dir mod_args
do
mod_args=`eval echo $mod_args`
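For reference, a minimal sketch of how the colon-delimited entries of
x11_file_list.txt are split by the read loop above (the sample entry
is hypothetical):

  # each entry: <tarball>:<build dir>:<configure args>
  echo "libfoo-1.0.tar.gz:libfoo-1.0:--disable-docs" | \
  while IFS=: read mod_file mod_dir mod_args
  do
      echo "file=$mod_file dir=$mod_dir args=$mod_args"
  done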