Commit c88f1ec8 authored by Joshua Litt's avatar Joshua Litt Committed by Gerrit Code Review
Browse files

Android.mk file for vpx unittests

These changes are to support automated regressions of vpx on android
	new file:   test/android/Android.mk
	new file:   test/android/README
	new file:   test/android/get_files.py

Change-Id: I52c8e9daf3676a3561badbe710ec3a16fed72abd
parent a33a84b1
# Copyright (c) 2013 The WebM project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
#
# This make file builds vpx_test app for android.
# The test app itself runs on the command line through adb shell
# The paths are really messed up as the libvpx make file
# expects to be made from a parent directory.
# TODO(joshualitt)
# Fix android make files so they can be built from anywhere, will require
# changing the libvpx make file and this one.
# This file lives in libvpx/test/android, but the libvpx build system
# expects to be invoked from the directory that CONTAINS libvpx, so the
# paths below climb out of the source tree first.
CUR_WD := $(call my-dir)
BINDINGS_DIR := $(CUR_WD)/../../..
LOCAL_PATH := $(CUR_WD)/../../..
#libvpx
# Build the vpx shared library via libvpx's own NDK makefile.
include $(CLEAR_VARS)
include $(BINDINGS_DIR)/libvpx/build/make/Android.mk
# Restore path
# TODO joshualitt Fix makefiles so this is no longer needed
# (the libvpx Android.mk above clobbers LOCAL_PATH).
LOCAL_PATH := $(CUR_WD)/../..
#libgtest
# Static googletest library built from the bundled third_party copy;
# gtest-all.cc pulls in every gtest translation unit.
include $(CLEAR_VARS)
LOCAL_CPP_EXTENSION := .cc
LOCAL_MODULE := gtest
LOCAL_C_INCLUDES := $(LOCAL_PATH)/third_party/googletest/src/
LOCAL_C_INCLUDES += $(LOCAL_PATH)/third_party/googletest/src/include/
LOCAL_SRC_FILES := ./third_party/googletest/src/src/gtest-all.cc
include $(BUILD_STATIC_LIBRARY)
#libnestegg
# Static nestegg (WebM demuxer) library, used by the test driver to read
# .webm test vectors; halloc is nestegg's bundled allocator.
include $(CLEAR_VARS)
LOCAL_CPP_EXTENSION := .cc
LOCAL_MODULE := nestegg
NESTEGG_PATH := $(LOCAL_PATH)/nestegg
LOCAL_C_INCLUDES := $(NESTEGG_PATH)/include
LOCAL_C_INCLUDES += $(LOCAL_PATH)/
LOCAL_C_INCLUDES += $(NESTEGG_PATH)/halloc/
LOCAL_SRC_FILES := ./nestegg/halloc/src/halloc.c
LOCAL_SRC_FILES += ./nestegg/src/nestegg.c
include $(BUILD_STATIC_LIBRARY)
#libvpx_test
# The test executable itself: links the static gtest/nestegg/cpufeatures
# libraries against the shared vpx library built above, and is run on the
# device from the command line via 'adb shell'.
include $(CLEAR_VARS)
LOCAL_MODULE := libvpx_test
LOCAL_STATIC_LIBRARIES := gtest
LOCAL_STATIC_LIBRARIES += nestegg
LOCAL_STATIC_LIBRARIES += cpufeatures
LOCAL_SHARED_LIBRARIES := vpx
LOCAL_C_INCLUDES := $(LOCAL_PATH)/
LOCAL_C_INCLUDES += $(BINDINGS_DIR)/
LOCAL_C_INCLUDES += $(LOCAL_PATH)/third_party/googletest/src/include
LOCAL_SRC_FILES := ./args.c
LOCAL_SRC_FILES += ./md5_utils.c
LOCAL_SRC_FILES += ./test/decode_test_driver.cc
LOCAL_SRC_FILES += ./test/test_libvpx.cc
LOCAL_SRC_FILES += ./test/test_vector_test.cc
include $(BUILD_EXECUTABLE)
This Android.mk file builds the vpx unit tests for Android. To build and run them:
1) configure libvpx from the parent directory:
./libvpx/configure --target=armv7-android-gcc --enable-external-build --enable-postproc --disable-install-srcs --enable-multi-res-encoding --enable-temporal-denoising --disable-unit-tests --disable-install-docs --disable-examples --disable-runtime-cpu-detect --sdk=$NDK
2) from the parent directory, invoke ndk-build:
NDK_PROJECT_PATH=. ndk-build APP_BUILD_SCRIPT=./libvpx/test/android/Android.mk APP_ABI=armeabi-v7a APP_PLATFORM=android-18 APP_OPTIM=release APP_STL=gnustl_static APP_CPPFLAGS=-frtti
3) Run get_files.py to download the test files:
python get_files.py -i /path/to/test-data.sha1 -o /path/to/put/files -u http://libvpx-test-file-url
NOTE: currently the url of the test files is http://downloads.webmproject.org/test_data/libvpx
4) transfer files to device using adb. Currently, I put these files in /data/local/tmp
adb push /path/to/test_files /data/local/tmp
adb push /path/to/built_libs /data/local/tmp
NOTE: the built libraries default to parent_dir/libs/armeabi-v7a
5) run tests:
adb shell
(on device)
cd /data/local/tmp
LD_LIBRARY_PATH=. ./vpx_test
# Copyright (c) 2013 The WebM project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
#
# This simple script pulls test files from the webm homepage
# It is intelligent enough to only pull files if
# 1) File / test_data folder does not exist
# 2) SHA mismatch
import pycurl
import csv
import hashlib
import re
import os.path
import time
import itertools
import sys
import getopt
#globals
# Populated from the command-line options parsed in the main script below.
url = ''  # base URL of the test-data server (-u)
file_list_path = ''  # path to the sha1 manifest file (-i/--input_csv)
local_resource_path = ''  # local directory to download files into (-o/--output_dir)
# Helper functions:
# A simple function which returns the sha hash of a file in hex
def get_file_sha(filename, chunk_size=None):
    """Return the SHA-1 digest of a file as a hex string.

    Args:
      filename: path of the file to hash.
      chunk_size: bytes read per iteration. Defaults to the module-level
        HASH_CHUNK constant, so existing callers are unaffected.

    Returns:
      The hex digest string, or None (implicitly) if the file could not
      be read; callers treat None as a hash mismatch.
    """
    # Resolve the default lazily so HASH_CHUNK is only required when the
    # caller does not supply an explicit chunk size.
    chunk = HASH_CHUNK if chunk_size is None else chunk_size
    try:
        sha_hash = hashlib.sha1()
        # 'with' guarantees the handle is closed; also avoids shadowing the
        # builtin name 'file' as the original did.
        with open(filename, 'rb') as infile:
            buf = infile.read(chunk)
            while len(buf) > 0:
                sha_hash.update(buf)
                buf = infile.read(chunk)
        return sha_hash.hexdigest()
    except IOError:
        # Best-effort by design: report the failure and fall through,
        # returning None. Parenthesized print works in Python 2 and 3.
        print("Error reading " + filename)
# Downloads a file from a url, and then checks the sha against the passed
# in sha
def download_and_check_sha(url, filename, sha):
    """Download url/filename into local_resource_path and verify its SHA-1.

    Args:
      url: base URL of the test-data server (no trailing slash).
      filename: name of the file to fetch; appended to url with '/'.
      sha: expected SHA-1 hex digest of the downloaded file.

    Returns:
      True if the downloaded file's digest matches sha.
    """
    path = os.path.join(local_resource_path, filename)
    fp = open(path, "wb")
    try:
        curl = pycurl.Curl()
        try:
            curl.setopt(pycurl.URL, url + "/" + filename)
            curl.setopt(pycurl.WRITEDATA, fp)
            curl.perform()
        finally:
            # Release the curl handle even if the transfer raises; the
            # original leaked both handles on error.
            curl.close()
    finally:
        fp.close()
    return get_file_sha(path) == sha
#constants
ftp_retries = 3  # download attempts per file before giving up
# Column layout of each manifest row: '<sha1> <filename>'.
SHA_COL = 0
NAME_COL = 1
EXPECTED_COL = 2  # expected number of columns per manifest row
HASH_CHUNK = 65536  # read size used when hashing files
# Main script
try:
    opts, args = \
        getopt.getopt(sys.argv[1:], \
                      "u:i:o:", ["url=", "input_csv=", "output_dir="])
except getopt.GetoptError:
    # Only trap option-parsing errors; the original bare 'except' also hid
    # KeyboardInterrupt and genuine bugs.
    print('get_files.py -u <url> -i <input_csv> -o <output_dir>')
    sys.exit(2)
for opt, arg in opts:
    # Fix: '--url' was declared to getopt but never handled, so using the
    # long form silently left 'url' empty.
    if opt in ("-u", "--url"):
        url = arg
    elif opt in ("-i", "--input_csv"):
        file_list_path = os.path.join(arg)
    elif opt in ("-o", "--output_dir"):
        local_resource_path = os.path.join(arg)
if len(sys.argv) != 7:
    print("Expects two paths and a url!")
    exit(1)
if not os.path.isdir(local_resource_path):
    os.makedirs(local_resource_path)
file_list_csv = open(file_list_path, "rb")
# Our 'csv' file uses multiple spaces as a delimiter, python's
# csv class only uses single character delimiters, so we convert them below
file_list_reader = csv.reader((re.sub(' +', ' ', line) \
                               for line in file_list_csv), delimiter=' ')
file_shas = []
file_names = []
for row in file_list_reader:
    # Skip blank or malformed rows that don't have the sha/name layout.
    if len(row) != EXPECTED_COL:
        continue
    file_shas.append(row[SHA_COL])
    file_names.append(row[NAME_COL])
file_list_csv.close()
# Download files, only if they don't already exist and have correct shas
# (zip behaves identically to the Python-2-only itertools.izip here).
for filename, sha in zip(file_names, file_shas):
    path = os.path.join(local_resource_path, filename)
    if os.path.isfile(path) \
            and get_file_sha(path) == sha:
        print(path + ' exists, skipping')
        continue
    for retry in range(0, ftp_retries):
        print("Downloading " + path)
        if not download_and_check_sha(url, filename, sha):
            print("Sha does not match, retrying...")
        else:
            break
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment