summaryrefslogtreecommitdiffstats
path: root/mobile/android/fenix/automation/taskcluster
diff options
context:
space:
mode:
authorDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-15 03:35:49 +0000
committerDaniel Baumann <daniel.baumann@progress-linux.org>2024-05-15 03:35:49 +0000
commitd8bbc7858622b6d9c278469aab701ca0b609cddf (patch)
treeeff41dc61d9f714852212739e6b3738b82a2af87 /mobile/android/fenix/automation/taskcluster
parentReleasing progress-linux version 125.0.3-1~progress7.99u1. (diff)
downloadfirefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.tar.xz
firefox-d8bbc7858622b6d9c278469aab701ca0b609cddf.zip
Merging upstream version 126.0.
Signed-off-by: Daniel Baumann <daniel.baumann@progress-linux.org>
Diffstat (limited to 'mobile/android/fenix/automation/taskcluster')
-rw-r--r--mobile/android/fenix/automation/taskcluster/.codecov1704
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/copy-robo-crash-artifacts.py273
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-beta.yml39
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-legacy-api-tests.yml55
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-robo-test.yml39
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-screenshots-tests.yml35
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test-robo.yml27
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test.yml39
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-arm64-v8a.yml36
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/flank-x86.yml37
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test-fromfile.py97
-rw-r--r--mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test.py89
-rwxr-xr-xmobile/android/fenix/automation/taskcluster/androidTest/robo-test.sh101
-rwxr-xr-xmobile/android/fenix/automation/taskcluster/androidTest/ui-test.sh150
14 files changed, 2721 insertions, 0 deletions
diff --git a/mobile/android/fenix/automation/taskcluster/.codecov b/mobile/android/fenix/automation/taskcluster/.codecov
new file mode 100644
index 0000000000..e366372544
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/.codecov
@@ -0,0 +1,1704 @@
+#!/usr/bin/env bash
+
+# Apache License Version 2.0, January 2004
+# https://github.com/codecov/codecov-bash/blob/master/LICENSE
+
+
+set -e +o pipefail
+
+VERSION="20200602-f809a24"
+
+url="https://codecov.io"
+env="$CODECOV_ENV"
+service=""
+token=""
+search_in=""
+flags=""
+exit_with=0
+curlargs=""
+curlawsargs=""
+dump="0"
+clean="0"
+curl_s="-s"
+name="$CODECOV_NAME"
+include_cov=""
+exclude_cov=""
+ddp="$(echo ~)/Library/Developer/Xcode/DerivedData"
+xp=""
+files=""
+cacert="$CODECOV_CA_BUNDLE"
+gcov_ignore="-not -path './bower_components/**' -not -path './node_modules/**' -not -path './vendor/**'"
+gcov_include=""
+
+ft_gcov="1"
+ft_coveragepy="1"
+ft_fix="1"
+ft_search="1"
+ft_s3="1"
+ft_network="1"
+ft_xcodellvm="1"
+ft_xcodeplist="0"
+ft_gcovout="1"
+ft_html="0"
+
+_git_root=$(git rev-parse --show-toplevel 2>/dev/null || hg root 2>/dev/null || echo $PWD)
+git_root="$_git_root"
+remote_addr=""
+if [ "$git_root" = "$PWD" ];
+then
+ git_root="."
+fi
+
+url_o=""
+pr_o=""
+build_o=""
+commit_o=""
+search_in_o=""
+tag_o=""
+branch_o=""
+slug_o=""
+prefix_o=""
+
+commit="$VCS_COMMIT_ID"
+branch="$VCS_BRANCH_NAME"
+pr="$VCS_PULL_REQUEST"
+slug="$VCS_SLUG"
+tag="$VCS_TAG"
+build_url="$CI_BUILD_URL"
+build="$CI_BUILD_ID"
+job="$CI_JOB_ID"
+
+beta_xcode_partials=""
+
+proj_root="$git_root"
+gcov_exe="gcov"
+gcov_arg=""
+
+b="\033[0;36m"
+g="\033[0;32m"
+r="\033[0;31m"
+e="\033[0;90m"
+x="\033[0m"
+
+# show_help: print the CLI usage text to stdout. The body is a single
+# unquoted here-doc, so $VERSION is expanded at print time; everything
+# else is emitted verbatim.
+show_help() {
+cat << EOF
+
+ Codecov Bash $VERSION
+
+ Global report uploading tool for Codecov
+ Documentation at https://docs.codecov.io/docs
+ Contribute at https://github.com/codecov/codecov-bash
+
+
+ -h Display this help and exit
+ -f FILE Target file(s) to upload
+
+ -f "path/to/file" only upload this file
+ skips searching unless provided patterns below
+
+ -f '!*.bar' ignore all files at pattern *.bar
+ -f '*.foo' include all files at pattern *.foo
+ Must use single quotes.
+ This is non-exclusive, use -s "*.foo" to match specific paths.
+
+ -s DIR Directory to search for coverage reports.
+ Already searches project root and artifact folders.
+ -t TOKEN Set the private repository token
+ (option) set environment variable CODECOV_TOKEN=:uuid
+
+ -t @/path/to/token_file
+ -t uuid
+
+ -n NAME Custom defined name of the upload. Visible in Codecov UI
+
+ -e ENV Specify environment variables to be included with this build
+ Also accepting environment variables: CODECOV_ENV=VAR,VAR2
+
+ -e VAR,VAR2
+
+ -X feature Toggle functionalities
+
+ -X gcov Disable gcov
+ -X coveragepy Disable python coverage
+ -X fix Disable report fixing
+ -X search Disable searching for reports
+ -X xcode Disable xcode processing
+ -X network Disable uploading the file network
+ -X gcovout Disable gcov output
+ -X html Enable coverage for HTML files
+
+ -N The commit SHA of the parent for which you are uploading coverage. If not present,
+ the parent will be determined using the API of your repository provider.
+ When using the repository provider's API, the parent is determined via finding
+ the closest ancestor to the commit.
+
+ -R root dir Used when not in git/hg project to identify project root directory
+ -F flag Flag the upload to group coverage metrics
+
+ -F unittests This upload is only unittests
+ -F integration This upload is only integration tests
+ -F ui,chrome This upload is Chrome - UI tests
+
+ -c Move discovered coverage reports to the trash
+ -Z Exit with 1 if not successful. Default will Exit with 0
+
+ -- xcode --
+ -D Custom Derived Data Path for Coverage.profdata and gcov processing
+ Default '~/Library/Developer/Xcode/DerivedData'
+ -J Specify packages to build coverage. Uploader will only build these packages.
+ This can significantly reduces time to build coverage reports.
+
+ -J 'MyAppName' Will match "MyAppName" and "MyAppNameTests"
+ -J '^ExampleApp$' Will match only "ExampleApp" not "ExampleAppTests"
+
+ -- gcov --
+ -g GLOB Paths to ignore during gcov gathering
+ -G GLOB Paths to include during gcov gathering
+ -p dir Project root directory
+ Also used when preparing gcov
+ -k prefix Prefix filepaths to help resolve path fixing: https://github.com/codecov/support/issues/472
+ -x gcovexe gcov executable to run. Defaults to 'gcov'
+ -a gcovargs extra arguments to pass to gcov
+
+ -- Override CI Environment Variables --
+ These variables are automatically detected by popular CI providers
+
+ -B branch Specify the branch name
+ -C sha Specify the commit sha
+ -P pr Specify the pull request number
+ -b build Specify the build number
+ -T tag Specify the git tag
+
+ -- Enterprise --
+ -u URL Set the target url for Enterprise customers
+ Not required when retrieving the bash uploader from your CCE
+ (option) Set environment variable CODECOV_URL=https://my-hosted-codecov.com
+ -r SLUG owner/repo slug used instead of the private repo token in Enterprise
+ (option) set environment variable CODECOV_SLUG=:owner/:repo
+ (option) set in your codecov.yml "codecov.slug"
+ -S PATH File path to your cacert.pem file used to verify ssl with Codecov Enterprise (optional)
+ (option) Set environment variable: CODECOV_CA_BUNDLE="/path/to/ca.pem"
+ -U curlargs Extra curl arguments to communicate with Codecov. e.g., -U "--proxy http://http-proxy"
+ -A curlargs Extra curl arguments to communicate with AWS.
+
+ -- Debugging --
+ -d Don't upload, but dump upload file to stdout
+ -K Remove color from the output
+ -v Verbose mode
+
+EOF
+}
+
+
+# say: print a message, interpreting backslash escapes (-e) so that the
+# ANSI color variables ($g, $r, $e, $x, ...) embedded in messages render
+# as colors.
+say() {
+ echo -e "$1"
+}
+
+
+# urlencode: percent-encode "$1" for safe use in a query string.
+# Abuses curl's --data-urlencode against an empty URL and reads the
+# encoded value back from -w %{url_effective}; 'cut -c 3-' strips the
+# leading "/?" of the effective URL.
+# NOTE(review): the trailing sed removes only the FIRST %0A occurrence
+# (no /g flag) — upstream behavior, preserved as-is.
+urlencode() {
+ echo "$1" | curl -Gso /dev/null -w %{url_effective} --data-urlencode @- "" | cut -c 3- | sed -e 's/%0A//'
+}
+
+
+# swiftcov: generate llvm-cov text reports for every .app/.framework/
+# .xctest bundle found under the Build directory that contains the given
+# profdata file.
+# $1 - path to a Coverage.profdata file
+# $2 - optional case-insensitive project filter (grep pattern); empty
+#      matches all projects
+# Output: one "<project>.<type>.coverage.txt" file per matched bundle.
+swiftcov() {
+ _dir=$(dirname "$1" | sed 's/\(Build\).*/\1/g')
+ for _type in app framework xctest
+ do
+ find "$_dir" -name "*.$_type" | while read f
+ do
+ _proj=${f##*/}
+ _proj=${_proj%."$_type"}
+ if [ "$2" = "" ] || [ "$(echo "$_proj" | grep -i "$2")" != "" ];
+ then
+ say " $g+$x Building reports for $_proj $_type"
+ # Binary may live at the bundle root (iOS) or Contents/MacOS (macOS).
+ dest=$([ -f "$f/$_proj" ] && echo "$f/$_proj" || echo "$f/Contents/MacOS/$_proj")
+ # Strip whitespace from the project name for a safe output filename.
+ _proj_name=$(echo "$_proj" | sed -e 's/[[:space:]]//g')
+ xcrun llvm-cov show $beta_xcode_partials -instr-profile "$1" "$dest" > "$_proj_name.$_type.coverage.txt" \
+ || say " ${r}x>${x} llvm-cov failed to produce results for $dest"
+ fi
+ done
+ done
+}
+
+
+# Credits to: https://gist.github.com/pkuczynski/8665367
+# parse_yaml: flatten a simple YAML file into shell-style assignments,
+# e.g. "codecov:\n  token: x" -> codecov_token="x".
+# $1 - path to the yaml file
+# $2 - optional prefix prepended to every emitted variable name
+# Uses \034 (ASCII FS) as an internal field separator that cannot appear
+# in normal yaml text.
+# NOTE(review): the awk body only ever joins with vname[0], so keys nested
+# deeper than two levels are not fully qualified — upstream behavior.
+parse_yaml() {
+ local prefix=$2
+ local s='[[:space:]]*' w='[a-zA-Z0-9_]*' fs=$(echo @|tr @ '\034')
+ sed -ne "s|^\($s\)\($w\)$s:$s\"\(.*\)\"$s\$|\1$fs\2$fs\3|p" \
+ -e "s|^\($s\)\($w\)$s:$s\(.*\)$s\$|\1$fs\2$fs\3|p" $1 |
+ awk -F$fs '{
+ indent = length($1)/2;
+ vname[indent] = $2;
+ for (i in vname) {if (i > indent) {delete vname[i]}}
+ if (length($3) > 0) {
+ vn=""; if (indent > 0) {vn=(vn)(vname[0])("_")}
+ printf("%s%s%s=\"%s\"\n", "'$prefix'",vn, $2, $3);
+ }
+ }'
+}
+
+
+# Parse command-line options. Each flag either stores an explicit
+# override ("*_o" variables, applied later over auto-detected values),
+# flips a feature toggle (ft_*), or accumulates into a list (files,
+# flags, search_in_o).
+if [ $# != 0 ];
+then
+ while getopts "a:A:b:B:cC:dD:e:f:F:g:G:hJ:k:Kn:p:P:r:R:y:s:S:t:T:u:U:vx:X:ZN:" o
+ do
+ case "$o" in
+ "N")
+ parent=$OPTARG
+ ;;
+ "a")
+ gcov_arg=$OPTARG
+ ;;
+ "A")
+ curlawsargs="$OPTARG"
+ ;;
+ "b")
+ build_o="$OPTARG"
+ ;;
+ "B")
+ branch_o="$OPTARG"
+ ;;
+ "c")
+ clean="1"
+ ;;
+ "C")
+ commit_o="$OPTARG"
+ ;;
+ "d")
+ dump="1"
+ ;;
+ "D")
+ ddp="$OPTARG"
+ ;;
+ "e")
+ env="$env,$OPTARG"
+ ;;
+ # -f: "!pat" adds an exclusion, "*pat*" adds an inclusion pattern,
+ # and a plain path disables searching and uploads just that file.
+ "f")
+ if [ "${OPTARG::1}" = "!" ];
+ then
+ exclude_cov="$exclude_cov -not -path '${OPTARG:1}'"
+
+ elif [[ "$OPTARG" = *"*"* ]];
+ then
+ include_cov="$include_cov -or -name '$OPTARG'"
+
+ else
+ ft_search=0
+ if [ "$files" = "" ];
+ then
+ files="$OPTARG"
+ else
+ files="$files
+$OPTARG"
+ fi
+ fi
+ ;;
+ "F")
+ if [ "$flags" = "" ];
+ then
+ flags="$OPTARG"
+ else
+ flags="$flags,$OPTARG"
+ fi
+ ;;
+ "g")
+ gcov_ignore="$gcov_ignore -not -path '$OPTARG'"
+ ;;
+ "G")
+ gcov_include="$gcov_include -path '$OPTARG'"
+ ;;
+ "h")
+ show_help
+ exit 0;
+ ;;
+ # -J: accumulate xcode project patterns, joined with \| (grep alternation).
+ "J")
+ ft_xcodellvm="1"
+ ft_xcodeplist="0"
+ if [ "$xp" = "" ];
+ then
+ xp="$OPTARG"
+ else
+ xp="$xp\|$OPTARG"
+ fi
+ ;;
+ "k")
+ prefix_o=$(echo "$OPTARG" | sed -e 's:^/*::' -e 's:/*$::')
+ ;;
+ # -K: clear all ANSI color variables (plain output).
+ "K")
+ b=""
+ g=""
+ r=""
+ e=""
+ x=""
+ ;;
+ "n")
+ name="$OPTARG"
+ ;;
+ "p")
+ proj_root="$OPTARG"
+ ;;
+ "P")
+ pr_o="$OPTARG"
+ ;;
+ "r")
+ slug_o="$OPTARG"
+ ;;
+ "R")
+ git_root="$OPTARG"
+ ;;
+ "s")
+ if [ "$search_in_o" = "" ];
+ then
+ search_in_o="$OPTARG"
+ else
+ search_in_o="$search_in_o $OPTARG"
+ fi
+ ;;
+ "S")
+ cacert="--cacert \"$OPTARG\""
+ ;;
+ # -t: "@file" reads the token from a file; otherwise the literal value.
+ "t")
+ if [ "${OPTARG::1}" = "@" ];
+ then
+ token=$(cat "${OPTARG:1}" | tr -d ' \n')
+ else
+ token="$OPTARG"
+ fi
+ ;;
+ "T")
+ tag_o="$OPTARG"
+ ;;
+ "u")
+ url_o=$(echo "$OPTARG" | sed -e 's/\/$//')
+ ;;
+ "U")
+ curlargs="$OPTARG"
+ ;;
+ # -v: trace execution and make curl verbose (drop -s).
+ "v")
+ set -x
+ curl_s=""
+ ;;
+ "x")
+ gcov_exe=$OPTARG
+ ;;
+ "X")
+ if [ "$OPTARG" = "gcov" ];
+ then
+ ft_gcov="0"
+ elif [ "$OPTARG" = "coveragepy" ] || [ "$OPTARG" = "py" ];
+ then
+ ft_coveragepy="0"
+ elif [ "$OPTARG" = "gcovout" ];
+ then
+ ft_gcovout="0"
+ elif [ "$OPTARG" = "xcodellvm" ];
+ then
+ ft_xcodellvm="1"
+ ft_xcodeplist="0"
+ elif [ "$OPTARG" = "fix" ] || [ "$OPTARG" = "fixes" ];
+ then
+ ft_fix="0"
+ elif [ "$OPTARG" = "xcode" ];
+ then
+ ft_xcodellvm="0"
+ ft_xcodeplist="0"
+ elif [ "$OPTARG" = "search" ];
+ then
+ ft_search="0"
+ elif [ "$OPTARG" = "xcodepartials" ];
+ then
+ beta_xcode_partials="-use-color"
+ elif [ "$OPTARG" = "network" ];
+ then
+ ft_network="0"
+ elif [ "$OPTARG" = "s3" ];
+ then
+ ft_s3="0"
+ elif [ "$OPTARG" = "html" ];
+ then
+ ft_html="1"
+ fi
+ ;;
+ "y")
+ echo -e "${r}DeprecationWarning${x}: The -y flag is no longer supported by Codecov."`
+ `"\n codecov.yml must be located underneath the root, dev/, or .github/ directories"
+ ;;
+ "Z")
+ exit_with=1
+ ;;
+ esac
+ done
+fi
+
+# Print the startup banner (backslashes are doubled because say() uses
+# echo -e).
+say "
+ _____ _
+ / ____| | |
+| | ___ __| | ___ ___ _____ __
+| | / _ \\ / _\` |/ _ \\/ __/ _ \\ \\ / /
+| |___| (_) | (_| | __/ (_| (_) \\ V /
+ \\_____\\___/ \\__,_|\\___|\\___\\___/ \\_/
+ Bash-$VERSION
+
+"
+
+search_in="$proj_root"
+
+# CI provider detection: each branch recognizes one CI system from its
+# characteristic environment variables and fills in service/branch/
+# commit/build/pr/slug/job accordingly. Order matters — some providers
+# (e.g. Shippable) also set other providers' variables and are guarded
+# against explicitly.
+if [ "$JENKINS_URL" != "" ];
+then
+ say "$e==>$x Jenkins CI detected."
+ # https://wiki.jenkins-ci.org/display/JENKINS/Building+a+software+project
+ # https://wiki.jenkins-ci.org/display/JENKINS/GitHub+pull+request+builder+plugin#GitHubpullrequestbuilderplugin-EnvironmentVariables
+ service="jenkins"
+
+ if [ "$ghprbSourceBranch" != "" ];
+ then
+ branch="$ghprbSourceBranch"
+ elif [ "$GIT_BRANCH" != "" ];
+ then
+ branch="$GIT_BRANCH"
+ elif [ "$BRANCH_NAME" != "" ];
+ then
+ branch="$BRANCH_NAME"
+ fi
+
+ if [ "$ghprbActualCommit" != "" ];
+ then
+ commit="$ghprbActualCommit"
+ elif [ "$GIT_COMMIT" != "" ];
+ then
+ commit="$GIT_COMMIT"
+ fi
+
+ if [ "$ghprbPullId" != "" ];
+ then
+ pr="$ghprbPullId"
+ elif [ "$CHANGE_ID" != "" ];
+ then
+ pr="$CHANGE_ID"
+ fi
+
+ build="$BUILD_NUMBER"
+ build_url=$(urlencode "$BUILD_URL")
+
+elif [ "$CI" = "true" ] && [ "$TRAVIS" = "true" ] && [ "$SHIPPABLE" != "true" ];
+then
+ say "$e==>$x Travis CI detected."
+ # https://docs.travis-ci.com/user/environment-variables/
+ service="travis"
+ commit="${TRAVIS_PULL_REQUEST_SHA:-$TRAVIS_COMMIT}"
+ build="$TRAVIS_JOB_NUMBER"
+ pr="$TRAVIS_PULL_REQUEST"
+ job="$TRAVIS_JOB_ID"
+ slug="$TRAVIS_REPO_SLUG"
+ env="$env,TRAVIS_OS_NAME"
+ tag="$TRAVIS_TAG"
+ if [ "$TRAVIS_BRANCH" != "$TRAVIS_TAG" ];
+ then
+ branch="$TRAVIS_BRANCH"
+ fi
+
+ # Record the first TRAVIS_*_VERSION variable as the build language/env.
+ language=$(compgen -A variable | grep "^TRAVIS_.*_VERSION$" | head -1)
+ if [ "$language" != "" ];
+ then
+ env="$env,${!language}"
+ fi
+
+elif [ "$CODEBUILD_CI" = "true" ];
+then
+ say "$e==>$x AWS Codebuild detected."
+ # https://docs.aws.amazon.com/codebuild/latest/userguide/build-env-ref-env-vars.html
+ service="codebuild"
+ commit="$CODEBUILD_RESOLVED_SOURCE_VERSION"
+ build="$CODEBUILD_BUILD_ID"
+ branch="$(echo $CODEBUILD_WEBHOOK_HEAD_REF | sed 's/^refs\/heads\///')"
+ if [ "${CODEBUILD_SOURCE_VERSION/pr}" = "$CODEBUILD_SOURCE_VERSION" ] ; then
+ pr="false"
+ else
+ pr="$(echo $CODEBUILD_SOURCE_VERSION | sed 's/^pr\///')"
+ fi
+ job="$CODEBUILD_BUILD_ID"
+ slug="$(echo $CODEBUILD_SOURCE_REPO_URL | sed 's/^.*:\/\/[^\/]*\///' | sed 's/\.git$//')"
+
+elif [ "$DOCKER_REPO" != "" ];
+then
+ say "$e==>$x Docker detected."
+ # https://docs.docker.com/docker-cloud/builds/advanced/
+ service="docker"
+ branch="$SOURCE_BRANCH"
+ commit="$SOURCE_COMMIT"
+ slug="$DOCKER_REPO"
+ tag="$CACHE_TAG"
+ env="$env,IMAGE_NAME"
+
+elif [ "$CI" = "true" ] && [ "$CI_NAME" = "codeship" ];
+then
+ say "$e==>$x Codeship CI detected."
+ # https://www.codeship.io/documentation/continuous-integration/set-environment-variables/
+ service="codeship"
+ branch="$CI_BRANCH"
+ build="$CI_BUILD_NUMBER"
+ build_url=$(urlencode "$CI_BUILD_URL")
+ commit="$CI_COMMIT_ID"
+
+elif [ ! -z "$CF_BUILD_URL" ] && [ ! -z "$CF_BUILD_ID" ];
+then
+ say "$e==>$x Codefresh CI detected."
+ # https://docs.codefresh.io/v1.0/docs/variables
+ service="codefresh"
+ branch="$CF_BRANCH"
+ build="$CF_BUILD_ID"
+ build_url=$(urlencode "$CF_BUILD_URL")
+ commit="$CF_REVISION"
+
+elif [ "$TEAMCITY_VERSION" != "" ];
+then
+ say "$e==>$x TeamCity CI detected."
+ # https://confluence.jetbrains.com/display/TCD8/Predefined+Build+Parameters
+ # https://confluence.jetbrains.com/plugins/servlet/mobile#content/view/74847298
+ # TeamCity requires the user to export build parameters as env vars
+ # manually; print setup instructions if that was not done.
+ if [ "$TEAMCITY_BUILD_BRANCH" = '' ];
+ then
+ echo " Teamcity does not automatically make build parameters available as environment variables."
+ echo " Add the following environment parameters to the build configuration"
+ echo " env.TEAMCITY_BUILD_BRANCH = %teamcity.build.branch%"
+ echo " env.TEAMCITY_BUILD_ID = %teamcity.build.id%"
+ echo " env.TEAMCITY_BUILD_URL = %teamcity.serverUrl%/viewLog.html?buildId=%teamcity.build.id%"
+ echo " env.TEAMCITY_BUILD_COMMIT = %system.build.vcs.number%"
+ echo " env.TEAMCITY_BUILD_REPOSITORY = %vcsroot.<YOUR TEAMCITY VCS NAME>.url%"
+ fi
+ service="teamcity"
+ branch="$TEAMCITY_BUILD_BRANCH"
+ build="$TEAMCITY_BUILD_ID"
+ build_url=$(urlencode "$TEAMCITY_BUILD_URL")
+ if [ "$TEAMCITY_BUILD_COMMIT" != "" ];
+ then
+ commit="$TEAMCITY_BUILD_COMMIT"
+ else
+ commit="$BUILD_VCS_NUMBER"
+ fi
+ remote_addr="$TEAMCITY_BUILD_REPOSITORY"
+
+elif [ "$CI" = "true" ] && [ "$CIRCLECI" = "true" ];
+then
+ say "$e==>$x Circle CI detected."
+ # https://circleci.com/docs/environment-variables
+ service="circleci"
+ branch="$CIRCLE_BRANCH"
+ build="$CIRCLE_BUILD_NUM"
+ job="$CIRCLE_NODE_INDEX"
+ if [ "$CIRCLE_PROJECT_REPONAME" != "" ];
+ then
+ slug="$CIRCLE_PROJECT_USERNAME/$CIRCLE_PROJECT_REPONAME"
+ else
+ # git@github.com:owner/repo.git
+ slug="${CIRCLE_REPOSITORY_URL##*:}"
+ # owner/repo.git
+ slug="${slug%%.git}"
+ fi
+ pr="$CIRCLE_PR_NUMBER"
+ commit="$CIRCLE_SHA1"
+ search_in="$search_in $CIRCLE_ARTIFACTS $CIRCLE_TEST_REPORTS"
+
+elif [ "$BUDDYBUILD_BRANCH" != "" ];
+then
+ say "$e==>$x buddybuild detected"
+ # http://docs.buddybuild.com/v6/docs/custom-prebuild-and-postbuild-steps
+ service="buddybuild"
+ branch="$BUDDYBUILD_BRANCH"
+ build="$BUDDYBUILD_BUILD_NUMBER"
+ build_url="https://dashboard.buddybuild.com/public/apps/$BUDDYBUILD_APP_ID/build/$BUDDYBUILD_BUILD_ID"
+ # BUDDYBUILD_TRIGGERED_BY
+ # Redirect the DerivedData path into buddybuild's sandbox unless the
+ # user already overrode it with -D.
+ if [ "$ddp" = "$(echo ~)/Library/Developer/Xcode/DerivedData" ];
+ then
+ ddp="/private/tmp/sandbox/${BUDDYBUILD_APP_ID}/bbtest"
+ fi
+
+elif [ "${bamboo_planRepository_revision}" != "" ];
+then
+ say "$e==>$x Bamboo detected"
+ # https://confluence.atlassian.com/bamboo/bamboo-variables-289277087.html#Bamboovariables-Build-specificvariables
+ service="bamboo"
+ commit="${bamboo_planRepository_revision}"
+ branch="${bamboo_planRepository_branch}"
+ build="${bamboo_buildNumber}"
+ build_url="${bamboo_buildResultsUrl}"
+ remote_addr="${bamboo_planRepository_repositoryUrl}"
+
+elif [ "$CI" = "true" ] && [ "$BITRISE_IO" = "true" ];
+then
+ # http://devcenter.bitrise.io/faq/available-environment-variables/
+ say "$e==>$x Bitrise CI detected."
+ service="bitrise"
+ branch="$BITRISE_GIT_BRANCH"
+ build="$BITRISE_BUILD_NUMBER"
+ build_url=$(urlencode "$BITRISE_BUILD_URL")
+ pr="$BITRISE_PULL_REQUEST"
+ if [ "$GIT_CLONE_COMMIT_HASH" != "" ];
+ then
+ commit="$GIT_CLONE_COMMIT_HASH"
+ fi
+
+elif [ "$CI" = "true" ] && [ "$SEMAPHORE" = "true" ];
+then
+ say "$e==>$x Semaphore CI detected."
+ # https://semaphoreapp.com/docs/available-environment-variables.html
+ service="semaphore"
+ branch="$BRANCH_NAME"
+ build="$SEMAPHORE_BUILD_NUMBER"
+ job="$SEMAPHORE_CURRENT_THREAD"
+ pr="$PULL_REQUEST_NUMBER"
+ slug="$SEMAPHORE_REPO_SLUG"
+ commit="$REVISION"
+ env="$env,SEMAPHORE_TRIGGER_SOURCE"
+
+elif [ "$CI" = "true" ] && [ "$BUILDKITE" = "true" ];
+then
+ say "$e==>$x Buildkite CI detected."
+ # https://buildkite.com/docs/guides/environment-variables
+ service="buildkite"
+ branch="$BUILDKITE_BRANCH"
+ build="$BUILDKITE_BUILD_NUMBER"
+ job="$BUILDKITE_JOB_ID"
+ build_url=$(urlencode "$BUILDKITE_BUILD_URL")
+ slug="$BUILDKITE_PROJECT_SLUG"
+ commit="$BUILDKITE_COMMIT"
+ if [[ "$BUILDKITE_PULL_REQUEST" != "false" ]]; then
+ pr="$BUILDKITE_PULL_REQUEST"
+ fi
+ tag="$BUILDKITE_TAG"
+
+elif [ "$CI" = "drone" ] || [ "$DRONE" = "true" ];
+then
+ say "$e==>$x Drone CI detected."
+ # http://docs.drone.io/env.html
+ # drone commits are not full shas
+ service="drone.io"
+ branch="$DRONE_BRANCH"
+ build="$DRONE_BUILD_NUMBER"
+ build_url=$(urlencode "${DRONE_BUILD_LINK}")
+ pr="$DRONE_PULL_REQUEST"
+ job="$DRONE_JOB_NUMBER"
+ tag="$DRONE_TAG"
+
+elif [ "$HEROKU_TEST_RUN_BRANCH" != "" ];
+then
+ say "$e==>$x Heroku CI detected."
+ # https://devcenter.heroku.com/articles/heroku-ci#environment-variables
+ service="heroku"
+ branch="$HEROKU_TEST_RUN_BRANCH"
+ build="$HEROKU_TEST_RUN_ID"
+
+elif [[ "$CI" = "true" || "$CI" = "True" ]] && [[ "$APPVEYOR" = "true" || "$APPVEYOR" = "True" ]];
+then
+ say "$e==>$x Appveyor CI detected."
+ # http://www.appveyor.com/docs/environment-variables
+ service="appveyor"
+ branch="$APPVEYOR_REPO_BRANCH"
+ build=$(urlencode "$APPVEYOR_JOB_ID")
+ pr="$APPVEYOR_PULL_REQUEST_NUMBER"
+ job="$APPVEYOR_ACCOUNT_NAME%2F$APPVEYOR_PROJECT_SLUG%2F$APPVEYOR_BUILD_VERSION"
+ slug="$APPVEYOR_REPO_NAME"
+ commit="$APPVEYOR_REPO_COMMIT"
+ build_url=$(urlencode "${APPVEYOR_URL}/project/${APPVEYOR_REPO_NAME}/builds/$APPVEYOR_BUILD_ID/job/${APPVEYOR_JOB_ID}")
+elif [ "$CI" = "true" ] && [ "$WERCKER_GIT_BRANCH" != "" ];
+then
+ say "$e==>$x Wercker CI detected."
+ # http://devcenter.wercker.com/articles/steps/variables.html
+ service="wercker"
+ branch="$WERCKER_GIT_BRANCH"
+ build="$WERCKER_MAIN_PIPELINE_STARTED"
+ slug="$WERCKER_GIT_OWNER/$WERCKER_GIT_REPOSITORY"
+ commit="$WERCKER_GIT_COMMIT"
+
+elif [ "$CI" = "true" ] && [ "$MAGNUM" = "true" ];
+then
+ say "$e==>$x Magnum CI detected."
+ # https://magnum-ci.com/docs/environment
+ service="magnum"
+ branch="$CI_BRANCH"
+ build="$CI_BUILD_NUMBER"
+ commit="$CI_COMMIT"
+
+elif [ "$SHIPPABLE" = "true" ];
+then
+ say "$e==>$x Shippable CI detected."
+ # http://docs.shippable.com/ci_configure/
+ service="shippable"
+ branch=$([ "$HEAD_BRANCH" != "" ] && echo "$HEAD_BRANCH" || echo "$BRANCH")
+ build="$BUILD_NUMBER"
+ build_url=$(urlencode "$BUILD_URL")
+ pr="$PULL_REQUEST"
+ slug="$REPO_FULL_NAME"
+ commit="$COMMIT"
+
+elif [ "$TDDIUM" = "true" ];
+then
+ say "Solano CI detected."
+ # http://docs.solanolabs.com/Setup/tddium-set-environment-variables/
+ service="solano"
+ commit="$TDDIUM_CURRENT_COMMIT"
+ branch="$TDDIUM_CURRENT_BRANCH"
+ build="$TDDIUM_TID"
+ pr="$TDDIUM_PR_ID"
+
+elif [ "$GREENHOUSE" = "true" ];
+then
+ say "$e==>$x Greenhouse CI detected."
+ # http://docs.greenhouseci.com/docs/environment-variables-files
+ service="greenhouse"
+ branch="$GREENHOUSE_BRANCH"
+ build="$GREENHOUSE_BUILD_NUMBER"
+ build_url=$(urlencode "$GREENHOUSE_BUILD_URL")
+ pr="$GREENHOUSE_PULL_REQUEST"
+ commit="$GREENHOUSE_COMMIT"
+ search_in="$search_in $GREENHOUSE_EXPORT_DIR"
+
+elif [ "$GITLAB_CI" != "" ];
+then
+ say "$e==>$x GitLab CI detected."
+ # http://doc.gitlab.com/ce/ci/variables/README.html
+ # CI_BUILD_* are the legacy (pre-9.0) names; CI_COMMIT_*/CI_JOB_* are
+ # their modern equivalents.
+ service="gitlab"
+ branch="${CI_BUILD_REF_NAME:-$CI_COMMIT_REF_NAME}"
+ build="${CI_BUILD_ID:-$CI_JOB_ID}"
+ remote_addr="${CI_BUILD_REPO:-$CI_REPOSITORY_URL}"
+ commit="${CI_BUILD_REF:-$CI_COMMIT_SHA}"
+ slug="${CI_PROJECT_PATH}"
+
+elif [ "$GITHUB_ACTION" != "" ];
+then
+ say "$e==>$x GitHub Actions detected."
+
+ # https://github.com/features/actions
+ service="github-actions"
+
+ # https://help.github.com/en/articles/virtual-environments-for-github-actions#environment-variables
+ branch="${GITHUB_REF#refs/heads/}"
+ if [ "$GITHUB_HEAD_REF" != "" ];
+ then
+ # PR refs are in the format: refs/pull/7/merge
+ pr="${GITHUB_REF#refs/pull/}"
+ pr="${pr%/merge}"
+ branch="${GITHUB_HEAD_REF}"
+ fi
+ commit="${GITHUB_SHA}"
+ slug="${GITHUB_REPOSITORY}"
+ build="${GITHUB_RUN_ID}"
+ build_url=$(urlencode "http://github.com/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}")
+
+elif [ "$SYSTEM_TEAMFOUNDATIONSERVERURI" != "" ];
+then
+ say "$e==>$x Azure Pipelines detected."
+ # https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=vsts
+ # https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&viewFallbackFrom=vsts&tabs=yaml
+ service="azure_pipelines"
+ commit="$BUILD_SOURCEVERSION"
+ build="$BUILD_BUILDNUMBER"
+ if [ -z "$SYSTEM_PULLREQUEST_PULLREQUESTNUMBER" ];
+ then
+ pr="$SYSTEM_PULLREQUEST_PULLREQUESTID"
+ else
+ pr="$SYSTEM_PULLREQUEST_PULLREQUESTNUMBER"
+ fi
+ project="${SYSTEM_TEAMPROJECT}"
+ server_uri="${SYSTEM_TEAMFOUNDATIONSERVERURI}"
+ job="${BUILD_BUILDID}"
+ branch="$BUILD_SOURCEBRANCHNAME"
+ build_url=$(urlencode "${SYSTEM_TEAMFOUNDATIONSERVERURI}${SYSTEM_TEAMPROJECT}/_build/results?buildId=${BUILD_BUILDID}")
+elif [ "$CI" = "true" ] && [ "$BITBUCKET_BUILD_NUMBER" != "" ];
+then
+ say "$e==>$x Bitbucket detected."
+ # https://confluence.atlassian.com/bitbucket/variables-in-pipelines-794502608.html
+ service="bitbucket"
+ branch="$BITBUCKET_BRANCH"
+ build="$BITBUCKET_BUILD_NUMBER"
+ slug="$BITBUCKET_REPO_OWNER/$BITBUCKET_REPO_SLUG"
+ job="$BITBUCKET_BUILD_NUMBER"
+ pr="$BITBUCKET_PR_ID"
+ commit="$BITBUCKET_COMMIT"
+ # See https://jira.atlassian.com/browse/BCLOUD-19393
+ # Bitbucket sometimes exports a short 12-char sha; resolve it to the
+ # full sha via git.
+ if [ "${#commit}" = 12 ];
+ then
+ commit=$(git rev-parse "$BITBUCKET_COMMIT")
+ fi
+elif [ "$CI" = "true" ] && [ "$BUDDY" = "true" ];
+then
+ say "$e==>$x Buddy CI detected."
+ # https://buddy.works/docs/pipelines/environment-variables
+ service="buddy"
+ branch="$BUDDY_EXECUTION_BRANCH"
+ build="$BUDDY_EXECUTION_ID"
+ build_url=$(urlencode "$BUDDY_EXECUTION_URL")
+ commit="$BUDDY_EXECUTION_REVISION"
+ pr="$BUDDY_EXECUTION_PULL_REQUEST_NO"
+ tag="$BUDDY_EXECUTION_TAG"
+ slug="$BUDDY_REPO_SLUG"
+
+elif [ "$CIRRUS_CI" != "" ];
+then
+ say "$e==>$x Cirrus CI detected."
+ # https://cirrus-ci.org/guide/writing-tasks/#environment-variables
+ service="cirrus-ci"
+ slug="$CIRRUS_REPO_FULL_NAME"
+ branch="$CIRRUS_BRANCH"
+ pr="$CIRRUS_PR"
+ commit="$CIRRUS_CHANGE_IN_REPO"
+ build="$CIRRUS_TASK_ID"
+ job="$CIRRUS_TASK_NAME"
+
+else
+ # No provider matched: continue with git/hg fallbacks further below.
+ say "${r}x>${x} No CI provider detected."
+ say " Testing inside Docker? ${b}http://docs.codecov.io/docs/testing-with-docker${x}"
+ say " Testing with Tox? ${b}https://docs.codecov.io/docs/python#section-testing-with-tox${x}"
+
+fi
+
+say " ${e}project root:${x} $git_root"
+
+# find branch, commit, repo from git command
+# Fallbacks for anything the CI detection above did not supply.
+if [ "$GIT_BRANCH" != "" ];
+then
+ branch="$GIT_BRANCH"
+
+elif [ "$branch" = "" ];
+then
+ branch=$(git rev-parse --abbrev-ref HEAD 2>/dev/null || hg branch 2>/dev/null || echo "")
+ # A detached HEAD reports "HEAD"; treat that as unknown.
+ if [ "$branch" = "HEAD" ];
+ then
+ branch=""
+ fi
+fi
+
+if [ "$commit_o" = "" ];
+then
+ # merge commit -> actual commit
+ # On PR builds the checked-out commit may be a synthetic merge commit;
+ # if HEAD has exactly two parents, report the second (the PR head).
+ mc=
+ if [ -n "$pr" ] && [ "$pr" != false ];
+ then
+ mc=$(git show --no-patch --format="%P" 2>/dev/null || echo "")
+ fi
+ if [[ "$mc" =~ ^[a-z0-9]{40}[[:space:]][a-z0-9]{40}$ ]];
+ then
+ say " Fixing merge commit SHA"
+ commit=$(echo "$mc" | cut -d' ' -f2)
+ elif [ "$GIT_COMMIT" != "" ];
+ then
+ commit="$GIT_COMMIT"
+ elif [ "$commit" = "" ];
+ then
+ commit=$(git log -1 --format="%H" 2>/dev/null || hg id -i --debug 2>/dev/null | tr -d '+' || echo "")
+ fi
+else
+ # -C always wins over any detected value.
+ commit="$commit_o"
+fi
+
+# Environment-variable overrides (lower precedence than CLI flags).
+if [ "$CODECOV_TOKEN" != "" ] && [ "$token" = "" ];
+then
+ say "${e}-->${x} token set from env"
+ token="$CODECOV_TOKEN"
+fi
+
+if [ "$CODECOV_URL" != "" ] && [ "$url_o" = "" ];
+then
+ say "${e}-->${x} url set from env"
+ url_o=$(echo "$CODECOV_URL" | sed -e 's/\/$//')
+fi
+
+if [ "$CODECOV_SLUG" != "" ];
+then
+ say "${e}-->${x} slug set from env"
+ slug_o="$CODECOV_SLUG"
+
+elif [ "$slug" = "" ];
+then
+ # Derive owner/repo from the remote URL (https or ssh form).
+ if [ "$remote_addr" = "" ];
+ then
+ remote_addr=$(git config --get remote.origin.url || hg paths default || echo '')
+ fi
+ if [ "$remote_addr" != "" ];
+ then
+ if echo "$remote_addr" | grep -q "//"; then
+ # https
+ slug=$(echo "$remote_addr" | cut -d / -f 4,5 | sed -e 's/\.git$//')
+ else
+ # ssh
+ slug=$(echo "$remote_addr" | cut -d : -f 2 | sed -e 's/\.git$//')
+ fi
+ fi
+ if [ "$slug" = "/" ];
+ then
+ slug=""
+ fi
+fi
+
+# Locate a codecov.yml / codecov.yaml anywhere in the repo (git, hg, or a
+# flat find in the project root); keep only the first match.
+yaml=$(cd "$git_root" && \
+ git ls-files "*codecov.yml" "*codecov.yaml" 2>/dev/null \
+ || hg locate "*codecov.yml" "*codecov.yaml" 2>/dev/null \
+ || cd $proj_root && find . -maxdepth 1 -type f -name '*codecov.y*ml' 2>/dev/null \
+ || echo '')
+yaml=$(echo "$yaml" | head -1)
+
+if [ "$yaml" != "" ];
+then
+ say " ${e}Yaml found at:${x} $yaml"
+ if [[ "$yaml" != /* ]]; then
+ # relative path for yaml file given, assume relative to the repo root
+ yaml="$git_root/$yaml"
+ fi
+ config=$(parse_yaml "$yaml" || echo '')
+
+ # TODO validate the yaml here
+
+ # Yaml-provided values fill in only when neither CLI nor env set them.
+ if [ "$(echo "$config" | grep 'codecov_token="')" != "" ] && [ "$token" = "" ];
+ then
+ say "${e}-->${x} token set from yaml"
+ token="$(echo "$config" | grep 'codecov_token="' | sed -e 's/codecov_token="//' | sed -e 's/"\.*//')"
+ fi
+
+ if [ "$(echo "$config" | grep 'codecov_url="')" != "" ] && [ "$url_o" = "" ];
+ then
+ say "${e}-->${x} url set from yaml"
+ url_o="$(echo "$config" | grep 'codecov_url="' | sed -e 's/codecov_url="//' | sed -e 's/"\.*//')"
+ fi
+
+ if [ "$(echo "$config" | grep 'codecov_slug="')" != "" ] && [ "$slug_o" = "" ];
+ then
+ say "${e}-->${x} slug set from yaml"
+ slug_o="$(echo "$config" | grep 'codecov_slug="' | sed -e 's/codecov_slug="//' | sed -e 's/"\.*//')"
+ fi
+else
+ say " ${g}Yaml not found, that's ok! Learn more at${x} ${b}http://docs.codecov.io/docs/codecov-yaml${x}"
+
+fi
+
+if [ "$branch_o" != "" ];
+then
+ branch=$(urlencode "$branch_o")
+else
+ branch=$(urlencode "$branch")
+fi
+
+# Assemble the upload query string; for each field an explicit "*_o"
+# override takes precedence over the detected value. "${pr##\#}" strips
+# a leading '#' from PR numbers.
+query="branch=$branch\
+ &commit=$commit\
+ &build=$([ "$build_o" = "" ] && echo "$build" || echo "$build_o")\
+ &build_url=$build_url\
+ &name=$(urlencode "$name")\
+ &tag=$([ "$tag_o" = "" ] && echo "$tag" || echo "$tag_o")\
+ &slug=$([ "$slug_o" = "" ] && urlencode "$slug" || urlencode "$slug_o")\
+ &service=$service\
+ &flags=$flags\
+ &pr=$([ "$pr_o" = "" ] && echo "${pr##\#}" || echo "${pr_o##\#}")\
+ &job=$job"
+
+# Azure Pipelines extras (project/server_uri are only set on that branch).
+if [ ! -z "$project" ] && [ ! -z "$server_uri" ];
+then
+ query=$(echo "$query&project=$project&server_uri=$server_uri" | tr -d ' ')
+fi
+
+if [ "$parent" != "" ];
+then
+ query=$(echo "parent=$parent&$query" | tr -d ' ')
+fi
+
+if [ "$ft_search" = "1" ];
+then
+ # detect bower comoponents location
+ bower_components="bower_components"
+ bower_rc=$(cd "$git_root" && cat .bowerrc 2>/dev/null || echo "")
+ if [ "$bower_rc" != "" ];
+ then
+ bower_components=$(echo "$bower_rc" | tr -d '\n' | grep '"directory"' | cut -d'"' -f4 | sed -e 's/\/$//')
+ if [ "$bower_components" = "" ];
+ then
+ bower_components="bower_components"
+ fi
+ fi
+
+ # Swift Coverage
+ if [ "$ft_xcodellvm" = "1" ] && [ -d "$ddp" ];
+ then
+ say "${e}==>${x} Processing Xcode reports via llvm-cov"
+ say " DerivedData folder: $ddp"
+ profdata_files=$(find "$ddp" -name '*.profdata' 2>/dev/null || echo '')
+ if [ "$profdata_files" != "" ];
+ then
+ # xcode via profdata
+ if [ "$xp" = "" ];
+ then
+ # xp=$(xcodebuild -showBuildSettings 2>/dev/null | grep -i "^\s*PRODUCT_NAME" | sed -e 's/.*= \(.*\)/\1/')
+ # say " ${e}->${x} Speed up Xcode processing by adding ${e}-J '$xp'${x}"
+ say " ${g}hint${x} Speed up Swift processing by using use ${g}-J 'AppName'${x} (regexp accepted)"
+ say " ${g}hint${x} This will remove Pods/ from your report. Also ${b}https://docs.codecov.io/docs/ignoring-paths${x}"
+ fi
+ while read -r profdata;
+ do
+ if [ "$profdata" != "" ];
+ then
+ swiftcov "$profdata" "$xp"
+ fi
+ done <<< "$profdata_files"
+ else
+ say " ${e}->${x} No Swift coverage found"
+ fi
+
+ # Obj-C Gcov Coverage
+ if [ "$ft_gcov" = "1" ];
+ then
+ say " ${e}->${x} Running $gcov_exe for Obj-C"
+ if [ "$ft_gcovout" = "0" ];
+ then
+ # suppress gcov output
+ bash -c "find $ddp -type f -name '*.gcda' $gcov_include $gcov_ignore -exec $gcov_exe -p $gcov_arg {} +" >/dev/null 2>&1 || true
+ else
+ bash -c "find $ddp -type f -name '*.gcda' $gcov_include $gcov_ignore -exec $gcov_exe -p $gcov_arg {} +" || true
+ fi
+ fi
+ fi
+
+ if [ "$ft_xcodeplist" = "1" ] && [ -d "$ddp" ];
+ then
+ say "${e}==>${x} Processing Xcode plists"
+ plists_files=$(find "$ddp" -name '*.xccoverage' 2>/dev/null || echo '')
+ if [ "$plists_files" != "" ];
+ then
+ while read -r plist;
+ do
+ if [ "$plist" != "" ];
+ then
+ say " ${g}Found${x} plist file at $plist"
+ plutil -convert xml1 -o "$(basename "$plist").plist" -- $plist
+ fi
+ done <<< "$plists_files"
+ fi
+ fi
+
+ # Gcov Coverage
+ if [ "$ft_gcov" = "1" ];
+ then
+ say "${e}==>${x} Running $gcov_exe in $proj_root ${e}(disable via -X gcov)${x}"
+ if [ "$ft_gcovout" = "0" ];
+ then
+ # suppress gcov output
+ bash -c "find $proj_root -type f -name '*.gcno' $gcov_include $gcov_ignore -exec $gcov_exe -pb $gcov_arg {} +" >/dev/null 2>&1 || true
+ else
+ bash -c "find $proj_root -type f -name '*.gcno' $gcov_include $gcov_ignore -exec $gcov_exe -pb $gcov_arg {} +" || true
+ fi
+ else
+ say "${e}==>${x} gcov disabled"
+ fi
+
+ # Python Coverage
+ if [ "$ft_coveragepy" = "1" ];
+ then
+ if [ ! -f coverage.xml ];
+ then
+ if which coverage >/dev/null 2>&1;
+ then
+ say "${e}==>${x} Python coveragepy exists ${e}disable via -X coveragepy${x}"
+
+ dotcoverage=$(find "$git_root" -name '.coverage' -or -name '.coverage.*' | head -1 || echo '')
+ if [ "$dotcoverage" != "" ];
+ then
+ cd "$(dirname "$dotcoverage")"
+ if [ ! -f .coverage ];
+ then
+ say " ${e}->${x} Running coverage combine"
+ coverage combine -a
+ fi
+ say " ${e}->${x} Running coverage xml"
+ if [ "$(coverage xml -i)" != "No data to report." ];
+ then
+ files="$files
+$PWD/coverage.xml"
+ else
+ say " ${r}No data to report.${x}"
+ fi
+ cd "$proj_root"
+ else
+ say " ${r}No .coverage file found.${x}"
+ fi
+ else
+ say "${e}==>${x} Python coveragepy not found"
+ fi
+ fi
+ else
+ say "${e}==>${x} Python coveragepy disabled"
+ fi
+
+ if [ "$search_in_o" != "" ];
+ then
+ # location override
+ search_in="$search_in_o"
+ fi
+
+ say "$e==>$x Searching for coverage reports in:"
+ for _path in $search_in
+ do
+ say " ${g}+${x} $_path"
+ done
+
+ patterns="find $search_in \( \
+ -name vendor \
+ -or -name htmlcov \
+ -or -name virtualenv \
+ -or -name js/generated/coverage \
+ -or -name .virtualenv \
+ -or -name virtualenvs \
+ -or -name .virtualenvs \
+ -or -name .env \
+ -or -name .envs \
+ -or -name env \
+ -or -name .yarn-cache \
+ -or -name envs \
+ -or -name .venv \
+ -or -name .venvs \
+ -or -name venv \
+ -or -name venvs \
+ -or -name .git \
+ -or -name .hg \
+ -or -name .tox \
+ -or -name __pycache__ \
+ -or -name '.egg-info*' \
+ -or -name '$bower_components' \
+ -or -name node_modules \
+ -or -name 'conftest_*.c.gcov' \
+ \) -prune -or \
+ -type f \( -name '*coverage*.*' \
+ -or -name 'nosetests.xml' \
+ -or -name 'jacoco*.xml' \
+ -or -name 'clover.xml' \
+ -or -name 'report.xml' \
+ -or -name '*.codecov.*' \
+ -or -name 'codecov.*' \
+ -or -name 'cobertura.xml' \
+ -or -name 'excoveralls.json' \
+ -or -name 'luacov.report.out' \
+ -or -name 'coverage-final.json' \
+ -or -name 'naxsi.info' \
+ -or -name 'lcov.info' \
+ -or -name 'lcov.dat' \
+ -or -name '*.lcov' \
+ -or -name '*.clover' \
+ -or -name 'cover.out' \
+ -or -name 'gcov.info' \
+ -or -name '*.gcov' \
+ -or -name '*.lst' \
+ $include_cov \) \
+ $exclude_cov \
+ -not -name '*.profdata' \
+ -not -name 'coverage-summary.json' \
+ -not -name 'phpunit-code-coverage.xml' \
+ -not -name '*/classycle/report.xml' \
+ -not -name 'remapInstanbul.coverage*.json' \
+ -not -name 'phpunit-coverage.xml' \
+ -not -name '*codecov.yml' \
+ -not -name '*.serialized' \
+ -not -name '.coverage*' \
+ -not -name '.*coveragerc' \
+ -not -name '*.sh' \
+ -not -name '*.bat' \
+ -not -name '*.ps1' \
+ -not -name '*.env' \
+ -not -name '*.cmake' \
+ -not -name '*.dox' \
+ -not -name '*.ec' \
+ -not -name '*.rst' \
+ -not -name '*.h' \
+ -not -name '*.scss' \
+ -not -name '*.o' \
+ -not -name '*.proto' \
+ -not -name '*.sbt' \
+ -not -name '*.xcoverage.*' \
+ -not -name '*.gz' \
+ -not -name '*.conf' \
+ -not -name '*.p12' \
+ -not -name '*.csv' \
+ -not -name '*.rsp' \
+ -not -name '*.m4' \
+ -not -name '*.pem' \
+ -not -name '*~' \
+ -not -name '*.exe' \
+ -not -name '*.am' \
+ -not -name '*.template' \
+ -not -name '*.cp' \
+ -not -name '*.bw' \
+ -not -name '*.crt' \
+ -not -name '*.log' \
+ -not -name '*.cmake' \
+ -not -name '*.pth' \
+ -not -name '*.in' \
+ -not -name '*.jar*' \
+ -not -name '*.pom*' \
+ -not -name '*.png' \
+ -not -name '*.jpg' \
+ -not -name '*.sql' \
+ -not -name '*.jpeg' \
+ -not -name '*.svg' \
+ -not -name '*.gif' \
+ -not -name '*.csv' \
+ -not -name '*.snapshot' \
+ -not -name '*.mak*' \
+ -not -name '*.bash' \
+ -not -name '*.data' \
+ -not -name '*.py' \
+ -not -name '*.class' \
+ -not -name '*.xcconfig' \
+ -not -name '*.ec' \
+ -not -name '*.coverage' \
+ -not -name '*.pyc' \
+ -not -name '*.cfg' \
+ -not -name '*.egg' \
+ -not -name '*.ru' \
+ -not -name '*.css' \
+ -not -name '*.less' \
+ -not -name '*.pyo' \
+ -not -name '*.whl' \
+ -not -name '*.html' \
+ -not -name '*.ftl' \
+ -not -name '*.erb' \
+ -not -name '*.rb' \
+ -not -name '*.js' \
+ -not -name '*.jade' \
+ -not -name '*.db' \
+ -not -name '*.md' \
+ -not -name '*.cpp' \
+ -not -name '*.gradle' \
+ -not -name '*.tar.tz' \
+ -not -name '*.scss' \
+ -not -name 'include.lst' \
+ -not -name 'fullLocaleNames.lst' \
+ -not -name 'inputFiles.lst' \
+ -not -name 'createdFiles.lst' \
+ -not -name 'scoverage.measurements.*' \
+ -not -name 'test_*_coverage.txt' \
+ -not -name 'testrunner-coverage*' \
+ -print 2>/dev/null"
+ files=$(eval "$patterns" || echo '')
+
+elif [ "$include_cov" != "" ];
+then
+ files=$(eval "find $search_in -type f \( ${include_cov:5} \)$exclude_cov 2>/dev/null" || echo '')
+fi
+
+num_of_files=$(echo "$files" | wc -l | tr -d ' ')
+if [ "$num_of_files" != '' ] && [ "$files" != '' ];
+then
+ say " ${e}->${x} Found $num_of_files reports"
+fi
+
+# no files found
+if [ "$files" = "" ];
+then
+ say "${r}-->${x} No coverage report found."
+ say " Please visit ${b}http://docs.codecov.io/docs/supported-languages${x}"
+ exit ${exit_with};
+fi
+
+if [ "$ft_network" == "1" ];
+then
+ say "${e}==>${x} Detecting git/mercurial file structure"
+ network=$(cd "$git_root" && git ls-files 2>/dev/null || hg locate 2>/dev/null || echo "")
+ if [ "$network" = "" ];
+ then
+ network=$(find "$git_root" \( \
+ -name virtualenv \
+ -name .virtualenv \
+ -name virtualenvs \
+ -name .virtualenvs \
+ -name '*.png' \
+ -name '*.gif' \
+ -name '*.jpg' \
+ -name '*.jpeg' \
+ -name '*.md' \
+ -name .env \
+ -name .envs \
+ -name env \
+ -name envs \
+ -name .venv \
+ -name .venvs \
+ -name venv \
+ -name venvs \
+ -name .git \
+ -name .egg-info \
+ -name shunit2-2.1.6 \
+ -name vendor \
+ -name __pycache__ \
+ -name node_modules \
+ -path '*/$bower_components/*' \
+ -path '*/target/delombok/*' \
+ -path '*/build/lib/*' \
+ -path '*/js/generated/coverage/*' \
+ \) -prune -or \
+ -type f -print 2>/dev/null || echo '')
+ fi
+
+ if [ "$prefix_o" != "" ];
+ then
+ network=$(echo "$network" | awk "{print \"$prefix_o/\"\$0}")
+ fi
+fi
+
+upload_file=`mktemp /tmp/codecov.XXXXXX`
+adjustments_file=`mktemp /tmp/codecov.adjustments.XXXXXX`
+
# Remove the temporary upload payload, the adjustments scratch file, and the
# gzipped upload. Registered below via `trap cleanup INT ABRT TERM`.
# Fix: quote the expansions (mktemp paths can contain spaces via TMPDIR) and
# use `--` so a pathological leading-dash path cannot be parsed as an option.
cleanup() {
  rm -f -- "$upload_file" "$adjustments_file" "$upload_file.gz"
}
+
+trap cleanup INT ABRT TERM
+
+if [ "$env" != "" ];
+then
+ inc_env=""
+ say "${e}==>${x} Appending build variables"
+ for varname in $(echo "$env" | tr ',' ' ')
+ do
+ if [ "$varname" != "" ];
+ then
+ say " ${g}+${x} $varname"
+ inc_env="${inc_env}${varname}=$(eval echo "\$${varname}")
+"
+ fi
+ done
+
+echo "$inc_env<<<<<< ENV" >> $upload_file
+fi
+
+# Append git file list
+# write discovered yaml location
+echo "$yaml" >> $upload_file
+if [ "$ft_network" == "1" ];
+then
+ i="woff|eot|otf" # fonts
+ i="$i|gif|png|jpg|jpeg|psd" # images
+ i="$i|ptt|pptx|numbers|pages|md|txt|xlsx|docx|doc|pdf|csv" # docs
+ i="$i|yml|yaml|.gitignore" # supporting docs
+
+ if [ "$ft_html" != "1" ];
+ then
+ i="$i|html"
+ fi
+
+ echo "$network" | grep -vwE "($i)$" >> $upload_file
+fi
+echo "<<<<<< network" >> $upload_file
+
+fr=0
+say "${e}==>${x} Reading reports"
+while IFS='' read -r file;
+do
+ # read the coverage file
+ if [ "$(echo "$file" | tr -d ' ')" != '' ];
+ then
+ if [ -f "$file" ];
+ then
+ report_len=$(wc -c < "$file")
+ if [ "$report_len" -ne 0 ];
+ then
+ say " ${g}+${x} $file ${e}bytes=$(echo "$report_len" | tr -d ' ')${x}"
+ # append to to upload
+ _filename=$(basename "$file")
+ if [ "${_filename##*.}" = 'gcov' ];
+ then
+ echo "# path=$(echo "$file.reduced" | sed "s|^$git_root/||")" >> $upload_file
+ # get file name
+ head -1 "$file" >> $upload_file
+ # 1. remove source code
+ # 2. remove ending bracket lines
+ # 3. remove whitespace
+ # 4. remove contextual lines
+ # 5. remove function names
+ awk -F': *' '{print $1":"$2":"}' "$file" \
+ | sed '\/: *} *$/d' \
+ | sed 's/^ *//' \
+ | sed '/^-/d' \
+ | sed 's/^function.*/func/' >> $upload_file
+ else
+ echo "# path=$(echo "$file" | sed "s|^$git_root/||")" >> $upload_file
+ cat "$file" >> $upload_file
+ fi
+ echo "<<<<<< EOF" >> $upload_file
+ fr=1
+ if [ "$clean" = "1" ];
+ then
+ rm "$file"
+ fi
+ else
+ say " ${r}-${x} Skipping empty file $file"
+ fi
+ else
+ say " ${r}-${x} file not found at $file"
+ fi
+ fi
+done <<< "$(echo -e "$files")"
+
+if [ "$fr" = "0" ];
+then
+ say "${r}-->${x} No coverage data found."
+ say " Please visit ${b}http://docs.codecov.io/docs/supported-languages${x}"
+ say " search for your projects language to learn how to collect reports."
+ exit ${exit_with};
+fi
+
+if [ "$ft_fix" = "1" ];
+then
+ say "${e}==>${x} Appending adjustments"
+ say " ${b}https://docs.codecov.io/docs/fixing-reports${x}"
+
+ empty_line='^[[:space:]]*$'
+ # //
+ syntax_comment='^[[:space:]]*//.*'
+ # /* or */
+ syntax_comment_block='^[[:space:]]*(\/\*|\*\/)[[:space:]]*$'
+ # { or }
+ syntax_bracket='^[[:space:]]*[\{\}][[:space:]]*(//.*)?$'
+ # [ or ]
+ syntax_list='^[[:space:]]*[][][[:space:]]*(//.*)?$'
+
+ skip_dirs="-not -path '*/$bower_components/*' \
+ -not -path '*/node_modules/*'"
+
  # Collapse `grep -nH` output ("file:line:match") into one record per file of
  # the form "file:line,line,...". Rows whose match text contains /* or */ are
  # printed unchanged so block-comment markers keep their own line.
  cut_and_join() {
    awk 'BEGIN { FS=":" }
         $3 ~ /\/\*/ || $3 ~ /\*\// { print $0 ; next }
         $1!=key { if (key!="") print out ; key=$1 ; out=$1":"$2 ; next }
         { out=out","$2 }
         END { print out }' 2>/dev/null
  }
+
+ if echo "$network" | grep -m1 '.kt$' 1>/dev/null;
+ then
+ # skip brackets and comments
+ find "$git_root" -type f \
+ -name '*.kt' \
+ -exec \
+ grep -nIHE -e $syntax_bracket \
+ -e $syntax_comment_block {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+
+ # last line in file
+ find "$git_root" -type f \
+ -name '*.kt' -exec \
+ wc -l {} \; \
+ | while read l; do echo "EOF: $l"; done \
+ 2>/dev/null \
+ >> $adjustments_file \
+ || echo ''
+
+ fi
+
+ if echo "$network" | grep -m1 '.go$' 1>/dev/null;
+ then
+ # skip empty lines, comments, and brackets
+ find "$git_root" -not -path '*/vendor/*' \
+ -type f \
+ -name '*.go' \
+ -exec \
+ grep -nIHE \
+ -e $empty_line \
+ -e $syntax_comment \
+ -e $syntax_comment_block \
+ -e $syntax_bracket \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '.dart$' 1>/dev/null;
+ then
+ # skip brackets
+ find "$git_root" -type f \
+ -name '*.dart' \
+ -exec \
+ grep -nIHE \
+ -e $syntax_bracket \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '.php$' 1>/dev/null;
+ then
+ # skip empty lines, comments, and brackets
+ find "$git_root" -not -path "*/vendor/*" \
+ -type f \
+ -name '*.php' \
+ -exec \
+ grep -nIHE \
+ -e $syntax_list \
+ -e $syntax_bracket \
+ -e '^[[:space:]]*\);[[:space:]]*(//.*)?$' \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+ fi
+
+ if echo "$network" | grep -m1 '\(.cpp\|.h\|.cxx\|.c\|.hpp\|.m\|.swift\)$' 1>/dev/null;
+ then
+ # skip brackets
+ find "$git_root" -type f \
+ $skip_dirs \
+ \( \
+ -name '*.h' \
+ -or -name '*.cpp' \
+ -or -name '*.cxx' \
+ -or -name '*.m' \
+ -or -name '*.c' \
+ -or -name '*.hpp' \
+ -or -name '*.swift' \
+ \) -exec \
+ grep -nIHE \
+ -e $empty_line \
+ -e $syntax_bracket \
+ -e '// LCOV_EXCL' \
+ {} \; \
+ | cut_and_join \
+ >> $adjustments_file \
+ || echo ''
+
+ # skip brackets
+ find "$git_root" -type f \
+ $skip_dirs \
+ \( \
+ -name '*.h' \
+ -or -name '*.cpp' \
+ -or -name '*.cxx' \
+ -or -name '*.m' \
+ -or -name '*.c' \
+ -or -name '*.hpp' \
+ -or -name '*.swift' \
+ \) -exec \
+ grep -nIH '// LCOV_EXCL' \
+ {} \; \
+ >> $adjustments_file \
+ || echo ''
+
+ fi
+
+ found=$(cat $adjustments_file | tr -d ' ')
+
+ if [ "$found" != "" ];
+ then
+ say " ${g}+${x} Found adjustments"
+ echo "# path=fixes" >> $upload_file
+ cat $adjustments_file >> $upload_file
+ echo "<<<<<< EOF" >> $upload_file
+ rm -rf $adjustments_file
+ else
+ say " ${e}->${x} No adjustments found"
+ fi
+fi
+
+if [ "$url_o" != "" ];
+then
+ url="$url_o"
+fi
+
+if [ "$dump" != "0" ];
+then
+ # trim whitespace from query
+ say " ${e}->${x} Dumping upload file (no upload)"
+ echo "$url/upload/v4?$(echo "package=bash-$VERSION&token=$token&$query" | tr -d ' ')"
+ cat $upload_file
+else
+
+ say "${e}==>${x} Gzipping contents"
+ gzip -nf9 $upload_file
+
+ query=$(echo "${query}" | tr -d ' ')
+ say "${e}==>${x} Uploading reports"
+ say " ${e}url:${x} $url"
+ say " ${e}query:${x} $query"
+
+ # Full query without token (to display on terminal output)
+ queryNoToken=$(echo "package=bash-$VERSION&token=secret&$query" | tr -d ' ')
+ # now add token to query
+ query=$(echo "package=bash-$VERSION&token=$token&$query" | tr -d ' ')
+
+ if [ "$ft_s3" = "1" ];
+ then
+ i="0"
+ while [ $i -lt 4 ]
+ do
+ i=$[$i+1]
+ say " ${e}->${x} Pinging Codecov"
+ say "$url/upload/v4?$queryNoToken"
+ res=$(curl $curl_s -X POST $curlargs $cacert \
+ -H 'X-Reduced-Redundancy: false' \
+ -H 'X-Content-Type: application/x-gzip' \
+ "$url/upload/v4?$query" || true)
+ # a good replay is "https://codecov.io" + "\n" + "https://codecov.s3.amazonaws.com/..."
+ status=$(echo "$res" | head -1 | grep 'HTTP ' | cut -d' ' -f2)
+ if [ "$status" = "" ];
+ then
+ s3target=$(echo "$res" | sed -n 2p)
+ say " ${e}->${x} Uploading"
+
+
+ s3=$(curl $curl_s -fiX PUT $curlawsargs \
+ --data-binary @$upload_file.gz \
+ -H 'Content-Type: application/x-gzip' \
+ -H 'Content-Encoding: gzip' \
+ "$s3target" || true)
+
+
+ if [ "$s3" != "" ];
+ then
+ say " ${g}->${x} View reports at ${b}$(echo "$res" | sed -n 1p)${x}"
+ exit 0
+ else
+ say " ${r}X>${x} Failed to upload"
+ fi
+ elif [ "$status" = "400" ];
+ then
+ # 400 Error
+ say "${g}${res}${x}"
+ exit ${exit_with}
+ fi
+ say " ${e}->${x} Sleeping for 30s and trying again..."
+ sleep 30
+ done
+ fi
+
+ say " ${e}->${x} Uploading to Codecov"
+ i="0"
+ while [ $i -lt 4 ]
+ do
+ i=$[$i+1]
+
+ res=$(curl $curl_s -X POST $curlargs $cacert \
+ --data-binary @$upload_file.gz \
+ -H 'Content-Type: text/plain' \
+ -H 'Content-Encoding: gzip' \
+ -H 'X-Content-Encoding: gzip' \
+ -H 'Accept: text/plain' \
+ "$url/upload/v2?$query" || echo 'HTTP 500')
+ # HTTP 200
+ # http://....
+ status=$(echo "$res" | head -1 | cut -d' ' -f2)
+ if [ "$status" = "" ];
+ then
+ say " View reports at ${b}$(echo "$res" | head -2 | tail -1)${x}"
+ exit 0
+
+ elif [ "${status:0:1}" = "5" ];
+ then
+ say " ${e}->${x} Sleeping for 30s and trying again..."
+ sleep 30
+
+ else
+ say " ${g}${res}${x}"
+ exit 0
+ exit ${exit_with}
+ fi
+
+ done
+
+ say " ${r}X> Failed to upload coverage reports${x}"
+fi
+
+exit ${exit_with}
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/copy-robo-crash-artifacts.py b/mobile/android/fenix/automation/taskcluster/androidTest/copy-robo-crash-artifacts.py
new file mode 100644
index 0000000000..9945f08ef6
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/copy-robo-crash-artifacts.py
@@ -0,0 +1,273 @@
+#!/usr/bin/env python3
+
+# This Source Code Form is subject to the terms of the Mozilla Public
+# License, v. 2.0. If a copy of the MPL was not distributed with this
+# file, You can obtain one at http://mozilla.org/MPL/2.0/.
+
+"""
+This script is designed to automate the process of fetching crash logs from Google Cloud Storage (GCS) for failed devices in a Robo test.
+It is intended to be run as part of a Taskcluster job following a scheduled test task.
+The script requires the presence of a `matrix_ids.json` artifact in the results directory and the availability of the `gsutil` command in the environment.
+
+The script performs the following operations:
+- Loads the `matrix_ids.json` artifact to identify the GCS paths for the crash logs.
+- Identifies failed devices based on the outcomes specified in the `matrix_ids.json` artifact.
+- Fetches crash logs for the failed devices from the specified GCS paths.
+- Copies the fetched crash logs to the current worker artifact results directory.
+
+The script is configured to log its operations and errors, providing visibility into its execution process.
+It uses the `gsutil` command-line tool to interact with GCS, ensuring compatibility with the GCS environment.
+
+Usage:
+ python3 script_name.py
+
+Requirements:
+ - The `matrix_ids.json` artifact must be present in the results directory.
+ - The `gsutil` command must be available in the environment.
+ - The script should be run after a scheduled test task in a Taskcluster job.
+
+Output:
+ - Crash logs for failed devices are copied to the current worker artifact results directory.
+"""
+
+import json
+import logging
+import os
+import re
+import subprocess
+import sys
+from enum import Enum
+
+
+def setup_logging():
+ """Configure logging for the script."""
+ log_format = "%(message)s"
+ logging.basicConfig(level=logging.INFO, format=log_format)
+
+
class Worker(Enum):
    """
    Worker paths
    """

    # Directory holding test results; matrix_ids.json is read from here and
    # fetched crash logs are copied into it (see process_artifacts).
    RESULTS_DIR = "/builds/worker/artifacts/results"
    # Worker artifact root. NOTE(review): not referenced by the visible code
    # in this script — presumably used by the surrounding Taskcluster job.
    ARTIFACTS_DIR = "/builds/worker/artifacts"
+
+
class ArtifactType(Enum):
    """
    Artifact types for fetching crash logs and matrix IDs artifact.
    """

    # Glob passed to `gsutil ls` to match per-device crash log files.
    CRASH_LOG = "data_app_crash*.txt"
    # Filename of the artifact mapping test matrices to GCS paths/outcomes.
    MATRIX_IDS = "matrix_ids.json"
+
+
def load_matrix_ids_artifact(matrix_file_path):
    """Read and parse the matrix IDs artifact.

    Args:
        matrix_file_path (str): The file path to the matrix IDs artifact.
    Returns:
        dict: The contents of the matrix IDs artifact.

    Exits the process (status 1) if the file is missing or not valid JSON.
    """
    try:
        with open(matrix_file_path, "r") as handle:
            raw = handle.read()
    except FileNotFoundError:
        logging.error(f"Could not find matrix file: {matrix_file_path}")
        sys.exit(1)
    try:
        return json.loads(raw)
    except json.JSONDecodeError:
        logging.error(f"Error decoding matrix file: {matrix_file_path}")
        sys.exit(1)
+
+
def get_gcs_path(matrix_artifact_file):
    """Return the first non-empty ``gcsPath`` found in the matrix artifact.

    Args:
        matrix_artifact_file (dict): The matrix artifact file contents.
    Returns:
        str | None: The root GCS path, or None if no matrix declares one.
    """
    return next(
        (
            entry["gcsPath"]
            for entry in matrix_artifact_file.values()
            if entry.get("gcsPath")
        ),
        None,
    )
+
+
def check_gsutil_availability():
    """Abort the script (exit status 1) unless ``gsutil`` can be executed."""
    try:
        probe = subprocess.run(
            ["gsutil", "--version"], capture_output=True, text=True
        )
    except Exception as e:
        logging.error(f"Error executing gsutil: {e}")
        sys.exit(1)
    if probe.returncode != 0:
        logging.error(f"Error executing gsutil: {probe.stderr}")
        sys.exit(1)
+
+
def fetch_artifacts(root_gcs_path, device, artifact_pattern):
    """List GCS objects under a device directory that match a pattern.

    Args:
        root_gcs_path (str): The root GCS path for the artifacts.
        device (str): The device name for which to fetch artifacts.
        artifact_pattern (str): The pattern to match the artifacts.
    Returns:
        list: Matching GCS object URLs, or [] on any failure.
    """
    pattern = f"gs://{root_gcs_path.rstrip('/')}/{device}/{artifact_pattern}"
    try:
        listing = subprocess.run(
            ["gsutil", "ls", pattern], capture_output=True, text=True
        )
    except Exception as e:
        logging.error(f"Error executing gsutil: {e}")
        return []

    if listing.returncode == 0:
        return listing.stdout.splitlines()

    # Distinguish the common failure modes in the log output.
    if "AccessDeniedException" in listing.stderr:
        logging.error(f"Permission denied for GCS path: {pattern}")
    elif "network error" in listing.stderr.lower():
        logging.error(f"Network error accessing GCS path: {pattern}")
    else:
        logging.error(f"Failed to list files: {listing.stderr}")
    return []
+
+
def fetch_failed_device_names(matrix_artifact_file):
    """Collect device names whose test axis outcome was "failure".

    Args:
        matrix_artifact_file (dict): The matrix artifact file contents.
    Returns:
        list: Device names that failed; axes without a device are skipped.
    """
    return [
        axis["device"]
        for matrix in matrix_artifact_file.values()
        for axis in matrix.get("axes", [])
        if axis.get("outcome") == "failure" and axis.get("device")
    ]
+
+
def gsutil_cp(artifact, dest):
    """
    Copy the specified artifact to the destination path using `gsutil`.

    Args:
        artifact (str): The path to the artifact to copy.
        dest (str): The destination path to copy the artifact to.
    Returns:
        None
    """
    logging.info(f"Copying {artifact} to {dest}")
    try:
        result = subprocess.run(
            ["gsutil", "cp", artifact, dest], capture_output=True, text=True
        )
        if result.returncode != 0:
            if "AccessDeniedException" in result.stderr:
                logging.error(f"Permission denied for GCS path: {artifact}")
            elif "network error" in result.stderr.lower():
                logging.error(f"Network error accessing GCS path: {artifact}")
            else:
                # Fixed: previously logged "Failed to list files" — a
                # copy/paste from fetch_artifacts(); this is a copy operation.
                logging.error(f"Failed to copy {artifact}: {result.stderr}")
    except Exception as e:
        logging.error(f"Error executing gsutil: {e}")
+
+
def parse_crash_log(log_path):
    """Scan a fetched crash log and emit a treeherder-style PROCESS-CRASH line.

    Args:
        log_path (str): Local path to the crash log file.
    Returns:
        int: 1 if a crash stack was found and reported, otherwise 0.
    """
    if not os.path.isfile(log_path):
        return 0
    with open(log_path) as log_file:
        contents = log_file.read()

    # Crashing process name, e.g. "Process: org.mozilla.fenix".
    proc = "unknown"
    proc_match = re.search(r"Process: (.*)\n", contents, re.MULTILINE)
    if proc_match and len(proc_match.groups()) == 1:
        proc = proc_match.group(1)

    # Isolate the crash stack: an exception line (optionally with a message)
    # followed by the first "at ..." frame. Real-world stacks vary, so the
    # pattern is deliberately loose.
    stack_match = re.search(
        r"\n([\w\.]+[:\s\w\.,!?#^\'\"]+)\s*(at\s.*\n)", contents, re.MULTILINE
    )
    if not stack_match or len(stack_match.groups()) != 2:
        return 0

    top_frame = stack_match.group(1).rstrip() + " " + stack_match.group(2)
    remainder = contents[stack_match.span()[1] :]
    logging.error(f"PROCESS-CRASH | {proc} | {top_frame}{remainder}")
    return 1
+
+
def process_artifacts(artifact_type):
    """Copy and parse artifacts of the given type for every failed device.

    Args:
        artifact_type (ArtifactType): The type of artifact to process.
    Returns:
        Number of crashes reported in treeherder format.
    """
    matrix_path = Worker.RESULTS_DIR.value + "/" + ArtifactType.MATRIX_IDS.value
    matrix_ids_artifact = load_matrix_ids_artifact(matrix_path)
    failed_devices = fetch_failed_device_names(matrix_ids_artifact)
    root_gcs_path = get_gcs_path(matrix_ids_artifact)

    if not root_gcs_path:
        logging.error("Could not find root GCS path in matrix file.")
        return 0
    if not failed_devices:
        return 0

    crash_count = 0
    for device in failed_devices:
        device_artifacts = fetch_artifacts(root_gcs_path, device, artifact_type.value)
        if not device_artifacts:
            logging.info(f"No artifacts found for device: {device}")
            continue
        for artifact in device_artifacts:
            # Pull the artifact locally, then scan the local copy for a crash.
            gsutil_cp(artifact, Worker.RESULTS_DIR.value)
            local_copy = os.path.join(
                Worker.RESULTS_DIR.value, os.path.basename(artifact)
            )
            crash_count += parse_crash_log(local_copy)
    return crash_count
+
+
def main():
    # Entry point: configure logging, verify gsutil is runnable (exits with
    # status 1 if not), then fetch and parse crash logs for failed devices.
    # Returns the number of crashes reported (treeherder format).
    setup_logging()
    check_gsutil_availability()
    return process_artifacts(ArtifactType.CRASH_LOG)


if __name__ == "__main__":
    # The crash count becomes the process exit status: non-zero means
    # at least one crash was found and reported.
    sys.exit(main())
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-beta.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-beta.yml
new file mode 100644
index 0000000000..ad8d5570e6
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-beta.yml
@@ -0,0 +1,39 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 1
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - class org.mozilla.fenix.ui.HistoryTest#verifyHistoryMenuWithHistoryItemsTest
+ - class org.mozilla.fenix.ui.SettingsSearchTest#verifyShowSearchSuggestionsToggleTest
+ - class org.mozilla.fenix.ui.CollectionTest#deleteCollectionTest
+ - class org.mozilla.fenix.ui.HistoryTest#noHistoryInPrivateBrowsingTest
+ - class org.mozilla.fenix.ui.NoNetworkAccessStartupTests#noNetworkConnectionStartupTest
+
+ device:
+ - model: Pixel2.arm
+ version: 30
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: 2
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-legacy-api-tests.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-legacy-api-tests.yml
new file mode 100644
index 0000000000..8712be3613
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-legacy-api-tests.yml
@@ -0,0 +1,55 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 2
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - notPackage org.mozilla.fenix.screenshots
+ - notPackage org.mozilla.fenix.syncintegration
+ - notPackage org.mozilla.fenix.experimentintegration
+ - class org.mozilla.fenix.ui.MainMenuTest#goBackTest
+ - class org.mozilla.fenix.ui.MainMenuTest#goForwardTest
+ - class org.mozilla.fenix.ui.HistoryTest#verifyHistoryMenuWithHistoryItemsTest
+ - class org.mozilla.fenix.ui.CollectionTest#deleteCollectionTest
+ - class org.mozilla.fenix.ui.HistoryTest#noHistoryInPrivateBrowsingTest
+ - class org.mozilla.fenix.ui.TabbedBrowsingTest#openNewTabTest
+ - class org.mozilla.fenix.ui.TabbedBrowsingTest#openNewPrivateTabTest
+ - class org.mozilla.fenix.ui.TopSitesTest#openTopSiteInANewTabTest
+ - class org.mozilla.fenix.ui.BookmarksTest#addBookmarkTest
+
+ device:
+ - model: Pixel2.arm
+ version: 26
+ locale: en_US
+ - model: Pixel2.arm
+ version: 27
+ locale: en_US
+ - model: redfin
+ version: 30
+ locale: en_US
+ - model: x1q
+ version: 29
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: 50
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-robo-test.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-robo-test.yml
new file mode 100644
index 0000000000..813452db47
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-robo-test.yml
@@ -0,0 +1,39 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: false
+ timeout: 10m
+ async: false
+
+ app: /app/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+
+ device:
+ - model: java
+ version: 30
+ locale: en_US
+ - model: Pixel2.arm
+ version: 27
+ locale: en_US
+ - model: redfin
+ version: 30
+ locale: en_US
+ - model: oriole
+ version: 31
+ locale: en_US
+ - model: x1q
+ version: 29
+ locale: en_US
+
+ type: robo
+
+flank:
+ project: GOOGLE_PROJECT
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-screenshots-tests.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-screenshots-tests.yml
new file mode 100644
index 0000000000..d0ce0dfc86
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-screenshots-tests.yml
@@ -0,0 +1,35 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 1
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - package org.mozilla.fenix.screenshots
+
+ device:
+ - model: Pixel2.arm
+ version: 30
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: 1
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test-robo.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test-robo.yml
new file mode 100644
index 0000000000..503eae25f4
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test-robo.yml
@@ -0,0 +1,27 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: false
+ timeout: 5m
+ async: false
+
+ app: /app/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+
+ device:
+ - model: MediumPhone.arm
+ version: 30
+ locale: en_US
+
+ type: robo
+
+flank:
+ project: GOOGLE_PROJECT
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test.yml
new file mode 100644
index 0000000000..1e0009a6a3
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm-start-test.yml
@@ -0,0 +1,39 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 1
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - class org.mozilla.fenix.ui.HistoryTest#verifyHistoryMenuWithHistoryItemsTest
+ - class org.mozilla.fenix.ui.SettingsSearchTest#verifyShowSearchSuggestionsToggleTest
+ - class org.mozilla.fenix.ui.CollectionTest#deleteCollectionTest
+ - class org.mozilla.fenix.ui.HistoryTest#noHistoryInPrivateBrowsingTest
+ - class org.mozilla.fenix.ui.NoNetworkAccessStartupTests#noNetworkConnectionStartupTest
+
+ device:
+ - model: Pixel2.arm
+ version: 30
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: 2
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm64-v8a.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm64-v8a.yml
new file mode 100644
index 0000000000..d679f8ea76
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-arm64-v8a.yml
@@ -0,0 +1,36 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 1
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - notPackage org.mozilla.fenix.screenshots
+ - notPackage org.mozilla.fenix.syncintegration
+ - notPackage org.mozilla.fenix.experimentintegration
+
+ device:
+ - model: Pixel2.arm
+ version: 30
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: 100
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/flank-x86.yml b/mobile/android/fenix/automation/taskcluster/androidTest/flank-x86.yml
new file mode 100644
index 0000000000..0b6e13b0b2
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/flank-x86.yml
@@ -0,0 +1,37 @@
+# Google Cloud Documentation: https://cloud.google.com/sdk/gcloud/reference/firebase/test/android/run
+# Flank Documentation: https://flank.github.io/flank/
+gcloud:
+ results-bucket: fenix_test_artifacts
+ record-video: true
+
+ timeout: 15m
+ async: false
+ num-flaky-test-attempts: 1
+
+ app: /app/path
+ test: /test/path
+
+ auto-google-login: false
+ use-orchestrator: true
+ environment-variables:
+ clearPackageData: true
+ directories-to-pull:
+ - /sdcard/screenshots
+ performance-metrics: true
+
+ test-targets:
+ - notPackage org.mozilla.fenix.screenshots
+ - notPackage org.mozilla.fenix.syncintegration
+ - notPackage org.mozilla.fenix.experimentintegration
+
+ device:
+ - model: Pixel2
+ version: 30
+ locale: en_US
+
+flank:
+ project: GOOGLE_PROJECT
+ max-test-shards: -1
+ num-test-runs: 1
+ output-style: compact
+ full-junit-result: true
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test-fromfile.py b/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test-fromfile.py
new file mode 100644
index 0000000000..7b853d5e9c
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test-fromfile.py
@@ -0,0 +1,97 @@
+#!/usr/bin/python3
+
+import argparse
+import sys
+import xml
+from pathlib import Path
+
+from beautifultable import BeautifulTable
+from junitparser import Attr, Failure, JUnitXml, TestSuite
+
+
def parse_args(cmdln_args):
    """Build the command-line parser and parse the given argument list.

    Args:
        cmdln_args: raw argument strings, e.g. sys.argv[1:].

    Returns:
        argparse.Namespace with a single attribute, `results`: a Path to
        the directory holding the task artifact results.
    """
    arg_parser = argparse.ArgumentParser(
        description="Parse and print UI test JUnit results"
    )
    arg_parser.add_argument(
        "--results",
        required=True,
        type=Path,
        help="Directory containing task artifact results",
    )
    return arg_parser.parse_args(args=cmdln_args)
+
+
class test_suite(TestSuite):
    # Extends junitparser's TestSuite with the "flakes" XML attribute
    # that Flank emits on each <testsuite> in FullJUnitReport.xml, so
    # the report parser can distinguish flaky suites from failing ones.
    flakes = Attr()
+
+
def parse_print_failure_results(results):
    """Print a table of flaky/failed test cases and count the failures.

    Args:
        results: iterable of JUnit <testsuite> elements (a JUnitXml
            object loaded from Flank's FullJUnitReport.xml).

    Returns:
        int: number of hard failures found (flaky cases are reported in
        the table but do not count as failures).
    """
    table = BeautifulTable(maxwidth=256)
    table.columns.header = ["UI Test", "Outcome", "Details"]
    table.columns.alignment = BeautifulTable.ALIGN_LEFT
    table.set_style(BeautifulTable.STYLE_GRID)

    failure_count = 0
    for suite in results:
        # Re-wrap the element so the custom "flakes" attribute is readable.
        cur_suite = test_suite.fromelem(suite)
        if cur_suite.flakes != "0":
            # Suite had flaky runs: report each case's first result entry
            # as "Flaky". NOTE(review): the `if case.result:` check is
            # redundant inside `for entry in case.result` — the loop body
            # only runs when case.result is non-empty.
            for case in suite:
                for entry in case.result:
                    if case.result:
                        table.rows.append(
                            [
                                "%s#%s" % (case.classname, case.name),
                                "Flaky",
                                entry.text.replace("\t", " "),
                            ]
                        )
                        # Only the first result entry per case is reported.
                        break
        else:
            for case in suite:
                for entry in case.result:
                    if isinstance(entry, Failure):
                        test_id = "%s#%s" % (case.classname, case.name)
                        details = entry.text.replace("\t", " ")
                        table.rows.append(
                            [
                                test_id,
                                "Failure",
                                details,
                            ]
                        )
                        # Treeherder log-viewer parses this exact prefix.
                        print(f"TEST-UNEXPECTED-FAIL | {test_id} | {details}")
                        failure_count += 1
                        # Only the first Failure entry per case is counted.
                        break
    print(table)
    return failure_count
+
+
def load_results_file(filename):
    """Load a JUnit XML report from disk.

    Args:
        filename: path to the JUnit XML file (str or Path).

    Returns:
        A parsed JUnitXml object, or None if the file could not be
        opened or contains malformed XML (the error is printed).
    """
    ret = None
    try:
        # Context manager guarantees the handle is closed even if
        # parsing raises (the original closed it in a finally block).
        with open(filename, "r") as f:
            try:
                ret = JUnitXml.fromfile(f)
            except xml.etree.ElementTree.ParseError as e:
                # Name the offending file instead of the literal
                # "(unknown)" so CI logs point at the actual artifact.
                print(f"Error parsing {filename}: {e}")
    except IOError as e:
        print(e)

    return ret
+
+
def main():
    """Entry point: load FullJUnitReport.xml and report failed cases.

    Returns:
        int: the failure count, which becomes the process exit code
        (0 means every test passed or the report could not be loaded).
    """
    args = parse_args(sys.argv[1:])

    report_path = args.results.joinpath("FullJUnitReport.xml")
    report = load_results_file(report_path)
    if not report:
        return 0
    return parse_print_failure_results(report)


if __name__ == "__main__":
    sys.exit(main())
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test.py b/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test.py
new file mode 100644
index 0000000000..de41a6c7e7
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/parse-ui-test.py
@@ -0,0 +1,89 @@
+#!/usr/bin/python3
+
+from __future__ import print_function
+
+import argparse
+import json
+import sys
+from pathlib import Path
+
+import yaml
+
+
def parse_args(cmdln_args):
    """Build and run the command-line parser for the UI test log parser.

    Args:
        cmdln_args: raw argument strings, e.g. sys.argv[1:].

    Returns:
        argparse.Namespace with: output_md / log (open file handles),
        results (Path), exit_code (int), device_type (str), and the
        report_treeherder_failures boolean flag.
    """
    # Fixed typo in user-facing description: "logs an results".
    parser = argparse.ArgumentParser(description="Parse UI test logs and results")
    parser.add_argument(
        "--output-md",
        type=argparse.FileType("w", encoding="utf-8"),
        help="Output markdown file.",
        required=True,
    )
    parser.add_argument(
        "--log",
        type=argparse.FileType("r", encoding="utf-8"),
        help="Log output of flank.",
        required=True,
    )
    parser.add_argument(
        "--results", type=Path, help="Directory containing flank results", required=True
    )
    parser.add_argument(
        "--exit-code", type=int, help="Exit code of flank.", required=True
    )
    parser.add_argument("--device-type", help="Type of device ", required=True)
    parser.add_argument(
        "--report-treeherder-failures",
        help="Report failures in treeherder format.",
        required=False,
        action="store_true",
    )
    return parser.parse_args(args=cmdln_args)
+
+
def extract_android_args(log):
    # The flank log echoes its parsed configuration between the
    # "AndroidArgs" and "RunTests" section headers; slice out that YAML
    # blob and deserialize it. NOTE(review): raises IndexError if the
    # "AndroidArgs" marker is absent — assumes every flank log contains
    # both markers; confirm against current flank output.
    return yaml.safe_load(log.split("AndroidArgs\n")[1].split("RunTests\n")[0])
+
+
def main():
    # Entry point: render a markdown summary of a flank run (devices,
    # per-matrix results, reference links) for the GitHub check, and
    # optionally echo failures to stdout in treeherder format.
    args = parse_args(sys.argv[1:])

    log = args.log.read()
    # matrix_ids.json maps each Firebase test matrix to its outcome,
    # web link and per-axis details.
    matrix_ids = json.loads(args.results.joinpath("matrix_ids.json").read_text())

    android_args = extract_android_args(log)

    # Deliberately shadows the builtin: every print() below writes to
    # the markdown output file, not stdout.
    print = args.output_md.write

    print("# Devices\n")
    print(yaml.safe_dump(android_args["gcloud"]["device"]))

    print("# Results\n")
    print("| Matrix | Result | Firebase Test Lab | Details\n")
    print("| --- | --- | --- | --- |\n")
    for matrix, matrix_result in matrix_ids.items():
        for axis in matrix_result["axes"]:
            # NOTE(review): there is no space before the second "|", so
            # the rendered cell reads e.g. "success| [Firebase..." —
            # markdown still parses it, but confirm this is intended.
            print(
                f"| {matrix_result['matrixId']} | {matrix_result['outcome']}"
                f"| [Firebase Test Lab]({matrix_result['webLink']}) | {axis['details']}\n"
            )
            if (
                args.report_treeherder_failures
                and matrix_result["outcome"] != "success"
                and matrix_result["outcome"] != "flaky"
            ):
                # write failures to test log in format known to treeherder logviewer
                # (sys.stdout because print is bound to the md file here).
                sys.stdout.write(
                    f"TEST-UNEXPECTED-FAIL | {matrix_result['outcome']} | {matrix_result['webLink']} | {axis['details']}\n"
                )
    print("---\n")
    print("# References & Documentation\n")
    print(
        "* [Automated UI Testing Documentation](https://github.com/mozilla-mobile/shared-docs/blob/main/android/ui-testing.md)\n"
    )
    print(
        "* Mobile Test Engineering on [Confluence](https://mozilla-hub.atlassian.net/wiki/spaces/MTE/overview) | [Slack](https://mozilla.slack.com/archives/C02KDDS9QM9) | [Alerts](https://mozilla.slack.com/archives/C0134KJ4JHL)\n"
    )


if __name__ == "__main__":
    main()
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/robo-test.sh b/mobile/android/fenix/automation/taskcluster/androidTest/robo-test.sh
new file mode 100755
index 0000000000..c2b6ee76f9
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/robo-test.sh
@@ -0,0 +1,101 @@
#!/usr/bin/env bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This script does the following:
# 1. Retrieves gcloud service account token
# 2. Activates gcloud service account
# 3. Connects to Google's Firebase Test Lab (using Flank)
# 4. Executes Robo test
# 5. Puts any artifacts into the test_artifacts folder
#
# Usage: robo-test.sh <device_type> <app_apk>
# Required env: GOOGLE_PROJECT, GOOGLE_APPLICATION_CREDENTIALS

# If a command fails then do not proceed and fail this script too.
set -e

# Print usage to stderr.
# FIX: previously the script called display_help without defining it,
# so the parameter-check branch died with "command not found" (127)
# before reaching its intended "exit 1".
display_help() {
    echo "Usage: $0 device_type [app_apk]" >&2
    echo "Example: $0 arm-robo-test app.apk" >&2
}

# Echo the absolute path of the given (possibly relative) filename.
get_abs_filename() {
  relative_filename="$1"
  echo "$(cd "$(dirname "$relative_filename")" && pwd)/$(basename "$relative_filename")"
}

# Basic parameter check
if [[ $# -lt 1 ]]; then
    echo "Error: please provide a Flank configuration"
    display_help
    exit 1
fi

device_type="$1" # flank-arm-robo-test.yml | flank-x86-robo-test.yml
APK_APP="$2"
JAVA_BIN="/usr/bin/java"
PATH_TEST="./automation/taskcluster/androidTest"
FLANK_BIN="/builds/worker/test-tools/flank.jar"
ARTIFACT_DIR="/builds/worker/artifacts"
RESULTS_DIR="${ARTIFACT_DIR}/results"

echo
echo "ACTIVATE SERVICE ACCT"
echo
gcloud config set project "$GOOGLE_PROJECT"
gcloud auth activate-service-account --key-file "$GOOGLE_APPLICATION_CREDENTIALS"
echo
echo
echo

# From here on a failing test run must not abort the script: we still
# want to parse and publish the results below.
set +e

flank_template="${PATH_TEST}/flank-${device_type}.yml"
if [ -f "$flank_template" ]; then
    echo "Using Flank template: $flank_template"
else
    echo "Error: Flank template not found: $flank_template"
    exit 1
fi

# Quoted so paths containing spaces survive.
APK_APP="$(get_abs_filename "$APK_APP")"

# Report the run outcome and render the markdown summary consumed by
# the GitHub check (reads the global $exitcode).
function failure_check() {
    echo
    echo
    if [[ $exitcode -ne 0 ]]; then
        echo "FAILURE: Robo test run failed, please check above URL"
    else
        echo "Robo test was successful!"
    fi

    echo
    echo "RESULTS"
    echo

    mkdir -p /builds/worker/artifacts/github
    chmod +x "$PATH_TEST/parse-ui-test.py"
    "$PATH_TEST/parse-ui-test.py" \
        --exit-code "${exitcode}" \
        --log flank.log \
        --results "${RESULTS_DIR}" \
        --output-md "${ARTIFACT_DIR}/github/customCheckRunText.md" \
        --device-type "${device_type}"
}

echo
echo "FLANK VERSION"
echo
$JAVA_BIN -jar "$FLANK_BIN" --version
echo
echo

echo
echo "EXECUTE ROBO TEST"
echo
# pipefail so the flank exit code survives the pipe into tee.
set -o pipefail && $JAVA_BIN -jar "$FLANK_BIN" android run \
    --config="$flank_template" \
    --app="$APK_APP" \
    --local-result-dir="${RESULTS_DIR}" \
    --project="$GOOGLE_PROJECT" \
    --client-details=commit="${MOBILE_HEAD_REV:-None}",pullRequest="${PULL_REQUEST_NUMBER:-None}" \
    | tee flank.log

exitcode=$?
failure_check

exit $exitcode
diff --git a/mobile/android/fenix/automation/taskcluster/androidTest/ui-test.sh b/mobile/android/fenix/automation/taskcluster/androidTest/ui-test.sh
new file mode 100755
index 0000000000..3c5b29c172
--- /dev/null
+++ b/mobile/android/fenix/automation/taskcluster/androidTest/ui-test.sh
@@ -0,0 +1,150 @@
#!/usr/bin/env bash
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

# This script does the following:
# 1. Retrieves gcloud service account token
# 2. Activates gcloud service account
# 3. Connects to google Firebase (using TestArmada's Flank tool)
# 4. Executes UI tests
# 5. Puts test artifacts into the test_artifacts folder

# NOTE:
# Flank supports sharding across multiple devices at a time, but gcloud API
# only supports 1 defined APK per test run.


# If a command fails then do not proceed and fail this script too.
set -e

#########################
# The command line help #
#########################
display_help() {
    echo "Usage: $0 Build_Variant [Number_Shards...]"
    echo
    echo "Examples:"
    echo "To run UI tests on ARM device shard (1 test / shard)"
    echo "$ ui-test.sh arm64-v8a -1"
    echo
    echo "To run UI tests on X86 device (on 3 shards)"
    echo "$ ui-test.sh x86 3"
    echo
}

# Echo the absolute path of the given (possibly relative) filename.
get_abs_filename() {
  relative_filename="$1"
  echo "$(cd "$(dirname "$relative_filename")" && pwd)/$(basename "$relative_filename")"
}


# Basic parameter check
if [[ $# -lt 1 ]]; then
    echo "Error: please provide at least one build variant (arm|x86)"
    display_help
    exit 1
fi

device_type="$1" # arm64-v8a | armeabi-v7a | x86_64 | x86
APK_APP="$2"
APK_TEST="$3"
# Optional 4th arg overrides the shard count from the flank config.
# ([[ -n ]] replaces the double-negative [[ ! -z ]].)
if [[ -n "$4" ]]; then
    num_shards=$4
fi

JAVA_BIN="/usr/bin/java"
PATH_TEST="./automation/taskcluster/androidTest"
FLANK_BIN="/builds/worker/test-tools/flank.jar"
ARTIFACT_DIR="/builds/worker/artifacts"
RESULTS_DIR="${ARTIFACT_DIR}/results"

echo
echo "ACTIVATE SERVICE ACCT"
echo
# this is where the Google Testcloud project ID is set
gcloud config set project "$GOOGLE_PROJECT"
echo

gcloud auth activate-service-account --key-file "$GOOGLE_APPLICATION_CREDENTIALS"
echo
echo

# Disable exiting on error. If the tests fail we want to continue
# and try to download the artifacts. We will exit with the actual error code later.
set +e

if [[ "${device_type}" =~ ^(arm64-v8a|armeabi-v7a|x86_64|x86)$ ]]; then
    flank_template="${PATH_TEST}/flank-${device_type}.yml"
elif [[ "${device_type}" == "arm-start-test" ]]; then
    flank_template="${PATH_TEST}/flank-arm-start-test.yml"
elif [[ "${device_type}" == "arm-screenshots-tests" ]]; then
    flank_template="${PATH_TEST}/flank-arm-screenshots-tests.yml"
elif [[ "${device_type}" == "arm-beta-tests" ]]; then
    flank_template="${PATH_TEST}/flank-arm-beta.yml"
elif [[ "${device_type}" == "arm-legacy-api-tests" ]]; then
    flank_template="${PATH_TEST}/flank-arm-legacy-api-tests.yml"
else
    # FIX: abort immediately. Previously this only set exitcode=1, then
    # still invoked flank with an unset $flank_template and clobbered
    # the exit code with flank's.
    echo "FAILURE: flank config file not found!" >&2
    exit 1
fi

APK_APP="$(get_abs_filename "$APK_APP")"
APK_TEST="$(get_abs_filename "$APK_TEST")"
echo "device_type: ${device_type}"
echo "APK_APP: ${APK_APP}"
echo "APK_TEST: ${APK_TEST}"

# function to exit script with exit code from test run.
# (Only 0 if all test executions passed)
function failure_check() {
    echo
    echo
    if [[ $exitcode -ne 0 ]]; then
        echo "FAILURE: UI test run failed, please check above URL"
    else
        echo "All UI test(s) have passed!"
    fi
    echo
    echo "RESULTS"
    echo

    mkdir -p /builds/worker/artifacts/github
    chmod +x "$PATH_TEST/parse-ui-test.py"
    "$PATH_TEST/parse-ui-test.py" \
        --exit-code "${exitcode}" \
        --log flank.log \
        --results "${RESULTS_DIR}" \
        --output-md "${ARTIFACT_DIR}/github/customCheckRunText.md" \
        --device-type "${device_type}"

    chmod +x "$PATH_TEST/parse-ui-test-fromfile.py"
    "$PATH_TEST/parse-ui-test-fromfile.py" \
        --results "${RESULTS_DIR}"
}

echo
echo "FLANK VERSION"
echo
$JAVA_BIN -jar "$FLANK_BIN" --version
echo
echo

echo
echo "EXECUTE TEST(S)"
echo
# Note that if --local-results-dir is "results", timestamped sub-directory will
# contain the results. For any other value, the directory itself will have the results.
#
# FIX: only pass --max-test-shards when a shard count was supplied;
# previously a malformed empty "--max-test-shards=" was passed whenever
# the 4th argument was omitted. Without the flag, the value from the
# flank config file applies.
shard_arg=()
if [[ -n "${num_shards:-}" ]]; then
    shard_arg=(--max-test-shards="$num_shards")
fi
set -o pipefail && $JAVA_BIN -jar "$FLANK_BIN" android run \
    --config="$flank_template" \
    "${shard_arg[@]}" \
    --app="$APK_APP" --test="$APK_TEST" \
    --local-result-dir="${RESULTS_DIR}" \
    --project="$GOOGLE_PROJECT" \
    --client-details=matrixLabel="${PULL_REQUEST_NUMBER:-None}" \
    | tee flank.log

exitcode=$?
failure_check

exit $exitcode