Commit d64fc595 authored by Quan's avatar Quan

Initial commit

parents
Pipeline #840 failed with stages
#Thu Sep 23 09:21:22 ICT 2021
gradle.version=5.6.4
# Default ignored files
/shelf/
/workspace.xml
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="1.8" />
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="testRunner" value="GRADLE" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleJvm" value="corretto-1.8" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
<option value="$PROJECT_DIR$/app" />
</set>
</option>
<option name="resolveModulePerSourceSet" value="false" />
</GradleProjectSettings>
</option>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="BintrayJCenter" />
<option name="name" value="BintrayJCenter" />
<option name="url" value="https://jcenter.bintray.com/" />
</remote-repository>
<remote-repository>
<option name="id" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/google/m2repository" />
<option name="name" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/google/m2repository" />
<option name="url" value="file:$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/google/m2repository" />
</remote-repository>
<remote-repository>
<option name="id" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/android/m2repository" />
<option name="name" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/android/m2repository" />
<option name="url" value="file:$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/android/m2repository" />
</remote-repository>
<remote-repository>
<option name="id" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/m2repository" />
<option name="name" value="$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/m2repository" />
<option name="url" value="file:$PROJECT_DIR$/../../../tuth/DATA/AIProject/securityandsafetythings_sdk/extras/m2repository" />
</remote-repository>
<remote-repository>
<option name="id" value="Google" />
<option name="name" value="Google" />
<option name="url" value="https://dl.google.com/dl/android/maven2/" />
</remote-repository>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ExternalStorageConfigurationManager" enabled="true" />
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="corretto-1.8" project-jdk-type="JavaSDK" />
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/java_mobile_multi_thread_sample.iml" filepath="$PROJECT_DIR$/.idea/java_mobile_multi_thread_sample.iml" />
</modules>
</component>
</project>
\ No newline at end of file
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="$PROJECT_DIR$" vcs="Git" />
</component>
</project>
\ No newline at end of file
/*
 * Copyright (c) 2016-2017 Qualcomm Technologies, Inc.
 * All Rights Reserved.
 * Confidential and Proprietary - Qualcomm Technologies, Inc.
 */
apply plugin: 'com.android.application'

// NOTE(review): declaring repositories via allprojects{} from a module-level
// build file is unusual; this normally belongs in the root build.gradle.
allprojects {
    repositories {
        // jcenter() has been sunset (read-only since 2021); mavenCentral()
        // is added first as the preferred source for migrated artifacts.
        mavenCentral()
        jcenter()
        flatDir {
            dirs 'libs' // local AARs, e.g. snpe-release.aar
        }
    }
}

android {
    compileSdkVersion 29
    // Fixed: compileSdkVersion 29 requires build-tools >= 29.x; the previous
    // value '27.0.0' was below that minimum and would be rejected by AGP.
    buildToolsVersion '29.0.2'
    defaultConfig {
        applicationId "viettel.iva.ai.multithreadsample"
        minSdkVersion 26
        targetSdkVersion 29
        versionCode 1
        versionName "1.0"
    }
    buildTypes {
        release {
            minifyEnabled false
            proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
        }
    }
    packagingOptions {
        // Multiple javacpp-presets artifacts ship the same META-INF entry;
        // keep the first one found instead of failing the merge.
        pickFirst "META-INF/DEPENDENCIES"
    }
}

// Version catalog for the JavaCV/JavaCPP native dependency stack below.
ext {
    versions = [
            'javacv'   : '1.4.4',
            'opencv'   : '4.0.1',
            'ffmpeg'   : '4.1',
            'flandmark': '1.07',
            'openblas' : '0.3.5',
            'leptonica': '1.77.0'
    ]
}

dependencies {
    // Fixed: 'compile' and 'testCompile' are deprecated since AGP 3.0 and
    // removed in AGP 7; replaced with 'implementation'/'testImplementation'.
    implementation(name: 'snpe-release', ext: 'aar')
    testImplementation 'junit:junit:4.12'
    implementation(group: 'org.bytedeco', name: 'javacv-platform', version: versions.javacv) {
        // The -platform artifact pulls natives for every OS/arch; exclude
        // them and add only the Android ABIs this app ships (below).
        exclude group: 'org.bytedeco.javacpp-presets'
    }
    implementation group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: "${versions.ffmpeg}-${versions.javacv}"
    implementation group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: "${versions.ffmpeg}-${versions.javacv}", classifier: 'android-arm'
    implementation group: 'org.bytedeco.javacpp-presets', name: 'ffmpeg', version: "${versions.ffmpeg}-${versions.javacv}", classifier: 'android-arm64'
    implementation group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: "${versions.opencv}-${versions.javacv}"
    implementation group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: "${versions.opencv}-${versions.javacv}", classifier: "android-arm"
    implementation group: 'org.bytedeco.javacpp-presets', name: 'opencv', version: "${versions.opencv}-${versions.javacv}", classifier: "android-arm64"
    implementation 'org.apache.zookeeper:zookeeper:3.5.5'
}
/* AUTO-GENERATED FILE. DO NOT MODIFY.
 *
 * This class was automatically generated by the
 * gradle plugin from the resource data it found. It
 * should not be modified by hand.
 */
package com.qualcomm.qti.snpe;
// Generated resource index for the snpe-release library package: only the
// single string resource the library itself declares appears here.
public final class R {
// Non-instantiable constant holder.
private R() {}
public static final class string {
private string() {}
// Resource id assigned by aapt at build time for R.string.snpe_name.
public static final int snpe_name = 0x7f070016;
}
}
/* AUTO-GENERATED FILE. DO NOT MODIFY.
 *
 * This class was automatically generated by the
 * aapt tool from the resource data it found. It
 * should not be modified by hand.
 */
package com.qualcomm.qti.snpe.imageclassifiers;
// Generated resource index for the application package. Each constant is the
// aapt-assigned id of a resource under app/src/main/res; the inner classes
// mirror the resource type directories (color, drawable, id, layout, ...).
public final class R {
// Color resources from res/values/colors.xml.
public static final class color {
public static final int colorAccent=0x7f010000;
public static final int colorDivider=0x7f010001;
public static final int colorPrimary=0x7f010002;
public static final int colorPrimaryDark=0x7f010003;
public static final int colorPrimaryLight=0x7f010004;
public static final int colorPrimaryText=0x7f010005;
public static final int colorSecondaryText=0x7f010006;
public static final int colorTextIcons=0x7f010007;
}
// Drawable resources from res/drawable/.
public static final class drawable {
public static final int button=0x7f020000;
public static final int classification_background=0x7f020001;
}
// View ids referenced from the layout files.
public static final class id {
public static final int main_content=0x7f030000;
public static final int model_build_button=0x7f030001;
public static final int model_builder_options_row=0x7f030002;
public static final int model_builder_output_option=0x7f030003;
public static final int model_builder_runtime_option=0x7f030004;
public static final int model_builder_runtime_spinner=0x7f030005;
public static final int model_builder_tensor_option=0x7f030006;
public static final int model_builder_tensor_spinner=0x7f030007;
public static final int model_builder_values_row=0x7f030008;
public static final int model_image=0x7f030009;
public static final int model_image_grid=0x7f03000a;
public static final int model_name=0x7f03000b;
public static final int model_overview_classification_text=0x7f03000c;
public static final int model_overview_dimensions_label=0x7f03000d;
public static final int model_overview_dimensions_text=0x7f03000e;
public static final int model_overview_labels_row=0x7f03000f;
public static final int model_overview_layers_spinner=0x7f030010;
public static final int model_overview_name_label=0x7f030011;
public static final int model_overview_name_text=0x7f030012;
public static final int model_overview_values_row=0x7f030013;
public static final int model_overview_version_label=0x7f030014;
public static final int model_overview_version_text=0x7f030015;
public static final int model_statistics_init_label=0x7f030016;
public static final int model_statistics_init_text=0x7f030017;
public static final int model_statistics_java_execute_label=0x7f030018;
public static final int model_statistics_java_execute_text=0x7f030019;
public static final int model_statistics_labels_row=0x7f03001a;
public static final int model_statistics_values_row=0x7f03001b;
public static final int models_list=0x7f03001c;
public static final int models_load_status=0x7f03001d;
public static final int unsignedPD_switch=0x7f03001e;
}
// Layout resources from res/layout/.
public static final class layout {
public static final int activity_main=0x7f040000;
public static final int fragment_model=0x7f040001;
public static final int model_image_layout=0x7f040002;
public static final int models_list=0x7f040003;
public static final int models_list_item=0x7f040004;
}
// Launcher icon from res/mipmap-xhdpi/.
public static final class mipmap {
public static final int launcher=0x7f050000;
}
// Raw assets: two .dlc model files and a test image from res/raw/.
public static final class raw {
public static final int retina_480x850_quantize_v11=0x7f060000;
public static final int retina_mb_nosm_h288_w512_quantized=0x7f060001;
public static final int test_image=0x7f060002;
}
// String resources merged from res/values/strings.xml and the snpe-release
// library (snpe_name).
public static final class string {
public static final int app_name=0x7f070000;
public static final int build_network=0x7f070001;
public static final int builder_runtime=0x7f070002;
public static final int builder_tensor_format=0x7f070003;
public static final int classification_failed=0x7f070004;
public static final int classification_hint=0x7f070005;
public static final int loading_models=0x7f070006;
public static final int loading_network=0x7f070007;
public static final int model_dimensions=0x7f070008;
public static final int model_dimensions_placeholder=0x7f070009;
public static final int model_extraction_failed=0x7f07000a;
public static final int model_load_failed=0x7f07000b;
public static final int model_name=0x7f07000c;
public static final int model_name_placeholder=0x7f07000d;
public static final int model_not_loaded=0x7f07000e;
public static final int model_statistic_execute_placeholder=0x7f07000f;
public static final int model_statistic_init_placeholder=0x7f070010;
public static final int model_version=0x7f070011;
public static final int model_version_placeholder=0x7f070012;
public static final int models_header=0x7f070013;
public static final int not_available=0x7f070014;
public static final int output_layers=0x7f070015;
public static final int snpe_name=0x7f070016;
public static final int snpe_version=0x7f070017;
public static final int statistic_init=0x7f070018;
public static final int statistic_java_execute=0x7f070019;
public static final int statistic_native_execute=0x7f07001a;
}
// Style resources from res/values/styles.xml.
public static final class style {
public static final int AppTheme=0x7f080000;
public static final int itemKey=0x7f080001;
public static final int itemValue=0x7f080002;
}
}
\ No newline at end of file
/**
 * Automatically generated file. DO NOT MODIFY
 */
package com.qualcomm.qti.snpe.imageclassifiers;
// Build-time constants generated by the Android Gradle plugin for the
// 'debug' variant of this application module.
public final class BuildConfig {
// Boolean.parseBoolean keeps DEBUG from being a compile-time constant, so
// javac does not fold away code guarded by it.
public static final boolean DEBUG = Boolean.parseBoolean("true");
public static final String APPLICATION_ID = "viettel.iva.ai.multithreadsample";
public static final String BUILD_TYPE = "debug";
// FLAVOR is empty because the module defines no product flavors.
public static final String FLAVOR = "";
public static final int VERSION_CODE = 1;
public static final String VERSION_NAME = "1.0";
}
[{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"}]
\ No newline at end of file
{
"logs": [
{
"outputFile": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/values_values.arsc.flat",
"map": [
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/styles.xml",
"from": {
"startLines": "6,12,17",
"startColumns": "4,4,4",
"startOffsets": "166,473,634",
"endLines": "10,15,20",
"endColumns": "12,12,12",
"endOffsets": "467,628,793"
},
"to": {
"startLines": "37,42,46",
"startColumns": "4,4,4",
"startOffsets": "2112,2418,2578",
"endLines": "41,45,49",
"endColumns": "12,12,12",
"endOffsets": "2413,2573,2737"
}
},
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/colors.xml",
"from": {
"startLines": "9,13,7,8,10,11,12,14",
"startColumns": "4,4,4,4,4,4,4,4",
"startOffsets": "303,505,205,252,349,401,452,552",
"endColumns": "45,46,46,50,51,50,52,48",
"endOffsets": "344,547,247,298,396,447,500,596"
},
"to": {
"startLines": "2,3,4,5,6,7,8,9",
"startColumns": "4,4,4,4,4,4,4,4",
"startOffsets": "55,101,148,195,246,298,349,402",
"endColumns": "45,46,46,50,51,50,52,48",
"endOffsets": "96,143,190,241,293,344,397,446"
}
},
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/strings.xml",
"from": {
"startLines": "6,30,22,23,18,31,7,16,12,13,9,14,10,11,17,28,27,19,20,8,29,15,21,24,25,26",
"startColumns": "4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4",
"startOffsets": "166,1618,1139,1191,898,1675,226,768,524,580,343,645,420,465,831,1502,1432,971,1021,287,1572,712,1079,1248,1300,1364",
"endColumns": "59,56,51,56,72,77,60,62,55,64,76,66,44,58,66,69,69,49,57,55,45,55,59,51,63,67",
"endOffsets": "221,1670,1186,1243,966,1748,282,826,575,640,415,707,460,519,893,1567,1497,1016,1074,338,1613,763,1134,1295,1359,1427"
},
"to": {
"startLines": "10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33,34,35,36",
"startColumns": "4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4",
"startOffsets": "451,511,568,620,677,750,828,889,952,1008,1073,1150,1217,1262,1321,1388,1458,1528,1578,1636,1692,1738,1868,1928,1980,2044",
"endColumns": "59,56,51,56,72,77,60,62,55,64,76,66,44,58,66,69,69,49,57,55,45,55,59,51,63,67",
"endOffsets": "506,563,615,672,745,823,884,947,1003,1068,1145,1212,1257,1316,1383,1453,1523,1573,1631,1687,1733,1789,1923,1975,2039,2107"
}
},
{
"source": "/home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/res/values/values.xml",
"from": {
"startLines": "2",
"startColumns": "4",
"startOffsets": "55",
"endColumns": "73",
"endOffsets": "124"
},
"to": {
"startLines": "32",
"startColumns": "4",
"startOffsets": "1794",
"endColumns": "73",
"endOffsets": "1863"
}
}
]
}
]
}
\ No newline at end of file
{
"logs": [
{
"outputFile": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/incremental/mergeDebugResources/merged.dir/values/values.xml",
"map": [
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/styles.xml",
"from": {
"startLines": "6,12,17",
"startColumns": "4,4,4",
"startOffsets": "166,473,634",
"endLines": "10,15,20",
"endColumns": "12,12,12",
"endOffsets": "467,628,793"
},
"to": {
"startLines": "37,42,46",
"startColumns": "4,4,4",
"startOffsets": "2112,2418,2578",
"endLines": "41,45,49",
"endColumns": "12,12,12",
"endOffsets": "2413,2573,2737"
}
},
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/colors.xml",
"from": {
"startLines": "9,13,7,8,10,11,12,14",
"startColumns": "4,4,4,4,4,4,4,4",
"startOffsets": "303,505,205,252,349,401,452,552",
"endColumns": "45,46,46,50,51,50,52,48",
"endOffsets": "344,547,247,298,396,447,500,596"
},
"to": {
"startLines": "2,3,4,5,6,7,8,9",
"startColumns": "4,4,4,4,4,4,4,4",
"startOffsets": "55,101,148,195,246,298,349,402",
"endColumns": "45,46,46,50,51,50,52,48",
"endOffsets": "96,143,190,241,293,344,397,446"
}
},
{
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/strings.xml",
"from": {
"startLines": "6,30,22,23,18,31,7,16,12,13,9,14,10,11,17,28,27,19,20,8,29,15,21,24,25,26",
"startColumns": "4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4",
"startOffsets": "166,1618,1139,1191,898,1675,226,768,524,580,343,645,420,465,831,1502,1432,971,1021,287,1572,712,1079,1248,1300,1364",
"endColumns": "59,56,51,56,72,77,60,62,55,64,76,66,44,58,66,69,69,49,57,55,45,55,59,51,63,67",
"endOffsets": "221,1670,1186,1243,966,1748,282,826,575,640,415,707,460,519,893,1567,1497,1016,1074,338,1613,763,1134,1295,1359,1427"
},
"to": {
"startLines": "10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,33,34,35,36",
"startColumns": "4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4",
"startOffsets": "451,511,568,620,677,750,828,889,952,1008,1073,1150,1217,1262,1321,1388,1458,1528,1578,1636,1692,1738,1868,1928,1980,2044",
"endColumns": "59,56,51,56,72,77,60,62,55,64,76,66,44,58,66,69,69,49,57,55,45,55,59,51,63,67",
"endOffsets": "506,563,615,672,745,823,884,947,1003,1068,1145,1212,1257,1316,1383,1453,1523,1573,1631,1687,1733,1789,1923,1975,2039,2107"
}
},
{
"source": "/home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/res/values/values.xml",
"from": {
"startLines": "2",
"startColumns": "4",
"startOffsets": "55",
"endColumns": "73",
"endOffsets": "124"
},
"to": {
"startLines": "32",
"startColumns": "4",
"startOffsets": "1794",
"endColumns": "73",
"endOffsets": "1863"
}
}
]
}
]
}
\ No newline at end of file
[
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/drawable_classification_background.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/classification_background.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_models_list_item.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list_item.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_test_image.jpeg.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/test_image.jpeg"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/drawable_button.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/button.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_fragment_model.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/fragment_model.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_model_image_layout.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/model_image_layout.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_retina_480x850_quantize_v11.dlc.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_480x850_quantize_v11.dlc"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_activity_main.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/activity_main.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_retina_mb_nosm_h288_w512_quantized.dlc.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_mb_nosm_h288_w512_quantized.dlc"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_models_list.xml.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list.xml"
},
{
"merged": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/mipmap-xhdpi_launcher.png.flat",
"source": "/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/mipmap-xhdpi/launcher.png"
}
]
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!--
 * Copyright (c) 2016 Qualcomm Technologies, Inc.
 * All Rights Reserved.
 * Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<!-- NOTE(review): this appears to be a processed (merged) manifest from a
     debug build intermediate, not the hand-written source manifest - the
     uses-sdk element and versionCode/versionName look injected from
     build.gradle. Confirm before editing by hand. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="viettel.iva.ai.multithreadsample"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="26"
android:targetSdkVersion="29" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<!-- NOTE(review): android:debuggable and android:testOnly are presumably
     injected by the debug build; a testOnly APK installs only with
     `adb install -t`. Neither flag should appear in a release manifest. -->
<application
android:allowBackup="false"
android:debuggable="true"
android:icon="@mipmap/launcher"
android:label="@string/app_name"
android:largeHeap="true"
android:supportsRtl="true"
android:testOnly="true"
android:theme="@style/AppTheme" >
<activity android:name="com.qualcomm.qti.snpe.imageclassifiers.MainActivity" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<!-- Internal extraction service; not exported to other apps. -->
<service
android:name="com.qualcomm.qti.snpe.imageclassifiers.ModelExtractionService"
android:exported="false" >
</service>
</application>
</manifest>
\ No newline at end of file
[{"outputType":{"type":"BUNDLE_MANIFEST"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"AndroidManifest.xml","properties":{"packageId":"viettel.iva.ai.multithreadsample","split":"","minSdkVersion":"26"}}]
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/assets"/><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/shader_assets/debug/compileDebugShaders/out"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/debug/assets"/></dataSet></merger>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/jniLibs"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/debug/jniLibs"/></dataSet></merger>
\ No newline at end of file
#Thu Sep 23 09:21:35 ICT 2021
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/button.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/drawable_button.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_models_list.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/classification_background.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/drawable_classification_background.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list_item.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_models_list_item.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/activity_main.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_activity_main.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_480x850_quantize_v11.dlc=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_retina_480x850_quantize_v11.dlc.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/test_image.jpeg=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_test_image.jpeg.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_mb_nosm_h288_w512_quantized.dlc=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/raw_retina_mb_nosm_h288_w512_quantized.dlc.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/mipmap-xhdpi/launcher.png=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/mipmap-xhdpi_launcher.png.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/model_image_layout.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_model_image_layout.xml.flat
/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/fragment_model.xml=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/res/merged/debug/layout_fragment_model.xml.flat
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="colorAccent">#FF9800</color>
<color name="colorDivider">#BDBDBD</color>
<color name="colorPrimary">#03A9F4</color>
<color name="colorPrimaryDark">#0288D1</color>
<color name="colorPrimaryLight">#B3E5FC</color>
<color name="colorPrimaryText">#212121</color>
<color name="colorSecondaryText">#757575</color>
<color name="colorTextIcons">#FFFFFF</color>
<string name="app_name">SNPE Image Classifiers</string>
<string name="build_network">Build network!</string>
<string name="builder_runtime">Runtime</string>
<string name="builder_tensor_format">Tensor</string>
<string name="classification_failed">Classification failed.</string>
<string name="classification_hint">Click on an image to classify</string>
<string name="loading_models">Loading models...</string>
<string name="loading_network">Loading Network...</string>
<string name="model_dimensions">Dimensions</string>
<string name="model_dimensions_placeholder">[X,Y,Z]</string>
<string name="model_extraction_failed">Failed to extract model.</string>
<string name="model_load_failed">Failed to load model</string>
<string name="model_name">Model</string>
<string name="model_name_placeholder">Model A</string>
<string name="model_not_loaded">Model not yet loaded!</string>
<string name="model_statistic_execute_placeholder">20 ms</string>
<string name="model_statistic_init_placeholder">12345 ms</string>
<string name="model_version">Version</string>
<string name="model_version_placeholder">N/A</string>
<string name="models_header"><u>Models</u></string>
<string name="not_available">N/A</string>
<string name="output_layers">Output Layers</string>
<string name="snpe_name">Snapdragon Neural Processing Engine</string>
<string name="snpe_version">SNPE version: %1$s</string>
<string name="statistic_init">Net Init</string>
<string name="statistic_java_execute">Java Execute</string>
<string name="statistic_native_execute">Native Execute</string>
<style name="AppTheme" parent="android:Theme.Material.Light.DarkActionBar">
<item name="android:colorPrimary">@color/colorPrimary</item>
<item name="android:colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="android:colorAccent">@color/colorAccent</item>
</style>
<style name="itemKey">
<item name="android:textColor">@color/colorPrimaryText</item>
<item name="android:textSize">18sp</item>
</style>
<style name="itemValue">
<item name="android:textColor">@color/colorSecondaryText</item>
<item name="android:textSize">14sp</item>
</style>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config=":snpe-release:$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/res"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config=":snpe-release:" from-dependency="true" generated-set=":snpe-release:$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/res"><file path="/home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/res/values/values.xml" qualifiers=""><string name="snpe_name">Snapdragon Neural Processing Engine</string></file></source></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res"/><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/generated/res/rs/debug"/><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="main" generated-set="main$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res"><file name="button" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/button.xml" qualifiers="" type="drawable"/><file name="classification_background" 
path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/drawable/classification_background.xml" qualifiers="" type="drawable"/><file name="activity_main" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/activity_main.xml" qualifiers="" type="layout"/><file name="fragment_model" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/fragment_model.xml" qualifiers="" type="layout"/><file name="models_list" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list.xml" qualifiers="" type="layout"/><file name="models_list_item" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/models_list_item.xml" qualifiers="" type="layout"/><file name="model_image_layout" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/layout/model_image_layout.xml" qualifiers="" type="layout"/><file name="launcher" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/mipmap-xhdpi/launcher.png" qualifiers="xhdpi-v4" type="mipmap"/><file name="retina_480x850_quantize_v11" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_480x850_quantize_v11.dlc" qualifiers="" type="raw"/><file name="retina_mb_nosm_h288_w512_quantized" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/retina_mb_nosm_h288_w512_quantized.dlc" qualifiers="" type="raw"/><file name="test_image" path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/raw/test_image.jpeg" qualifiers="" type="raw"/><file path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/colors.xml" qualifiers=""><color name="colorPrimary">#03A9F4</color><color name="colorPrimaryDark">#0288D1</color><color name="colorAccent">#FF9800</color><color 
name="colorPrimaryLight">#B3E5FC</color><color name="colorPrimaryText">#212121</color><color name="colorSecondaryText">#757575</color><color name="colorDivider">#BDBDBD</color><color name="colorTextIcons">#FFFFFF</color></file><file path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/strings.xml" qualifiers=""><string name="app_name">SNPE Image Classifiers</string><string name="loading_models">Loading models...</string><string name="models_header"><u>Models</u></string><string name="model_extraction_failed">Failed to extract model.</string><string name="model_name">Model</string><string name="model_name_placeholder">Model A</string><string name="model_dimensions">Dimensions</string><string name="model_dimensions_placeholder">[X,Y,Z]</string><string name="model_load_failed">Failed to load model</string><string name="output_layers">Output Layers</string><string name="loading_network">Loading Network...</string><string name="model_not_loaded">Model not yet loaded!</string><string name="classification_failed">Classification failed.</string><string name="model_version">Version</string><string name="model_version_placeholder">N/A</string><string name="snpe_version">SNPE version: %1$s</string><string name="builder_runtime">Runtime</string><string name="builder_tensor_format">Tensor</string><string name="statistic_init">Net Init</string><string name="statistic_java_execute">Java Execute</string><string name="statistic_native_execute">Native Execute</string><string name="model_statistic_init_placeholder">12345 ms</string><string name="model_statistic_execute_placeholder">20 ms</string><string name="not_available">N/A</string><string name="build_network">Build network!</string><string name="classification_hint">Click on an image to classify</string></file><file path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/res/values/styles.xml" qualifiers=""><style name="AppTheme" 
parent="android:Theme.Material.Light.DarkActionBar">
<item name="android:colorPrimary">@color/colorPrimary</item>
<item name="android:colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="android:colorAccent">@color/colorAccent</item>
</style><style name="itemKey">
<item name="android:textColor">@color/colorPrimaryText</item>
<item name="android:textSize">18sp</item>
</style><style name="itemValue">
<item name="android:textColor">@color/colorSecondaryText</item>
<item name="android:textSize">14sp</item>
</style></file></source><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/generated/res/rs/debug"/><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/generated/res/resValues/debug"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug$Generated" generated="true" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/debug/res"/></dataSet><dataSet aapt-namespace="http://schemas.android.com/apk/res-auto" config="debug" generated-set="debug$Generated" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/debug/res"/></dataSet><mergedItems/></merger>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<merger version="3"><dataSet config="main" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/shaders"/></dataSet><dataSet config="debug" ignore_pattern="!.svn:!.git:!.ds_store:!*.scc:.*:&lt;dir>_*:!CVS:!thumbs.db:!picasa.ini:!*~"><source path="/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/debug/shaders"/></dataSet></merger>
\ No newline at end of file
#Tue Sep 28 12:30:32 ICT 2021
path.2=classes2.dex
path.1=classes.dex
path.0=classes.dex
renamed.2=classes3.dex
renamed.1=classes2.dex
renamed.0=classes.dex
base.2=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/dex/debug/mergeExtDexDebug/out/classes2.dex
base.1=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/dex/debug/mergeProjectDexDebug/out/classes.dex
base.0=/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/dex/debug/mergeExtDexDebug/out/classes.dex
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="viettel.iva.ai.multithreadsample"
android:targetSandboxVersion="2"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="26"
android:targetSdkVersion="29" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<application
android:allowBackup="false"
android:debuggable="true"
android:icon="@mipmap/launcher"
android:label="@string/app_name"
android:largeHeap="true"
android:supportsRtl="true"
android:testOnly="true"
android:theme="@style/AppTheme" >
<activity android:name="com.qualcomm.qti.snpe.imageclassifiers.MainActivity" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service
android:name="com.qualcomm.qti.snpe.imageclassifiers.ModelExtractionService"
android:exported="false" >
</service>
</application>
</manifest>
\ No newline at end of file
[{"outputType":{"type":"INSTANT_APP_MANIFEST"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"AndroidManifest.xml","properties":{"packageId":"viettel.iva.ai.multithreadsample","split":"","minSdkVersion":"26"}}]
\ No newline at end of file
1<?xml version="1.0" encoding="utf-8"?>
2<!--
3 * Copyright (c) 2016 Qualcomm Technologies, Inc.
4 * All Rights Reserved.
5 * Confidential and Proprietary - Qualcomm Technologies, Inc.
6-->
7<manifest xmlns:android="http://schemas.android.com/apk/res/android"
8 package="viettel.iva.ai.multithreadsample"
9 android:versionCode="1"
10 android:versionName="1.0" >
11
12 <uses-sdk
13 android:minSdkVersion="26"
13-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
14 android:targetSdkVersion="29" />
14-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
15
16 <uses-permission android:name="android.permission.INTERNET" />
16-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:9:5-67
16-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:9:22-64
17 <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
17-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:10:5-79
17-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:10:22-76
18
19 <application
19-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:11:5-30:19
20 android:allowBackup="false"
20-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:12:9-36
21 android:debuggable="true"
22 android:icon="@mipmap/launcher"
22-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:13:9-40
23 android:label="@string/app_name"
23-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:14:9-41
24 android:largeHeap="true"
24-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:17:9-33
25 android:supportsRtl="true"
25-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:15:9-35
26 android:testOnly="true"
27 android:theme="@style/AppTheme" >
27-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:16:9-40
28 <activity android:name="com.qualcomm.qti.snpe.imageclassifiers.MainActivity" >
28-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:18:9-24:20
28-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:18:19-47
29 <intent-filter>
29-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:19:13-23:29
30 <action android:name="android.intent.action.MAIN" />
30-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:20:17-68
30-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:20:25-66
31
32 <category android:name="android.intent.category.LAUNCHER" />
32-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:22:17-76
32-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:22:27-74
33 </intent-filter>
34 </activity>
35
36 <service
36-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:26:9-29:19
37 android:name="com.qualcomm.qti.snpe.imageclassifiers.ModelExtractionService"
37-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:27:13-51
38 android:exported="false" >
38-->/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:28:13-37
39 </service>
40 </application>
41
42</manifest>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="viettel.iva.ai.multithreadsample"
android:versionCode="1"
android:versionName="1.0" >
<uses-sdk
android:minSdkVersion="26"
android:targetSdkVersion="29" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
<application
android:allowBackup="false"
android:debuggable="true"
android:icon="@mipmap/launcher"
android:label="@string/app_name"
android:largeHeap="true"
android:supportsRtl="true"
android:testOnly="true"
android:theme="@style/AppTheme" >
<activity android:name="com.qualcomm.qti.snpe.imageclassifiers.MainActivity" >
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<service
android:name="com.qualcomm.qti.snpe.imageclassifiers.ModelExtractionService"
android:exported="false" >
</service>
</application>
</manifest>
\ No newline at end of file
[{"outputType":{"type":"MERGED_MANIFESTS"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"AndroidManifest.xml","properties":{"packageId":"viettel.iva.ai.multithreadsample","split":"","minSdkVersion":"26"}}]
\ No newline at end of file
[{"outputType":{"type":"METADATA_FEATURE_MANIFEST"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"AndroidManifest.xml","properties":{}}]
\ No newline at end of file
[{"outputType":{"type":"PROCESSED_RES"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"resources-debug.ap_","properties":{"packageId":"viettel.iva.ai.multithreadsample","split":"","minSdkVersion":"26"}}]
\ No newline at end of file
viettel.iva.ai.multithreadsample
color colorAccent
color colorDivider
color colorPrimary
color colorPrimaryDark
color colorPrimaryLight
color colorPrimaryText
color colorSecondaryText
color colorTextIcons
drawable button
drawable classification_background
id main_content
id model_build_button
id model_builder_options_row
id model_builder_output_option
id model_builder_runtime_option
id model_builder_runtime_spinner
id model_builder_tensor_option
id model_builder_tensor_spinner
id model_builder_values_row
id model_image
id model_image_grid
id model_name
id model_overview_classification_text
id model_overview_dimensions_label
id model_overview_dimensions_text
id model_overview_labels_row
id model_overview_layers_spinner
id model_overview_name_label
id model_overview_name_text
id model_overview_values_row
id model_overview_version_label
id model_overview_version_text
id model_statistics_init_label
id model_statistics_init_text
id model_statistics_java_execute_label
id model_statistics_java_execute_text
id model_statistics_labels_row
id model_statistics_values_row
id models_list
id models_load_status
id unsignedPD_switch
layout activity_main
layout fragment_model
layout model_image_layout
layout models_list
layout models_list_item
mipmap launcher
raw retina_480x850_quantize_v11
raw retina_mb_nosm_h288_w512_quantized
raw test_image
string app_name
string build_network
string builder_runtime
string builder_tensor_format
string classification_failed
string classification_hint
string loading_models
string loading_network
string model_dimensions
string model_dimensions_placeholder
string model_extraction_failed
string model_load_failed
string model_name
string model_name_placeholder
string model_not_loaded
string model_statistic_execute_placeholder
string model_statistic_init_placeholder
string model_version
string model_version_placeholder
string models_header
string not_available
string output_layers
string snpe_name
string snpe_version
string statistic_init
string statistic_java_execute
string statistic_native_execute
style AppTheme
style itemKey
style itemValue
{"mName":"debug","mStoreFile":"/home/quannm/.android/debug.keystore","mStorePassword":"android","mKeyAlias":"AndroidDebugKey","mKeyPassword":"android","mStoreType":"jks","mV1SigningEnabled":true,"mV2SigningEnabled":true}
\ No newline at end of file
int color colorAccent 0x7f010000
int color colorDivider 0x7f010001
int color colorPrimary 0x7f010002
int color colorPrimaryDark 0x7f010003
int color colorPrimaryLight 0x7f010004
int color colorPrimaryText 0x7f010005
int color colorSecondaryText 0x7f010006
int color colorTextIcons 0x7f010007
int drawable button 0x7f020000
int drawable classification_background 0x7f020001
int id main_content 0x7f030000
int id model_build_button 0x7f030001
int id model_builder_options_row 0x7f030002
int id model_builder_output_option 0x7f030003
int id model_builder_runtime_option 0x7f030004
int id model_builder_runtime_spinner 0x7f030005
int id model_builder_tensor_option 0x7f030006
int id model_builder_tensor_spinner 0x7f030007
int id model_builder_values_row 0x7f030008
int id model_image 0x7f030009
int id model_image_grid 0x7f03000a
int id model_name 0x7f03000b
int id model_overview_classification_text 0x7f03000c
int id model_overview_dimensions_label 0x7f03000d
int id model_overview_dimensions_text 0x7f03000e
int id model_overview_labels_row 0x7f03000f
int id model_overview_layers_spinner 0x7f030010
int id model_overview_name_label 0x7f030011
int id model_overview_name_text 0x7f030012
int id model_overview_values_row 0x7f030013
int id model_overview_version_label 0x7f030014
int id model_overview_version_text 0x7f030015
int id model_statistics_init_label 0x7f030016
int id model_statistics_init_text 0x7f030017
int id model_statistics_java_execute_label 0x7f030018
int id model_statistics_java_execute_text 0x7f030019
int id model_statistics_labels_row 0x7f03001a
int id model_statistics_values_row 0x7f03001b
int id models_list 0x7f03001c
int id models_load_status 0x7f03001d
int id unsignedPD_switch 0x7f03001e
int layout activity_main 0x7f040000
int layout fragment_model 0x7f040001
int layout model_image_layout 0x7f040002
int layout models_list 0x7f040003
int layout models_list_item 0x7f040004
int mipmap launcher 0x7f050000
int raw retina_480x850_quantize_v11 0x7f060000
int raw retina_mb_nosm_h288_w512_quantized 0x7f060001
int raw test_image 0x7f060002
int string app_name 0x7f070000
int string build_network 0x7f070001
int string builder_runtime 0x7f070002
int string builder_tensor_format 0x7f070003
int string classification_failed 0x7f070004
int string classification_hint 0x7f070005
int string loading_models 0x7f070006
int string loading_network 0x7f070007
int string model_dimensions 0x7f070008
int string model_dimensions_placeholder 0x7f070009
int string model_extraction_failed 0x7f07000a
int string model_load_failed 0x7f07000b
int string model_name 0x7f07000c
int string model_name_placeholder 0x7f07000d
int string model_not_loaded 0x7f07000e
int string model_statistic_execute_placeholder 0x7f07000f
int string model_statistic_init_placeholder 0x7f070010
int string model_version 0x7f070011
int string model_version_placeholder 0x7f070012
int string models_header 0x7f070013
int string not_available 0x7f070014
int string output_layers 0x7f070015
int string snpe_name 0x7f070016
int string snpe_version 0x7f070017
int string statistic_init 0x7f070018
int string statistic_java_execute 0x7f070019
int string statistic_native_execute 0x7f07001a
int style AppTheme 0x7f080000
int style itemKey 0x7f080001
int style itemValue 0x7f080002
[{"name":"/media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/build/intermediates/javac/debug/classes","index":0,"scopes":["PROJECT"],"types":["DEX_ARCHIVE"],"format":"DIRECTORY","present":true}]
\ No newline at end of file
[{"outputType":{"type":"APK"},"apkData":{"type":"MAIN","splits":[],"versionCode":1,"versionName":"1.0","enabled":true,"outputFile":"app-debug.apk","fullName":"debug","baseName":"debug"},"path":"app-debug.apk","properties":{}}]
\ No newline at end of file
-- Merging decision tree log ---
manifest
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
MERGED from [:snpe-release:] /home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/AndroidManifest.xml:7:1-16:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
package
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:8:11-59
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
android:versionName
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
android:versionCode
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:1-32:12
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
xmlns:android
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:7:11-69
uses-permission#android.permission.INTERNET
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:9:5-67
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:9:22-64
uses-permission#android.permission.ACCESS_NETWORK_STATE
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:10:5-79
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:10:22-76
application
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:11:5-30:19
android:supportsRtl
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:15:9-35
android:label
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:14:9-41
android:largeHeap
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:17:9-33
android:icon
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:13:9-40
android:allowBackup
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:12:9-36
android:theme
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:16:9-40
activity#com.qualcomm.qti.snpe.imageclassifiers.MainActivity
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:18:9-24:20
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:18:19-47
intent-filter#action:name:android.intent.action.MAIN+category:name:android.intent.category.LAUNCHER
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:19:13-23:29
action#android.intent.action.MAIN
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:20:17-68
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:20:25-66
category#android.intent.category.LAUNCHER
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:22:17-76
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:22:27-74
service#com.qualcomm.qti.snpe.imageclassifiers.ModelExtractionService
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:26:9-29:19
android:exported
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:28:13-37
android:name
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml:27:13-51
uses-sdk
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml reason: use-sdk injection requested
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
MERGED from [:snpe-release:] /home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/AndroidManifest.xml:12:5-14:41
MERGED from [:snpe-release:] /home/quannm/.gradle/caches/transforms-2/files-2.1/c4edee0919b865fb9f6f59f5038fb286/snpe-release/AndroidManifest.xml:12:5-14:41
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
android:targetSdkVersion
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
android:minSdkVersion
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
ADDED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
INJECTED from /media/HDD/snpe2/java-mobile-multithread-sample-ai_mobile_base/app/src/main/AndroidManifest.xml
The SNPE SDK aar must be dropped in this file's parent folder.
\ No newline at end of file
#
# Copyright (c) 2016-2017 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
<?xml version="1.0" encoding="utf-8"?>
<!--
 * Copyright (c) 2016 Qualcomm Technologies, Inc.
 * All Rights Reserved.
 * Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<!-- Source manifest for the SNPE image-classifier sample.
     NOTE(review): the merged debug manifest shows applicationId
     "viettel.iva.ai.multithreadsample" injected by the Gradle build;
     the package attribute here is only the source namespace. -->
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
    package="com.qualcomm.qti.snpe.imageclassifiers">

    <!-- Network permissions; presumably for model download or connectivity
         checks - TODO confirm against the application code. -->
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />

    <application
        android:allowBackup="false"
        android:icon="@mipmap/launcher"
        android:label="@string/app_name"
        android:supportsRtl="true"
        android:theme="@style/AppTheme"
        android:largeHeap="true">

        <!-- Launcher entry point of the sample. -->
        <activity android:name=".MainActivity">
            <intent-filter>
                <action android:name="android.intent.action.MAIN"/>
                <category android:name="android.intent.category.LAUNCHER"/>
            </intent-filter>
        </activity>

        <!-- Extracts bundled model files; internal-only (not exported). -->
        <service
            android:name=".ModelExtractionService"
            android:exported="false">
        </service>
    </application>
</manifest>
/*
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
/**
 * Base controller that holds a single attached view of type {@code V} and
 * notifies subclasses when the view is bound or unbound.
 */
public abstract class AbstractViewController<V> {

    // The currently bound view; null while detached.
    private V mView;

    /**
     * Binds {@code view} to this controller and invokes the attach hook.
     *
     * @throws IllegalStateException if a view is already attached
     */
    public void attach(V view) {
        if (isAttached()) {
            throw new IllegalStateException("A view is already attached.");
        }
        mView = view;
        onViewAttached(view);
    }

    /** Hook invoked exactly once after a view has been attached. */
    protected abstract void onViewAttached(V view);

    /**
     * Unbinds {@code view} from this controller and invokes the detach hook.
     *
     * @throws IllegalStateException if {@code view} is not the attached view
     */
    public void detach(V view) {
        if (view != mView) {
            throw new IllegalStateException("Another view is already attached.");
        }
        // Notify first so the subclass can still reach the view via getView().
        onViewDetached(view);
        mView = null;
    }

    /** Hook invoked exactly once before the attached view is released. */
    protected abstract void onViewDetached(V view);

    /** @return true while a view is bound to this controller */
    protected boolean isAttached() {
        return mView != null;
    }

    /** @return the bound view, or null when detached */
    protected V getView() {
        return mView;
    }
}
/*
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.app.Activity;
import android.app.FragmentTransaction;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.os.Bundle;
import android.os.Environment;
import android.util.Log;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
import com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Slicing;
import com.qualcomm.qti.snpe.imageclassifiers.thread.AISliceMergingThread;
import com.qualcomm.qti.snpe.imageclassifiers.thread.AiSlicingThread;
import com.qualcomm.qti.snpe.imageclassifiers.thread.ImageSlice;
import com.qualcomm.qti.snpe.imageclassifiers.thread.SliceCropBox;
import org.apache.zookeeper.WatchedEvent;
import org.apache.zookeeper.Watcher;
import org.apache.zookeeper.ZooKeeper;
import org.bytedeco.javacpp.Loader;
import org.bytedeco.javacpp.opencv_java;
import org.opencv.osgi.OpenCVNativeLoader;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CountDownLatch;
import static com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector.IMG_HEIGHT;
import static com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector.IMG_WIDTH;
/**
 * Demo activity: loads a test image, spins up a pool of slicing/inference
 * worker threads plus one merging thread, and feeds 100 frames of image
 * slices through the pipeline round-robin.
 */
public class MainActivity extends Activity {

    private static final String LOGTAG = MainActivity.class.getSimpleName();

    /** Number of slicing (inference) worker threads in the pool. */
    private static final int MAX_THREAD = 7;

    /** Number of frames pushed through the pipeline by the demo loop. */
    private static final int DEMO_FRAME_COUNT = 100;

    /** Pause between successive frames, in milliseconds. */
    private static final long FRAME_INTERVAL_MS = 30L;

    // Slicing-grid parameters; not referenced within this class — presumably
    // consumed elsewhere or kept for experimentation. TODO confirm.
    int noRows = 2;
    int noCols = 3;
    double rowsOverlap = 0.15;
    double colsOverlap = 0.15;
    int startInd = 0;

    private Slicing Slice;

    /**
     * Decodes a raw/drawable resource into an unscaled Bitmap.
     *
     * @param Input resource id of the image to decode
     * @return the decoded bitmap, or null if decoding fails
     */
    private Bitmap loadBmpImage(int Input) {
        BitmapFactory.Options o = new BitmapFactory.Options();
        // Keep original pixel dimensions for the detector.
        o.inScaled = false;
        return BitmapFactory.decodeResource(getResources(), Input, o);
    }

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);

        // Load the OpenCV native bindings before any image work.
        Loader.load(opencv_java.class);

        // One merging thread collects per-slice results back into frames.
        final AISliceMergingThread mergingThread = new AISliceMergingThread(this);
        mergingThread.start();

        // Pool of slicing/inference workers, fed round-robin in feedFrames().
        final List<AiSlicingThread> workers = new ArrayList<>();
        for (int i = 0; i < MAX_THREAD; ++i) {
            final AiSlicingThread worker =
                    new AiSlicingThread(getApplication(), this, mergingThread);
            worker.setName("aithread" + i);
            workers.add(worker);
            worker.start();
        }

        final Bitmap originalBitmap = loadBmpImage(R.raw.test_image);
        final List<SliceCropBox> cropBoxes = buildCropBoxes();

        // FIX: the original ran this loop (including Thread.sleep) directly in
        // onCreate(), blocking the UI thread for ~3 seconds and risking an
        // ANR. Feed frames from a dedicated background thread instead.
        new Thread(new Runnable() {
            @Override
            public void run() {
                feedFrames(workers, originalBitmap, cropBoxes);
            }
        }, "frame-feeder").start();
    }

    /** Builds the fixed crop layout: one full frame plus a 2x3 overlapping grid. */
    private static List<SliceCropBox> buildCropBoxes() {
        final List<SliceCropBox> boxes = new ArrayList<>();
        boxes.add(new SliceCropBox(0, 0, 1280, 718)); // full frame
        boxes.add(new SliceCropBox(0, 0, 458, 385));
        boxes.add(new SliceCropBox(396, 0, 488, 385));
        boxes.add(new SliceCropBox(822, 0, 457, 385));
        boxes.add(new SliceCropBox(0, 333, 458, 385));
        boxes.add(new SliceCropBox(396, 333, 488, 385));
        boxes.add(new SliceCropBox(822, 333, 457, 385));
        return boxes;
    }

    /**
     * Pushes {@link #DEMO_FRAME_COUNT} frames of slices to the worker pool,
     * distributing slices round-robin at roughly {@link #FRAME_INTERVAL_MS}
     * pacing. Stops early if the feeding thread is interrupted.
     */
    private void feedFrames(List<AiSlicingThread> workers,
                            Bitmap originalBitmap,
                            List<SliceCropBox> cropBoxes) {
        int nextWorker = 0;
        for (int frameId = 0; frameId < DEMO_FRAME_COUNT; frameId++) {
            try {
                Thread.sleep(FRAME_INTERVAL_MS);
            } catch (InterruptedException e) {
                // FIX: the original swallowed the interruption and kept
                // looping; restore the interrupt flag and stop feeding.
                Thread.currentThread().interrupt();
                return;
            }
            for (SliceCropBox cropBox : cropBoxes) {
                if (nextWorker >= workers.size()) {
                    nextWorker = 0;
                }
                workers.get(nextWorker)
                        .addSlicedImage(new ImageSlice(frameId, originalBitmap, cropBox));
                nextWorker++;
            }
        }
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // NOTE(review): worker and merging threads are never stopped here;
        // consider interrupting them on destroy to avoid leaking threads.
        // TODO confirm intended lifecycle before changing.
    }
}
/*
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.net.Uri;
import android.os.Parcel;
import android.os.Parcelable;
import java.io.File;
/**
 * Parcelable description of a neural-network model: the model file, sample
 * images (raw and jpg), a mean image, and classification labels.
 * NOTE(review): udoArmDir/udoConfig are deliberately not parceled by either
 * the read or the write path.
 */
public class Model implements Parcelable {

    /** Content URI under which available models are published. */
    public static final Uri MODELS_URI = Uri.parse("content://snpe/models");

    /** Sentinel identifier for an invalid/unknown model id. */
    public static final String INVALID_ID = "null";

    public File file;
    public File udoArmDir;
    public File udoConfig;
    public String[] labels;
    public File[] rawImages;
    public File[] jpgImages;
    public String name;
    public File meanImage;

    /**
     * Restores a Model from a parcel. The read order mirrors
     * {@link #writeToParcel(Parcel, int)} exactly.
     */
    protected Model(Parcel in) {
        name = in.readString();
        file = new File(in.readString());

        final String[] rawImagePaths = new String[in.readInt()];
        in.readStringArray(rawImagePaths);
        rawImages = toFiles(rawImagePaths);

        final String[] jpgImagePaths = new String[in.readInt()];
        in.readStringArray(jpgImagePaths);
        jpgImages = toFiles(jpgImagePaths);

        meanImage = new File(in.readString());

        labels = new String[in.readInt()];
        in.readStringArray(labels);
    }

    public Model() {}

    @Override
    public void writeToParcel(Parcel dest, int flags) {
        // Keep this order in sync with the Parcel constructor above.
        dest.writeString(name);
        dest.writeString(file.getAbsolutePath());
        dest.writeInt(rawImages.length);
        dest.writeStringArray(toAbsolutePaths(rawImages));
        dest.writeInt(jpgImages.length);
        dest.writeStringArray(toAbsolutePaths(jpgImages));
        dest.writeString(meanImage.getAbsolutePath());
        dest.writeInt(labels.length);
        dest.writeStringArray(labels);
    }

    /** Wraps each path string in a File, preserving order. */
    private File[] toFiles(String[] paths) {
        final File[] result = new File[paths.length];
        int index = 0;
        for (String path : paths) {
            result[index++] = new File(path);
        }
        return result;
    }

    /** Extracts the absolute path of each file, preserving order. */
    private String[] toAbsolutePaths(File[] files) {
        final String[] result = new String[files.length];
        int index = 0;
        for (File f : files) {
            result[index++] = f.getAbsolutePath();
        }
        return result;
    }

    @Override
    public int describeContents() {
        return 0;
    }

    public static final Creator<Model> CREATOR = new Creator<Model>() {
        @Override
        public Model createFromParcel(Parcel in) {
            return new Model(in);
        }

        @Override
        public Model[] newArray(int size) {
            return new Model[size];
        }
    };

    @Override
    public String toString() {
        return name.toUpperCase();
    }
}
/*
* Copyright (c) 2016, 2017 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.app.Application;
import android.app.Fragment;
import android.content.Context;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.ListView;
import android.widget.TextView;
import android.widget.Switch;
import android.widget.CompoundButton;
import com.qualcomm.qti.snpe.SNPE;
import java.util.Set;
/**
 * Fragment that lists the bundled SNPE models together with the "unsigned PD"
 * toggle. Extraction progress is reported through a status line that is shown
 * while the model list is empty.
 */
public class ModelCatalogueFragment extends Fragment {

    /** Last known switch state; static so it survives fragment re-creation. */
    private static boolean mSwitchState = false;

    private ModelCatalogueFragmentController mController;
    private ModelsAdapter mModelsAdapter;
    private ListView mModelsList;
    private TextView mLoadStatusText;
    private Switch mSwitch;

    /** Factory method; this fragment takes no arguments. */
    public static ModelCatalogueFragment create() {
        return new ModelCatalogueFragment();
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.models_list, container, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        mLoadStatusText = (TextView) view.findViewById(R.id.models_load_status);
        mModelsList = (ListView) view.findViewById(R.id.models_list);
        mSwitch = (Switch) view.findViewById(R.id.unsignedPD_switch);
        mSwitch.setChecked(mSwitchState);
        mSwitch.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
            @Override
            public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
                // Remember the choice for the next time the view is created.
                mSwitchState = isChecked;
            }
        });
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        mController = new ModelCatalogueFragmentController(getActivity());
        mModelsAdapter = new ModelsAdapter(getActivity());
        mModelsList.setAdapter(mModelsAdapter);
        // Item clicks are intentionally disabled; the previous implementation
        // opened the model overview:
        //   MainActivity.class.cast(getActivity()).displayModelOverview(
        //           mModelsAdapter.getItem(position), mSwitch.isChecked());
        getActivity().setTitle(getString(R.string.snpe_version,
                SNPE.getRuntimeVersion((Application) getActivity().getApplicationContext())));
    }

    @Override
    public void onStart() {
        super.onStart();
        mController.attach(this);
    }

    @Override
    public void onStop() {
        mController.detach(this);
        super.onStop();
    }

    /** Shows or hides the "loading models" status line. */
    public void setExtractingModelMessageVisible(final boolean isVisible) {
        mLoadStatusText.setText(getString(R.string.loading_models));
        mLoadStatusText.setVisibility(isVisible ? View.VISIBLE : View.GONE);
    }

    /** Replaces the list contents; the status line remains while the set is empty. */
    public void displayModels(Set<Model> models) {
        setExtractingModelMessageVisible(models.isEmpty());
        mModelsAdapter.clear();
        mModelsAdapter.addAll(models);
        mModelsAdapter.notifyDataSetChanged();
    }

    /** Switches the status line to the extraction-failure message. */
    public void showExtractionFailedMessage() {
        mLoadStatusText.setText(R.string.model_extraction_failed);
        mLoadStatusText.setVisibility(View.VISIBLE);
    }

    /** Renders each model's name (via Model.toString) into the list row layout. */
    private static final class ModelsAdapter extends ArrayAdapter<Model> {
        public ModelsAdapter(Context context) {
            super(context, R.layout.models_list_item, R.id.model_name);
        }
    }
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.content.ContentResolver;
import android.content.Context;
import android.database.ContentObserver;
import android.net.Uri;
import android.os.AsyncTask;
import android.os.Handler;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.LoadModelsTask;
import java.util.HashSet;
import java.util.Iterator;
import java.util.Set;
/**
 * Drives the model catalogue screen: kicks off extraction of the bundled model
 * archives, observes extraction progress/failure through the content resolver,
 * and pushes the resulting model set to the attached fragment.
 */
public class ModelCatalogueFragmentController extends
        AbstractViewController<ModelCatalogueFragment> {
    /**
     * Names of the raw model archives this build knows about. Entries whose raw
     * resource is missing are pruned in {@link #startModelsExtraction()}.
     * Built in a static initializer rather than via double-brace initialization,
     * which would create an anonymous HashSet subclass that pins its enclosing
     * class for the lifetime of the set.
     */
    private static final Set<String> mSupportedModels = new HashSet<>();
    static {
        mSupportedModels.add("alexnet");
        mSupportedModels.add("inception_v3");
        mSupportedModels.add("googlenet");
    }

    private final Context mContext;

    ModelCatalogueFragmentController(Context context) {
        mContext = context;
    }

    @Override
    protected void onViewAttached(final ModelCatalogueFragment view) {
        view.setExtractingModelMessageVisible(true);
        final ContentResolver contentResolver = mContext.getContentResolver();
        // Observe the dedicated "invalid" URI for extraction failures, and the
        // whole models tree for per-model completion notifications.
        contentResolver.registerContentObserver(Uri.withAppendedPath(
                Model.MODELS_URI, Model.INVALID_ID), false, mModelExtractionFailedObserver);
        contentResolver.registerContentObserver(Model.MODELS_URI, true, mModelExtractionObserver);
        startModelsExtraction();
        loadModels();
    }

    /**
     * Starts extraction for every supported model that has a matching raw
     * resource; models without one are removed from the supported set.
     */
    private void startModelsExtraction() {
        for (Iterator<String> it = mSupportedModels.iterator(); it.hasNext();) {
            String modelName = it.next();
            int resId = getRawResourceId(modelName);
            if (resId == 0) {
                it.remove();
            } else {
                ModelExtractionService.extractModel(mContext, modelName, resId);
            }
        }
    }

    @Override
    protected void onViewDetached(final ModelCatalogueFragment view) {
        final ContentResolver contentResolver = mContext.getContentResolver();
        contentResolver.unregisterContentObserver(mModelExtractionObserver);
        contentResolver.unregisterContentObserver(mModelExtractionFailedObserver);
    }

    /** Reloads the model list whenever a model finishes extracting. */
    private final ContentObserver mModelExtractionObserver =
            new ContentObserver(new Handler()) {
                @Override
                public void onChange(boolean selfChange) {
                    super.onChange(selfChange);
                    if (isAttached()) {
                        loadModels();
                    }
                }
            };

    /** Surfaces an error message when extraction reports failure. */
    private final ContentObserver mModelExtractionFailedObserver =
            new ContentObserver(new Handler()) {
                @Override
                public void onChange(boolean selfChange) {
                    if (isAttached()) {
                        getView().showExtractionFailedMessage();
                    }
                }
            };

    /** Loads the extracted models off the main thread. */
    private void loadModels() {
        final LoadModelsTask task = new LoadModelsTask(mContext, this);
        task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    }

    /** Callback from {@link LoadModelsTask} with the extracted models. */
    public void onModelsLoaded(final Set<Model> models) {
        if (isAttached()) {
            getView().displayModels(models);
        }
    }

    public Set<String> getAvailableModels() {
        return mSupportedModels;
    }

    /** Resolves a model name to its raw resource id; 0 when absent. */
    private int getRawResourceId(String rawName) {
        return mContext.getResources().getIdentifier(rawName, "raw", mContext.getPackageName());
    }
}
/*
* Copyright (c) 2016-2021 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.app.IntentService;
import android.content.Intent;
import android.content.Context;
import android.net.Uri;
import android.util.Log;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.IOException;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
/**
 * IntentService that unpacks a bundled model zip (a raw resource) into the
 * app's external files directory and copies any UDO configuration files into
 * internal storage. Completion and failure are signalled through
 * {@link Model#MODELS_URI} content notifications.
 */
public class ModelExtractionService extends IntentService {
    private static final String LOG_TAG = ModelExtractionService.class.getSimpleName();
    private static final String ACTION_EXTRACT = "extract";
    private static final String EXTRA_MODEL_RAW_RES_ID = "model_raw_res";
    private static final String EXTRA_MODEL_NAME = "model_name";
    public static final String MODELS_ROOT_DIR = "models";
    private static final int CHUNK_SIZE = 1024;
    // NOTE(review): a static Context reference can leak an Activity if one is
    // ever passed to extractModel(); kept for compatibility — consider using
    // context.getApplicationContext() at the call site.
    private static Context mContext;

    public ModelExtractionService() {
        super("ModelExtractionService");
    }

    /**
     * Queues extraction of {@code modelName} from raw resource {@code modelRawResId}.
     */
    public static void extractModel(final Context context, final String modelName,
            final int modelRawResId) {
        mContext = context;
        Intent intent = new Intent(context, ModelExtractionService.class);
        intent.setAction(ACTION_EXTRACT);
        intent.putExtra(EXTRA_MODEL_NAME, modelName);
        intent.putExtra(EXTRA_MODEL_RAW_RES_ID, modelRawResId);
        context.startService(intent);
    }

    @Override
    protected void onHandleIntent(Intent intent) {
        if (intent != null) {
            final String action = intent.getAction();
            if (ACTION_EXTRACT.equals(action)) {
                final int modelRawResId = intent.getIntExtra(EXTRA_MODEL_RAW_RES_ID, 0);
                final String modelName = intent.getStringExtra(EXTRA_MODEL_NAME);
                handleModelExtraction(modelName, modelRawResId);
            }
        }
    }

    /**
     * Copies every file from {@code modelDirName + udoFolderName} (if it exists)
     * into {@code <filesDir>/<modelName>/udo/}. Failures are logged, not thrown.
     */
    private void extractUdoConfig(String modelDirName, String udoFolderName) {
        final File udoConfigDir = new File(modelDirName + udoFolderName);
        if (!udoConfigDir.exists()) {
            return;
        }
        final File[] configFiles = udoConfigDir.listFiles();
        if (configFiles == null) {
            // Not a directory, or an I/O error while listing.
            return;
        }
        try {
            final File modelDir = new File(modelDirName);
            final File destDir = new File(
                    mContext.getFilesDir().getPath() + "/" + modelDir.getName() + "/udo/");
            destDir.mkdirs();
            for (final File file : configFiles) {
                copyFile(file, new File(destDir, file.getName()));
            }
        } catch (IOException e) {
            Log.e(LOG_TAG, "Failed to get Udo configs");
        }
    }

    /**
     * Copies {@code source} to {@code destination}. Both streams are always
     * closed, even when the copy or one of the closes fails (the original
     * version NPE'd in its finally block if the input stream never opened).
     */
    private static void copyFile(File source, File destination) throws IOException {
        try (InputStream is = new FileInputStream(source);
             OutputStream os = new FileOutputStream(destination)) {
            final byte[] buffer = new byte[CHUNK_SIZE];
            int length;
            while ((length = is.read(buffer)) > 0) {
                os.write(buffer, 0, length);
            }
        }
    }

    /**
     * Unzips the model archive into its directory and notifies observers.
     * On failure, notifies the root {@link Model#MODELS_URI} instead.
     */
    private void handleModelExtraction(final String modelName, final int modelRawResId) {
        ZipInputStream zipInputStream = null;
        try {
            final File modelsRoot = getOrCreateExternalModelsRootDirectory();
            final File modelRoot = createModelDirectory(modelsRoot, modelName);
            if (modelExists(modelRoot)) {
                return;
            }
            zipInputStream = new ZipInputStream(getResources().openRawResource(modelRawResId));
            ZipEntry zipEntry;
            while ((zipEntry = zipInputStream.getNextEntry()) != null) {
                // NOTE(review): entries come from a bundled (trusted) resource; if
                // this ever extracts third-party archives, validate entry names
                // against "zip slip" ("../" path traversal) before writing.
                final File entry = new File(modelRoot, zipEntry.getName());
                if (zipEntry.isDirectory()) {
                    doCreateDirectory(entry);
                } else {
                    doCreateFile(entry, zipInputStream);
                }
                zipInputStream.closeEntry();
            }
            getContentResolver().notifyChange(
                    Uri.withAppendedPath(Model.MODELS_URI, modelName), null);
            // Copy UDO configs to context directory.
            extractUdoConfig(modelRoot.getAbsolutePath(), "/udo/arm64-v8a/");
            extractUdoConfig(modelRoot.getAbsolutePath(), "/udo/dsp/");
        } catch (IOException e) {
            Log.e(LOG_TAG, e.getMessage(), e);
            // Signal failure to observers of the root URI.
            getContentResolver().notifyChange(Model.MODELS_URI, null);
        } finally {
            // Always release the stream; the original closed it only on the
            // error path and leaked it after a successful extraction.
            if (zipInputStream != null) {
                try {
                    zipInputStream.close();
                } catch (IOException ignored) {}
            }
        }
    }

    /** True when the model directory already has content (extraction done). */
    private boolean modelExists(File modelRoot) {
        // listFiles() returns null on I/O error or if the path is not a
        // directory; treat that as "not extracted" instead of crashing.
        final File[] contents = modelRoot.listFiles();
        return contents != null && contents.length > 0;
    }

    /** Streams the current zip entry into {@code file}, closing the output. */
    private void doCreateFile(File file, ZipInputStream inputStream) throws IOException {
        final FileOutputStream outputStream = new FileOutputStream(file);
        try {
            final byte[] chunk = new byte[CHUNK_SIZE];
            int read;
            while ((read = inputStream.read(chunk)) != -1) {
                outputStream.write(chunk, 0, read);
            }
        } finally {
            outputStream.close();
        }
    }

    /** Creates a directory (and parents), failing loudly if it cannot. */
    private void doCreateDirectory(File directory) throws IOException {
        if (!directory.mkdirs()) {
            throw new IOException("Can not create directory: " + directory.getAbsolutePath());
        }
    }

    /** Returns the external models root, creating it if necessary. */
    private File getOrCreateExternalModelsRootDirectory() throws IOException {
        final File modelsRoot = getExternalFilesDir(MODELS_ROOT_DIR);
        if (modelsRoot == null) {
            throw new IOException("Unable to access application external storage.");
        }
        if (!modelsRoot.isDirectory() && !modelsRoot.mkdir()) {
            throw new IOException("Unable to create model root directory: " +
                    modelsRoot.getAbsolutePath());
        }
        return modelsRoot;
    }

    /** Returns the directory for {@code modelName}, creating it if necessary. */
    private File createModelDirectory(File modelsRoot, String modelName) throws IOException {
        final File modelRoot = new File(modelsRoot, modelName);
        if (!modelRoot.isDirectory() && !modelRoot.mkdir()) {
            throw new IOException("Unable to create model root directory: " +
                    modelRoot.getAbsolutePath());
        }
        return modelRoot;
    }
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.app.Application;
import android.app.Fragment;
import android.content.Context;
import android.graphics.Bitmap;
import android.os.Bundle;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.AdapterView;
import android.widget.ArrayAdapter;
import android.widget.Button;
import android.widget.GridView;
import android.widget.ImageView;
import android.widget.Spinner;
import android.widget.TextView;
import android.widget.Toast;
import com.qualcomm.qti.snpe.NeuralNetwork;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.Locale;
import java.util.Set;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController.SupportedTensorFormat;
/**
 * Shows a single model: its sample images (tap to classify), runtime and
 * tensor-format selectors, build button, and load/execute statistics.
 */
public class ModelOverviewFragment extends Fragment {
    public static final String EXTRA_MODEL = "model";
    private static final Locale LOCALE = Locale.CANADA;

    private ModelImagesAdapter mImageGridAdapter;
    private ModelOverviewFragmentController mController;
    private TextView mDimensionsText;
    private TextView mModelNameText;
    private Spinner mOutputLayersSpinners;
    private Spinner mRuntimeSpinner;
    private Spinner mTensorFormatSpinner;
    private TextView mClassificationText;
    private TextView mModelVersionText;
    private TextView mStatisticLoadText;
    private TextView mStatisticJavaExecuteText;
    private Button mBuildButton;
    // NOTE(review): static so the value set in create() survives fragment
    // re-creation, but it is shared across all instances — confirm only one
    // overview fragment exists at a time.
    private static boolean mUnsignedPD;

    /** Builds the fragment with the model to display and the unsigned-PD flag. */
    public static ModelOverviewFragment create(final Model model, boolean unsignedPD) {
        mUnsignedPD = unsignedPD;
        final ModelOverviewFragment fragment = new ModelOverviewFragment();
        final Bundle arguments = new Bundle();
        arguments.putParcelable(EXTRA_MODEL, model);
        fragment.setArguments(arguments);
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container, Bundle savedInstanceState) {
        return inflater.inflate(R.layout.fragment_model, container, false);
    }

    @Override
    public void onViewCreated(View view, Bundle savedInstanceState) {
        super.onViewCreated(view, savedInstanceState);
        GridView imageGrid = (GridView) view.findViewById(R.id.model_image_grid);
        mImageGridAdapter = new ModelImagesAdapter(getActivity());
        imageGrid.setAdapter(mImageGridAdapter);
        imageGrid.setOnItemClickListener(new AdapterView.OnItemClickListener() {
            @Override
            public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
                // Tapping a sample image runs classification on it.
                final Bitmap bitmap = mImageGridAdapter.getItem(position);
                mController.classify(bitmap);
            }
        });
        mModelNameText = (TextView) view.findViewById(R.id.model_overview_name_text);
        mModelVersionText = (TextView) view.findViewById(R.id.model_overview_version_text);
        mDimensionsText = (TextView) view.findViewById(R.id.model_overview_dimensions_text);
        mRuntimeSpinner = (Spinner) view.findViewById(R.id.model_builder_runtime_spinner);
        mTensorFormatSpinner = (Spinner) view.findViewById(R.id.model_builder_tensor_spinner);
        mOutputLayersSpinners = (Spinner) view.findViewById(R.id.model_overview_layers_spinner);
        mClassificationText = (TextView) view.findViewById(R.id.model_overview_classification_text);
        mStatisticLoadText = (TextView) view.findViewById(R.id.model_statistics_init_text);
        mStatisticJavaExecuteText = (TextView) view.findViewById(R.id.model_statistics_java_execute_text);
        mBuildButton = (Button) view.findViewById(R.id.model_build_button);
        mBuildButton.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                mController.loadNetwork();
            }
        });
    }

    @Override
    public void onActivityCreated(Bundle savedInstanceState) {
        super.onActivityCreated(savedInstanceState);
        setHasOptionsMenu(true);
        final Model model = getArguments().getParcelable(EXTRA_MODEL);
        mController = new ModelOverviewFragmentController(
                (Application) getActivity().getApplicationContext(), model, mUnsignedPD);
    }

    @Override
    public void onStart() {
        super.onStart();
        mController.attach(this);
    }

    @Override
    public void onStop() {
        mController.detach(this);
        super.onStop();
    }

    /** Adds a sample image to the grid if it is not already shown. */
    public void addSampleBitmap(Bitmap bitmap) {
        if (mImageGridAdapter.getPosition(bitmap) == -1) {
            mImageGridAdapter.add(bitmap);
            mImageGridAdapter.notifyDataSetChanged();
        }
    }

    /** Displays the network input dimensions, or clears the field when null. */
    public void setNetworkDimensions(int[] inputDimensions) {
        mDimensionsText.setText(inputDimensions != null ? Arrays.toString(inputDimensions) : "");
    }

    public void displayModelLoadFailed() {
        Toast.makeText(getActivity(), R.string.model_load_failed, Toast.LENGTH_SHORT).show();
    }

    public void setModelName(String modelName) {
        mModelNameText.setText(modelName);
    }

    public void setModelVersion(String version) {
        mModelVersionText.setText(version);
    }

    /** Shows the load time in ms; non-positive values read "not available". */
    public void setModelLoadTime(long loadTime) {
        if (loadTime > 0) {
            mStatisticLoadText.setText(String.format(LOCALE, "%d ms", loadTime));
        } else {
            mStatisticLoadText.setText(R.string.not_available);
        }
    }

    /** Shows the Java-side execute time in ms; non-positive reads "not available". */
    public void setJavaExecuteStatistics(long javaExecuteTime) {
        if (javaExecuteTime > 0) {
            mStatisticJavaExecuteText.setText(String.format(LOCALE, "%d ms", javaExecuteTime));
        } else {
            mStatisticJavaExecuteText.setText(R.string.not_available);
        }
    }

    public void setOutputLayersNames(Set<String> outputLayersNames) {
        mOutputLayersSpinners.setAdapter(new ArrayAdapter<>(
                getActivity(), android.R.layout.simple_list_item_1,
                new LinkedList<>(outputLayersNames)));
    }

    /** Populates the tensor-format spinner and wires selection to the controller. */
    public void setSupportedTensorFormats(List<SupportedTensorFormat> tensorsFormats) {
        mTensorFormatSpinner.setAdapter(new ArrayAdapter<>(
                getActivity(), android.R.layout.simple_list_item_1, tensorsFormats
        ));
        mTensorFormatSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> adapterView, View view, int i, long l) {
                SupportedTensorFormat format = (SupportedTensorFormat) adapterView.getItemAtPosition(i);
                mController.setTensorFormat(format);
            }

            @Override
            public void onNothingSelected(AdapterView<?> adapterView) {
                mController.setTensorFormat(SupportedTensorFormat.FLOAT);
            }
        });
    }

    /** Populates the runtime spinner and wires selection to the controller. */
    public void setSupportedRuntimes(List<NeuralNetwork.Runtime> runtimes) {
        mRuntimeSpinner.setAdapter(new ArrayAdapter<>(
                getActivity(), android.R.layout.simple_list_item_1, runtimes
        ));
        mRuntimeSpinner.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
            @Override
            public void onItemSelected(AdapterView<?> parentView, View selectedItemView, int position, long id) {
                NeuralNetwork.Runtime runtime = (NeuralNetwork.Runtime) parentView.getItemAtPosition(position);
                mController.setTargetRuntime(runtime);
            }

            @Override
            public void onNothingSelected(AdapterView<?> parentView) {
                mController.setTargetRuntime(NeuralNetwork.Runtime.CPU);
            }
        });
    }

    /**
     * Shows "label: confidence" for a classification result. Requires at least
     * two entries; the original checked only {@code length > 0} and then indexed
     * element [1], which threw ArrayIndexOutOfBoundsException on a
     * single-element result. Shorter arrays now fall back to the hint text.
     */
    public void setClassificationResult(String[] classificationResult) {
        if (classificationResult.length > 1) {
            mClassificationText.setText(
                    String.format("%s: %s", classificationResult[0], classificationResult[1]));
        } else {
            setClassificationHint();
        }
    }

    public void setClassificationHint() {
        mClassificationText.setText(R.string.classification_hint);
    }

    /** Toggles the build button between its loading and idle states. */
    public void setLoadingNetwork(boolean loading) {
        if (loading) {
            mBuildButton.setText(R.string.loading_network);
            mBuildButton.setEnabled(false);
        } else {
            mBuildButton.setText(R.string.build_network);
            mBuildButton.setEnabled(true);
        }
    }

    public void displayModelNotLoaded() {
        Toast.makeText(getActivity(), R.string.model_not_loaded, Toast.LENGTH_SHORT).show();
    }

    public void displayClassificationFailed() {
        setClassificationHint();
        Toast.makeText(getActivity(), R.string.classification_failed, Toast.LENGTH_SHORT).show();
    }

    /** Grid adapter rendering one sample bitmap per cell. */
    private static class ModelImagesAdapter extends ArrayAdapter<Bitmap> {
        ModelImagesAdapter(Context context) {
            super(context, R.layout.model_image_layout);
        }

        @Override
        public View getView(int position, View convertView, ViewGroup parent) {
            View view;
            if (convertView == null) {
                view = LayoutInflater.from(parent.getContext()).
                        inflate(R.layout.model_image_layout, parent, false);
            } else {
                view = convertView;
            }
            final ImageView imageView = ImageView.class.cast(view);
            imageView.setImageBitmap(getItem(position));
            return view;
        }
    }
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers;
import android.app.Application;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.SNPE;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.AbstractClassifyImageTask;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.ClassifyImageWithFloatTensorTask;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.ClassifyImageWithUserBufferTf8Task;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.LoadImageTask;
import com.qualcomm.qti.snpe.imageclassifiers.tasks.LoadNetworkTask;
import java.io.File;
import java.lang.ref.SoftReference;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
/**
 * Controller for the model overview screen: loads the network on demand,
 * caches sample bitmaps, runs classification tasks, and forwards results to
 * the attached fragment.
 */
public class ModelOverviewFragmentController extends AbstractViewController<ModelOverviewFragment> {
    /** Tensor formats the classify path can feed to the network. */
    public enum SupportedTensorFormat {
        FLOAT,
        UB_TF8
    }

    // Soft references so cached sample bitmaps can be reclaimed under pressure.
    private final Map<String, SoftReference<Bitmap>> mBitmapCache;
    private final Model mModel;
    private final Application mApplication;
    private NeuralNetwork mNeuralNetwork;
    private LoadNetworkTask mLoadTask;
    private NeuralNetwork.Runtime mRuntime;
    // Format currently chosen in the UI vs. the format the loaded network was built with.
    private SupportedTensorFormat mCurrentSelectedTensorFormat;
    private SupportedTensorFormat mNetworkTensorFormat;
    private boolean mUnsignedPD;

    public ModelOverviewFragmentController(final Application application, Model model, boolean unsignedPD) {
        mBitmapCache = new HashMap<>();
        mApplication = application;
        mModel = model;
        mUnsignedPD = unsignedPD;
    }

    @Override
    protected void onViewAttached(ModelOverviewFragment view) {
        view.setModelName(mModel.name);
        view.setSupportedRuntimes(getSupportedRuntimes());
        view.setSupportedTensorFormats(Arrays.asList(SupportedTensorFormat.values()));
        loadImageSamples(view);
    }

    /** Pushes cached sample images immediately; loads the rest asynchronously. */
    private void loadImageSamples(ModelOverviewFragment view) {
        for (int i = 0; i < mModel.jpgImages.length; i++) {
            final File jpeg = mModel.jpgImages[i];
            final Bitmap cached = getCachedBitmap(jpeg);
            if (cached != null) {
                view.addSampleBitmap(cached);
            } else {
                final LoadImageTask task = new LoadImageTask(this, jpeg);
                task.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
            }
        }
    }

    /** Returns the cached bitmap for a file, or null if absent or reclaimed. */
    private Bitmap getCachedBitmap(File jpeg) {
        final SoftReference<Bitmap> reference = mBitmapCache.get(jpeg.getAbsolutePath());
        if (reference != null) {
            final Bitmap bitmap = reference.get();
            if (bitmap != null) {
                return bitmap;
            }
        }
        return null;
    }

    /** Probes every SNPE runtime and keeps the ones this device supports. */
    private List<NeuralNetwork.Runtime> getSupportedRuntimes() {
        final List<NeuralNetwork.Runtime> result = new LinkedList<>();
        final SNPE.NeuralNetworkBuilder builder = new SNPE.NeuralNetworkBuilder(mApplication);
        NeuralNetwork.RuntimeCheckOption runtimeCheck = NeuralNetwork.RuntimeCheckOption.NORMAL_CHECK;
        if (mUnsignedPD) {
            runtimeCheck = NeuralNetwork.RuntimeCheckOption.UNSIGNEDPD_CHECK;
        }
        builder.setRuntimeCheckOption(runtimeCheck);
        for (NeuralNetwork.Runtime runtime : NeuralNetwork.Runtime.values()) {
            if (builder.isRuntimeSupported(runtime)) {
                result.add(runtime);
            }
        }
        return result;
    }

    @Override
    protected void onViewDetached(ModelOverviewFragment view) {
        if (mNeuralNetwork != null) {
            mNeuralNetwork.release();
            mNeuralNetwork = null;
        }
    }

    /** Callback from {@link LoadImageTask}: cache and display the bitmap. */
    public void onBitmapLoaded(File imageFile, Bitmap bitmap) {
        mBitmapCache.put(imageFile.getAbsolutePath(), new SoftReference<>(bitmap));
        if (isAttached()) {
            getView().addSampleBitmap(bitmap);
        }
    }

    /** Callback from {@link LoadNetworkTask} on success. */
    public void onNetworkLoaded(NeuralNetwork neuralNetwork, final long loadTime) {
        if (isAttached()) {
            mNeuralNetwork = neuralNetwork;
            ModelOverviewFragment view = getView();
            view.setNetworkDimensions(getInputDimensions());
            view.setOutputLayersNames(neuralNetwork.getOutputLayers());
            view.setModelVersion(neuralNetwork.getModelVersion());
            view.setLoadingNetwork(false);
            view.setModelLoadTime(loadTime);
        } else {
            // Nobody to show it to; free the native resources immediately.
            neuralNetwork.release();
        }
        mLoadTask = null;
    }

    /** Callback from {@link LoadNetworkTask} on failure. */
    public void onNetworkLoadFailed() {
        if (isAttached()) {
            ModelOverviewFragment view = getView();
            view.displayModelLoadFailed();
            view.setLoadingNetwork(false);
        }
        mLoadTask = null;
        mNetworkTensorFormat = null;
    }

    /**
     * Classifies a bitmap with the loaded network, choosing the task matching
     * the network's tensor format (FLOAT is the default, matching the original
     * switch's default branch; a null format defensively falls through to it).
     */
    public void classify(final Bitmap bitmap) {
        if (mNeuralNetwork == null) {
            // Guard view access with isAttached(), consistent with every other
            // method in this class (the original called getView() unguarded here).
            if (isAttached()) {
                getView().displayModelNotLoaded();
            }
            return;
        }
        final AbstractClassifyImageTask task;
        if (mNetworkTensorFormat == SupportedTensorFormat.UB_TF8) {
            task = new ClassifyImageWithUserBufferTf8Task(this, mNeuralNetwork, bitmap, mModel);
        } else {
            task = new ClassifyImageWithFloatTensorTask(this, mNeuralNetwork, bitmap, mModel);
        }
        task.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR);
    }

    /** Callback from a classify task with [label, confidence] and timing. */
    public void onClassificationResult(String[] labels, long javaExecuteTime) {
        if (isAttached()) {
            ModelOverviewFragment view = getView();
            view.setClassificationResult(labels);
            view.setJavaExecuteStatistics(javaExecuteTime);
        }
    }

    public void onClassificationFailed() {
        if (isAttached()) {
            getView().displayClassificationFailed();
            getView().setJavaExecuteStatistics(-1);
        }
    }

    public void setTargetRuntime(NeuralNetwork.Runtime runtime) {
        mRuntime = runtime;
    }

    public void setTensorFormat(SupportedTensorFormat format) {
        mCurrentSelectedTensorFormat = format;
    }

    /**
     * Releases any current network, cancels a pending load, resets the UI, and
     * starts loading with the currently selected runtime and tensor format.
     */
    public void loadNetwork() {
        if (isAttached()) {
            ModelOverviewFragment view = getView();
            view.setLoadingNetwork(true);
            view.setNetworkDimensions(null);
            view.setOutputLayersNames(new HashSet<String>());
            view.setModelVersion("");
            view.setModelLoadTime(-1);
            view.setJavaExecuteStatistics(-1);
            view.setClassificationHint();
            final NeuralNetwork neuralNetwork = mNeuralNetwork;
            if (neuralNetwork != null) {
                neuralNetwork.release();
                mNeuralNetwork = null;
            }
            if (mLoadTask != null) {
                mLoadTask.cancel(false);
            }
            mNetworkTensorFormat = mCurrentSelectedTensorFormat;
            mLoadTask = new LoadNetworkTask(mApplication, this, mModel, mRuntime, mCurrentSelectedTensorFormat, mUnsignedPD);
            mLoadTask.executeOnExecutor(AsyncTask.SERIAL_EXECUTOR);
        }
    }

    /** Shape of the network's first input tensor, or null when there is none. */
    private int[] getInputDimensions() {
        Set<String> inputNames = mNeuralNetwork.getInputTensorsNames();
        Iterator<String> iterator = inputNames.iterator();
        return iterator.hasNext() ? mNeuralNetwork.getInputTensorsShapes().get(iterator.next()) : null;
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.detector;
public class Anchor {
public float cx;
public float cy;
public float sx;
public float sy;
public Anchor(float cx, float cy, float sx, float sy) {
this.cx = cx;
this.cy = cy;
this.sx = sx;
this.sy = sy;
}
public Anchor() { }
}
package com.qualcomm.qti.snpe.imageclassifiers.detector;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.graphics.RectF;
import org.opencv.core.Mat;
import java.time.LocalDateTime;
import java.util.ArrayList;
/**
 * Detection result for a single face: bounding-box corners, confidence, five
 * facial landmarks, plus per-track recognition state (tracking, alignment,
 * FR distances, quality metrics) filled in by later pipeline stages.
 */
public class Bbox implements Comparable<Bbox> {
    // Box corners in image coordinates; x1,y1 = one corner, x2,y2 = the
    // opposite one (calcRatios() computes width as x2 - x1).
    public float x1;
    public float y1;
    public float x2;
    public float y2;
    // Detector confidence. NOTE(review): left null by the no-arg constructor;
    // compareTo() dereferences it, so only sort boxes whose conf has been set.
    public Float conf;
    // Five landmark points. NOTE(review): indices [0]/[1] appear to be the eyes
    // and [2] the nose, judging by the eyeDist/noseWidth math in calcRatios() —
    // confirm against the detector's landmark ordering.
    public PointF landmarks[];// = new PointF[5];
    //for tracking
    public boolean isTracked = false;
    //align face
    public Mat alignInput = null;
    public Bitmap alignBitmap = null;
    //Fr
    public float min_group_distance = 0.0f;
    public float min_distance = 0.0f;
    public int index = -1;
    public int personId = 0;
    public int groupId = 0;
    public String label = "Unknown";
    public String groupLabel = "Unknown";
    //Align Ratio
    public float alignRatio = 0.0f;
    public float iou = 0.0f;
    public float distanceEyesRatio = 0.0f;
    public float noseRatio = 0.0f;
    //time
    public Long lastUpdated = System.currentTimeMillis();
    public Long lastFrExecuted = System.currentTimeMillis();
    public LocalDateTime recognizedDate = LocalDateTime.now();
    //feature
    public float[] feature;
    //trueface
    public float faceProb = 0.0f;
    public float finalProb = 0.0f;
    //postIQA
    public float eyesRatio = 0.0f;
    public float eyesDistance = 0.0f;
    public float sharpness = 0.0f;
    //RPY
    public double yawDeg = 0.0f;
    public double rollDeg = 0.0f;
    public double pitchDeg = 0.0f;
    /*
    public Bbox(float x1, float y1, float x2, float y2, float conf, PointF[] landmarks) {
        this.x1 = x1;
        this.y1 = y1;
        this.x2 = x2;
        this.y2 = y2;
        this.conf = conf;
        this.landmarks = landmarks;
    }
    */
    /** Creates an empty box with five unset landmark slots (conf stays null). */
    public Bbox() {
        landmarks = new PointF[5];
    }
    /** Creates a labelled box with geometry and confidence; landmarks unset. */
    public Bbox(float x1, float y1, float x2, float y2, Float conf, String label) {
        this.x1 = x1;
        this.y1 = y1;
        this.x2 = x2;
        this.y2 = y2;
        this.conf = conf;
        this.label = label;
        landmarks = new PointF[5];
    }
    // duplicates into another Box
    // NOTE(review): copies only the corner coordinates and the landmarks array
    // *reference* (not a clone) — mutations of either box's landmark points are
    // visible to the other. Confidence, label and tracking state are left as-is.
    public void copyTo(Bbox b){
        b.x1 = this.x1;
        b.y1 = this.y1;
        b.x2 = this.x2;
        b.y2 = this.y2;
        //b.conf = this.conf;
        b.landmarks = this.landmarks;
        //b.label = this.label;
        //b.min_distance = this.min_distance;
        //b.index = this.index;
        //b.isTracked = this.isTracked;
    }
    //public void calcAlignRatio()
    // Derives alignRatio, distanceEyesRatio and noseRatio from the current
    // corners and landmarks. Requires landmarks[0..2] to be non-null.
    public void calcRatios()
    {
        //bbox width
        float width = this.x2 - this.x1;
        //alignRatio
        // dist1/dist2 (landmark-to-landmark distances) are computed but unused;
        // only the horizontal offsets dx1/dx2 feed the active alignRatio formula.
        double dist1 = Math.sqrt(Math.pow(landmarks[0].x-landmarks[2].x,2) + Math.pow(landmarks[0].y-landmarks[2].y,2));
        float dx1 = Math.abs(x2 - landmarks[2].x);
        double dist2 = Math.sqrt(Math.pow(landmarks[1].x-landmarks[2].x,2) + Math.pow(landmarks[1].y-landmarks[2].y,2));
        float dx2 = Math.abs(x1 - landmarks[2].x);
        //alignRatio = (float)(Math.min(dist1, dist2) / Math.max(dist1, dist2));
        alignRatio = (float)(Math.min(dx1, dx2) / Math.max(dx1, dx2));
        //eyes
        double eyeDist = Math.sqrt(Math.pow(landmarks[0].x-landmarks[1].x,2) + Math.pow(landmarks[0].y-landmarks[1].y,2));
        distanceEyesRatio = (float)(eyeDist/width);
        //nose
        float noseWidth = landmarks[2].x - this.x1;
        noseRatio = (float)(noseWidth/width);
    }
    // convenience function
    // Builds `count` default-constructed boxes.
    public static ArrayList<Bbox> createBoxes(int count) {
        final ArrayList<Bbox> boxes = new ArrayList<>();
        for (int i = 0; i < count; ++i)
            boxes.add(new Bbox());
        return boxes;
    }
    // Descending confidence order (compares o.conf against this.conf).
    @Override
    public int compareTo(Bbox o) {
        return o.conf.compareTo(this.conf);
    }
    public String getLabel() {
        return label;
    }
    public Float getConfidence() {
        return conf;
    }
    public RectF getLocation() {
        return new RectF(x1, y1, x2, y2);
    }
    public float getFaceProb() {
        return faceProb;
    }
    public void setFaceProb(float faceProb) {
        this.faceProb = faceProb;
    }
    public float getFinalProb() {
        return finalProb;
    }
    public void setFinalProb(float finalProb) {
        this.finalProb = finalProb;
    }
}
\ No newline at end of file
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.detector;
import android.app.Application;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.os.SystemClock;
import android.util.Log;
import com.qualcomm.qti.snpe.FloatTensor;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.SNPE.NeuralNetworkBuilder;
import com.qualcomm.qti.snpe.SnpeError;
import com.qualcomm.qti.snpe.TF8UserBufferTensor;
import com.qualcomm.qti.snpe.Tensor;
import com.qualcomm.qti.snpe.UserBufferTensor;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
/**
 * Face detector wrapping a RetinaFace-style model executed through the
 * Qualcomm SNPE runtime. Responsibilities: building the network with a
 * runtime preference order (DSP > GPU fp16 > GPU > CPU), generating the
 * anchor (prior-box) grid once, converting input frames to a mean-subtracted
 * float RGB buffer via OpenCV, running inference on either the float path or
 * the TF8 user-buffer (quantized) path, and decoding + NMS-filtering the
 * resulting boxes and five-point landmarks.
 */
public class RetinaDetector {
    static final String LOGTAG = RetinaDetector.class.getSimpleName();
    // Selects the TF8 user-supplied-buffer (quantized) execution path when true.
    public boolean isUsingQuantized = false;
    static final double CONFIDENCE_THRESHOLD = 0.8;//0.8;//0.6;
    static final double IOU_THRESHOLD = 0.4;
    public static final float TRUE_FACE_THRESHOLD = 0.75f;
    // Model input resolution (width x height); must match the DLC model.
    public static final int IMG_WIDTH = 512;//568;// //512; //850;//640;
    public static final int IMG_HEIGHT = 288;//320;// //288; // 480;//360;
    // Prior boxes generated once by createAnchor() and reused for decoding.
    public final List<Anchor> anchors = new ArrayList<Anchor>();
    NeuralNetwork network = null;
    // Prepare input buffer
    String mInputLayer = "";
    Set<String> mOutputLayer;
    private FloatTensor inputTensor = null;
    private Map<String, FloatTensor> inputs = new HashMap<>();
    private Map<String, TF8UserBufferTensor> inputTensors = new HashMap<>();
    private Map<String, TF8UserBufferTensor> outputTensors = new HashMap<>();
    private Map<String, ByteBuffer> inputBuffers = new HashMap<>();
    private Map<String, ByteBuffer> outputBuffers = new HashMap<>();
    // Flattened network input: HWC RGB floats after per-channel mean subtraction.
    private float[] inputValues = new float[IMG_WIDTH * IMG_HEIGHT * 3];
    //private HexagonDelegate hexagonDelegate;
    //True Face filter
    //private TrueFaceDetector mTrueFaceDetector;
    public Mat mFrameCv = new Mat();

    /**
     * Builds the SNPE network from the raw resource {@code modelRes} and
     * precomputes the anchor grid.
     * NOTE(review): an IOException during build is silently swallowed, leaving
     * {@code network} null — subsequent detectFrame() calls would then NPE.
     */
    public RetinaDetector(
            Context context,
            Application application,
            int modelRes
            // int trueFaceModelRes
    ) {
        //For hexagon delegate
        // Create the Delegate instance.
        try {
            //Log.d(LOGTAG, "Hexagon: Native libraryDir is: " + context.getApplicationInfo().nativeLibraryDir);
            //hexagonDelegate = new HexagonDelegate(context);
            //options.addDelegate(hexagonDelegate);
            //Log.d(LOGTAG,"Accelerate by Hexagon");
        } catch (UnsupportedOperationException e) {
            // Hexagon delegate is not supported on this device.
            Log.d(LOGTAG,"Hexagon delegate is not supported on this device.");
        }
        final Resources res = context.getResources();
        final InputStream modelInputStream = res.openRawResource(modelRes);
        try {
            // Runtime order is a preference list; CPU fallback is also enabled.
            final NeuralNetworkBuilder builder = new NeuralNetworkBuilder(application)
                    .setDebugEnabled(false)
                    .setRuntimeOrder(
                            // NeuralNetwork.Runtime.AIP,
                            NeuralNetwork.Runtime.DSP,
                            NeuralNetwork.Runtime.GPU_FLOAT16,
                            NeuralNetwork.Runtime.GPU,
                            NeuralNetwork.Runtime.CPU
                    )
                    .setModel(modelInputStream, modelInputStream.available())
                    .setOutputLayers("concatenation_3",
                            "concatenation_4",
                            "concatenation_5")
                    .setCpuFallbackEnabled(true)
                    .setUseUserSuppliedBuffers(isUsingQuantized)
                    .setPerformanceProfile(NeuralNetwork.PerformanceProfile.HIGH_PERFORMANCE);
            network = builder.build();
            // Prepare inputs buffer
            mInputLayer = network.getInputTensorsNames().iterator().next();
            mOutputLayer = network.getOutputTensorsNames();
            inputTensor = network.createFloatTensor(network.getInputTensorsShapes().get(mInputLayer));
            createAnchor();
            //Init True Face detector
            /*
            mTrueFaceDetector = new TrueFaceDetector(
                    context,
                    application,
                    trueFaceModelRes
            );
            */
            Log.d(LOGTAG, "RetinaDetector inited " + network.getInputTensorsShapes().entrySet().iterator().next().getValue().length + " anchor " + anchors.size());
        } catch (IOException e) {
            // Do something here
        }
    }

    // JNI bridge: warps the face region of frameCvPtr into frameRetPtr using
    // the 5 landmark points (implemented in native code, not visible here).
    private static native void nativeAlign(long frameCvPtr, long frameRetPtr, float[][] landmarkObjArr);

    /**
     * Scales the 5 detected landmarks from model coordinates back to the
     * original frame's resolution and asks native code for an aligned face crop.
     * @return the aligned face Mat produced by nativeAlign.
     */
    public Mat prepareAlignNative(Mat frameCv, Bbox box)
    {
        float scaleX = RetinaDetector.IMG_WIDTH / (float) frameCv.cols();//frame.getWidth();
        float scaleY = RetinaDetector.IMG_HEIGHT / (float) frameCv.rows();//frame.getHeight();
        //float landmarks[] = {
        float landmarks[][] = {
                {box.landmarks[0].x / scaleX, box.landmarks[0].y / scaleY},
                {box.landmarks[1].x / scaleX, box.landmarks[1].y / scaleY},
                {box.landmarks[2].x / scaleX, box.landmarks[2].y / scaleY},
                {box.landmarks[3].x / scaleX, box.landmarks[3].y / scaleY},
                {box.landmarks[4].x / scaleX, box.landmarks[4].y / scaleY}
        };
        Mat faceAligned = new Mat();//112, 112, CV_32FC1);
        nativeAlign(frameCv.getNativeObjAddr(), faceAligned.getNativeObjAddr(), landmarks);
        return faceAligned;
    }

    /**
     * Populates {@link #anchors} with the RetinaFace prior boxes: three
     * feature-map scales (strides 8/16/32), two min-sizes per scale, anchors
     * expressed as normalized (cx, cy, sx, sy) in [0, 1].
     */
    private void createAnchor() {
        final float[][] featureMap = new float[3][3];
        final float[][] minSizes = {{10, 20}, {32, 64}, {128, 256}};
        final float[] steps = {8, 16, 32};
        // Feature-map size per stride (float division, rounded up).
        for (int i = 0; i < 3; ++i) {
            featureMap[i][0] = (float) Math.ceil(IMG_HEIGHT / steps[i]);
            featureMap[i][1] = (float) Math.ceil(IMG_WIDTH / steps[i]);
        }
        for (int k = 0; k < 3; ++k) {
            for (int i = 0; i < featureMap[k][0]; ++i) {
                for (int j = 0; j < featureMap[k][1]; ++j) {
                    for (int l = 0; l < 2; ++l) {//2//minSizes.size
                        // Anchor size normalized by image dims; center at cell midpoint.
                        final float s_ky = minSizes[k][l] / IMG_HEIGHT;
                        final float s_kx = minSizes[k][l] / IMG_WIDTH;
                        final float cx = (float) (j + 0.5) * steps[k] / IMG_WIDTH;
                        final float cy = (float) (i + 0.5) * steps[k] / IMG_HEIGHT;
                        final Anchor anchor = new Anchor(cx, cy, s_kx, s_ky);
                        anchors.add(anchor);
                    }
                }
            }
        }
    }

    // Fills the float input tensor from a bitmap (Java-side pixel loop variant;
    // detectFrame() uses the OpenCV pipeline instead).
    private void prepareInputs(Bitmap frame) {
        loadRgbBitmapAsFloat(frame);
        inputTensor.write(inputValues, 0, inputValues.length);
        inputs.put(mInputLayer, inputTensor);
    }

    // Writes mean-subtracted RGB floats for every pixel into inputValues (HWC order).
    private void loadRgbBitmapAsFloat(Bitmap image) {
        final int[] pixels = new int[image.getWidth() * image.getHeight()];
        image.getPixels(pixels, 0, image.getWidth(), 0, 0,
                image.getWidth(), image.getHeight());
        for (int y = 0; y < image.getHeight(); y++) {
            for (int x = 0; x < image.getWidth(); x++) {
                final int idx = y * image.getWidth() + x;
                final int batchIdx = idx * 3;
                final float[] rgb = extractColorChannels(pixels[idx]);
                inputValues[batchIdx] = rgb[0];
                inputValues[batchIdx + 1] = rgb[1];
                inputValues[batchIdx + 2] = rgb[2];
            }
        }
    }

    // Unpacks a packed ARGB int and subtracts the per-channel means (123/117/104),
    // matching the Core.subtract(..., Scalar(123,117,104), ...) OpenCV path.
    private float[] extractColorChannels(int pixel) {
        float b = ((pixel) & 0xFF);
        float g = ((pixel >> 8) & 0xFF);
        float r = ((pixel >> 16) & 0xFF);
        return new float[]{
                r - 123,
                g - 117,
                b - 104
        };
    }

    /**
     * Reads the float output tensors ("loc0" box deltas, "landmark0" landmark
     * deltas, "conf0" 2-class scores), decodes them against the anchors and
     * applies NMS. NOTE(review): keys here differ from the layer names passed
     * to setOutputLayers ("concatenation_3..5") — presumably SNPE renames the
     * output tensors; confirm against the model.
     */
    private List<Bbox> convertOutputs(Map<String, FloatTensor> outputs) {
        float[] locs = {};
        float[] landmarks = {};
        float[] confidences = {};
        for (Map.Entry<String, FloatTensor> output : outputs.entrySet()) {
            FloatTensor outputTensor = output.getValue();
            switch (output.getKey()) {
                case "loc0":
                    locs = new float[outputTensor.getSize()];
                    outputTensor.read(locs, 0, locs.length);
                    break;
                case "landmark0":
                    landmarks = new float[outputTensor.getSize()];
                    outputTensor.read(landmarks, 0, landmarks.length);
                    break;
                case "conf0":
                    confidences = new float[outputTensor.getSize()];
                    outputTensor.read(confidences, 0, confidences.length);
                    break;
            }
        }
        List<Bbox> bboxes = buildBbox(locs, confidences, landmarks);
        bboxes = nms(bboxes);
        return bboxes;
    }

    /**
     * Quantized-path equivalent of convertOutputs(): dequantizes the TF8
     * output buffers back to floats, then decodes + NMS-filters.
     */
    private List<Bbox> convertTf8Outputs() {
        float[] locs = {};
        float[] landmarks = {};
        float[] confidences = {};
        Iterator<String> outputLayers = mOutputLayer.iterator();
        while (outputLayers.hasNext()) {
            String outputLayer = outputLayers.next();
            switch (outputLayer) {
                case "loc0": {
                    locs = TensorUtils.dequantize(outputTensors.get(outputLayer), outputBuffers.get(outputLayer));
                    break;
                }
                case "landmark0": {
                    landmarks = TensorUtils.dequantize(outputTensors.get(outputLayer), outputBuffers.get(outputLayer));
                    break;
                }
                case "conf0": {
                    confidences = TensorUtils.dequantize(outputTensors.get(outputLayer), outputBuffers.get(outputLayer));
                    break;
                }
            }
        }
        List<Bbox> bboxes = buildBbox(locs, confidences, landmarks);
        bboxes = nms(bboxes);
        return bboxes;
    }

    /**
     * Runs one detection pass: converts {@code frame} to a mean-subtracted
     * CV_32F RGB buffer, executes the network (float or TF8 path depending on
     * {@link #isUsingQuantized}), and returns the decoded, NMS-filtered boxes.
     * NOTE(review): on the quantized path the input-preparation calls are
     * commented out, so inputTensors would be empty — confirm before enabling
     * isUsingQuantized.
     */
    public List<Bbox> detectFrame(Bitmap frame) {
        //long startTime = System.currentTimeMillis();
        //prepareInputs(frame);
        long inputProcessStart = System.currentTimeMillis();
        /*Old process*/
        Mat frameCv = new Mat();//frame.getWidth(), frame.getHeight(), CvType.CV_8UC3);
        Bitmap frame32 = frame.copy(Bitmap.Config.ARGB_8888, true);//ismutable
        Utils.bitmapToMat(frame32, frameCv);//frame32, frameCv);
        Imgproc.cvtColor(frameCv , frameCv , 1);//COLOR_RGBA2RGB
        //Mat imgARgb = new Mat(frameCv.rows(), frameCv.cols(), CV_32FC1);
        frameCv.convertTo(frameCv, CvType.CV_32F);//, 1.0, 0);
        Core.subtract(frameCv, new Scalar(123.0f, 117.0f, 104.0f), frameCv);
        frameCv.get(0, 0, inputValues);
        //TODO: if not using quantized
        if (!isUsingQuantized){
            inputTensor.write(inputValues, 0, inputValues.length);
            inputs.put(mInputLayer, inputTensor);
        } else {
            /**/
            /*Tf8 Buffer*/
            //TensorUtils.prepareTf8Inputs(network, mInputLayer, inputTensors, inputBuffers, frameCv, inputValues);
            //TensorUtils.prepareTf8Outputs(network, mOutputLayer, outputTensors, outputBuffers);
        }
        long inputProcessTime = System.currentTimeMillis() - inputProcessStart;
        //long preProcessTime = System.currentTimeMillis();
        //Log.d(LOGTAG, "prepareInputs size " + inputs.entrySet().iterator().next().getValue().getSize());
        long modelExecutionStart = System.currentTimeMillis();
        if (!isUsingQuantized){
            final Map<String, FloatTensor> outputs = network.execute(inputs);
            long modelExecutionTime = System.currentTimeMillis() - modelExecutionStart;
            //Log.d(LOGTAG + "_checkTimeFd", "preprocess time = " + inputProcessTime + " | execute time = " + modelExecutionTime);
            long postProcessStart = System.currentTimeMillis();
            final List<Bbox> bboxes = convertOutputs(outputs);
            long postProcessTime = System.currentTimeMillis() - postProcessStart;
            Log.d(LOGTAG + "_checkTimeFd", "preprocess time = " + inputProcessTime + " | execute time = " + modelExecutionTime + " | post process time = " + postProcessTime);
            return bboxes;
        } else {
            network.execute(inputTensors, outputTensors);
            long modelExecutionTime = System.currentTimeMillis() - modelExecutionStart;
            Log.d(LOGTAG + "_checkTimeFd", "preprocess time = " + inputProcessTime + " | execute time = " + modelExecutionTime);
            final List<Bbox> bboxes = convertTf8Outputs();
            return bboxes;
        }
        // long preProcessRunTime = preProcessTime - startTime;
        // long dlcRunTime = System.currentTimeMillis() - preProcessTime;
        //Log.d(LOGTAG, "DLC: Frame processed preprocess in: " + preProcessRunTime + " detect in: " + dlcRunTime);
        //Log.d(LOGTAG, "network executed");
        //long outputProcessStart = System.currentTimeMillis();
        //clear & releasr for next run
        // inputs.clear();
        // releaseTensors(outputs);
        //long outputProcessTime = System.currentTimeMillis() - outputProcessStart;
        //Log.d(LOGTAG + "_tf8_runtime", "prepareInput " + inputProcessTime + "ms |" + " execute " + modelExecutionTime + "ms |" +" post process: " + outputProcessTime + "ms");
    }

    /**
     * Decodes the raw network outputs against the anchor grid.
     * For each anchor: confidence = softmax over the 2-class scores; anchors
     * above threshold are decoded (variance 0.1 for centers/landmarks, 0.2 for
     * sizes), clamped to the image, and collected. The result is sorted by
     * descending confidence (Bbox.compareTo) so NMS can scan greedily.
     * NOTE(review): the threshold is hard-coded as 0.8 instead of using
     * CONFIDENCE_THRESHOLD; the `lock` local is unused.
     */
    private List<Bbox> buildBbox(float[] locs, float[] confidences, float[] landmarks)
    {
        final ArrayList<Bbox> bboxes = new ArrayList<Bbox>();
        ReadWriteLock lock = new ReentrantReadWriteLock();
        //int locIndex = 0;
        //int confIndex = 0;
        //int landmarkIndex = 0;
        //synchronized (bboxes) {
        for (int i = 0; i < anchors.size(); ++i)
        //IntStream.range(0, anchors.size() - 1).parallel().forEach(i ->
        {
            // Two-class softmax: conf = e^fg / (e^bg + e^fg).
            float cx = confidences[i * 2];
            float cy = confidences[i * 2 + 1];
            float conf = (float) (Math.exp(cy) / (Math.exp(cx) + Math.exp(cy)));
            // if (conf > CONFIDENCE_THRESHOLD) {
            if (conf > 0.8) {
                Anchor tmp = anchors.get(i);
                Anchor tmp1 = new Anchor();
                Bbox result = new Bbox();
                // Decode center/size deltas with variances 0.1 / 0.2.
                tmp1.cx = (float) (tmp.cx + locs[i * 4] * 0.1 * tmp.sx);
                tmp1.cy = (float) (tmp.cy + locs[i * 4 + 1] * 0.1 * tmp.sy);
                tmp1.sx = (float) (tmp.sx * Math.exp(locs[i * 4 + 2] * 0.2));
                tmp1.sy = (float) (tmp.sy * Math.exp(locs[i * 4 + 3] * 0.2));
                // Extract bbox and confidences
                result.x1 = (tmp1.cx - tmp1.sx / 2) * IMG_WIDTH;
                if (result.x1 < 0) {
                    result.x1 = 0;
                }
                result.y1 = (tmp1.cy - tmp1.sy / 2) * IMG_HEIGHT;
                if (result.y1 < 0) {
                    result.y1 = 0;
                }
                result.x2 = (tmp1.cx + tmp1.sx / 2) * IMG_WIDTH;
                if (result.x2 > IMG_WIDTH) {
                    result.x2 = IMG_WIDTH;
                }
                result.y2 = (tmp1.cy + tmp1.sy / 2) * IMG_HEIGHT;
                if (result.y2 > IMG_HEIGHT) {
                    result.y2 = IMG_HEIGHT;
                }
                result.conf = conf;
                // Decode the 5 facial landmarks (pixel coordinates).
                for (int j = 0; j < 5; ++j) {
                    float lx = (tmp.cx + (landmarks[i * 10 + j * 2]) * 0.1f * tmp.sx) * IMG_WIDTH;
                    float ly = (tmp.cy + (landmarks[i * 10 + j * 2 + 1]) * 0.1f * tmp.sy) * IMG_HEIGHT;
                    result.landmarks[j] = new PointF(lx, ly);
                }
                //Check face probability/*
                /*
                Mat inputAlign = prepareAlignNative(mFrameCv, result);
                result.alignMat = inputAlign;
                Mat inputMat = new Mat(inputAlign.rows(), inputAlign.cols(), CV_32FC1);
                inputAlign.convertTo(inputMat, CvType.CV_32F, 1.0 / 255, 0);
                Core.subtract(inputMat, new Scalar(0.485f, 0.456f, 0.406f), inputMat);
                Core.divide(inputMat, new Scalar(0.229f, 0.224f, 0.225f), inputMat);
                long filterStart = System.currentTimeMillis();
                final float[] filterOutputs = mTrueFaceDetector.filterTrueFace(inputMat);
                long filterRunTime = System.currentTimeMillis() - filterStart;
                Log.d(LOGTAG, "DLC: Filtered true face in: " + filterRunTime);
                double faceProb = Math.exp(filterOutputs[0]) / (Math.exp(filterOutputs[0]) + Math.exp(filterOutputs[1]));
                float finalProb = (float)(0.6 * faceProb + 0.4 * conf);
                Log.d("FaceProb", "" + finalProb);
                if (finalProb >= TRUE_FACE_THRESHOLD) {
                    result.faceProb = finalProb;
                    //lock.writeLock().lock();
                    bboxes.add(result);
                    //lock.writeLock().unlock();
                }
                */
                bboxes.add(result);
            }
        }
        //});
        //}
        Collections.sort(bboxes);
        return bboxes;
    }

    /**
     * Greedy non-maximum suppression. Assumes {@code bboxes} is sorted by
     * descending confidence (buildBbox guarantees this): each box is kept
     * unless it overlaps an already-kept box above IOU_THRESHOLD.
     */
    private List<Bbox> nms(List<Bbox> bboxes) {
        List<Bbox> selected = new ArrayList<Bbox>();
        for (Bbox boxA : bboxes) {
            boolean shouldSelect = true;
            // Does the current box overlap one of the selected boxes more than the
            // given threshold amount? Then it's too similar, so don't keep it.
            for (Bbox boxB : selected) {
                if (IOU(boxA, boxB) > IOU_THRESHOLD) {
                    shouldSelect = false;
                    break;
                }
            }
            // This bounding box did not overlap too much with any previously selected
            // bounding box, so we'll keep it.
            if (shouldSelect) {
                selected.add(boxA);
            }
        }
        return selected;
    }

    /**
     * Intersection-over-union of two boxes; returns 0 when either box has
     * non-positive area.
     */
    private float IOU(Bbox a, Bbox b ) {
        float areaA = (a.x2 - a.x1) * (a.y2 - a.y1);
        if (areaA <= 0) {
            return 0;
        }
        float areaB = (b.x2 - b.x1) * (b.y2 - b.y1);
        if (areaB <= 0) {
            return 0;
        }
        float intersectionMinX = Math.max(a.x1, b.x1);
        float intersectionMinY = Math.max(a.y1, b.y1);
        float intersectionMaxX = Math.min(a.x2, b.x2);
        float intersectionMaxY = Math.min(a.y2, b.y2);
        float intersectionArea = Math.max(intersectionMaxY - intersectionMinY, 0) *
                Math.max(intersectionMaxX - intersectionMinX, 0);
        return intersectionArea / (areaA + areaB - intersectionArea);
    }

    // Releases every SNPE tensor in the given maps (native resources).
    @SafeVarargs
    private final void releaseTensors(Map<String, ? extends Tensor>... tensorMaps) {
        for (Map<String, ? extends Tensor> tensorMap: tensorMaps) {
            for (Tensor tensor: tensorMap.values()) {
                tensor.release();
            }
        }
    }

    /**
     * Frees the network and the TF8 user-buffer tensors. Must be called when
     * the detector is no longer needed; SNPE resources are native allocations.
     */
    public void close() {
        network.release();
        // releaseTensors(inputs);
        releaseTf8Tensors(inputTensors, outputTensors);
    }

    // Releases every TF8 user-buffer tensor in the given maps.
    private final void releaseTf8Tensors(Map<String, ? extends UserBufferTensor>... tensorMaps) {
        for (Map<String, ? extends UserBufferTensor> tensorMap: tensorMaps) {
            for (UserBufferTensor tensor: tensorMap.values()) {
                tensor.release();
            }
        }
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.detector;
import org.opencv.core.Mat;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.ListIterator;
public class Slicing {
int imgHeight;
int imgWidth;
int noRows;
int noCols;
double rowsOverlap;
double colsOverlap;
int startInd = 0;
public Slicing(int imgHeight_,
int imgWidth_,
int noRows_,
int noCols_,
double rowsOverlap_,
double colsOverlap_,
int startInd_)
{
this.imgHeight = imgHeight_;
this.imgWidth = imgWidth_;
this.noCols = noCols_;
this.noRows = noRows_;
this.rowsOverlap = rowsOverlap_;
this.colsOverlap = colsOverlap_;
this.startInd = startInd_;
}
public ArrayList<Integer> getSlice()
{
ArrayList<Integer> divisionList = new ArrayList<Integer>();
ArrayList<Integer> rowVals = new ArrayList<Integer>();
ArrayList<Integer> colVals = new ArrayList<Integer>();
ArrayList<Integer> zeroRows = new ArrayList<Integer>();
ArrayList<Integer> finalRows = new ArrayList<Integer>();
ArrayList<Integer> zeroCols = new ArrayList<Integer>();
ArrayList<Integer> finalCols = new ArrayList<Integer>();
ArrayList<Integer> yMins = new ArrayList<Integer>();
ArrayList<Integer> xMins = new ArrayList<Integer>();
ArrayList<Integer> yMaxs = new ArrayList<Integer>();
ArrayList<Integer> xMaxs = new ArrayList<Integer>();
ArrayList<Integer> xMinMesh = new ArrayList<Integer>();
ArrayList<Integer> yMinMesh = new ArrayList<Integer>();
ArrayList<Integer> xMaxMesh = new ArrayList<Integer>();
ArrayList<Integer> yMaxMesh = new ArrayList<Integer>();
int divisionH = this.imgHeight / noRows;
int remainderH = this.imgHeight % noRows;
int divisionW = imgWidth/ noCols;
int remainderW = imgWidth % noCols;
int rowsOverlapPixels = (int)(rowsOverlap*divisionH/2);
int colsOverlapPixels = (int)(colsOverlap*divisionW/2);
int startH = (int)(Math.floor(remainderH/2));
int finalH = (int)(this.imgHeight - Math.ceil((remainderH/2)));
int startW = (int)(Math.floor(remainderW/2));
int finalW = (int)(this.imgWidth - Math.ceil(remainderW/2));
int rowValsInit = 0;
while (rowValsInit<finalH+1){
if (rowValsInit<finalH+1){
rowVals.add(rowValsInit);
}
rowValsInit += divisionH;
}
int colValsInit = 0;
while (colValsInit<finalW+1){
if (colValsInit<finalW+1){
colVals.add(colValsInit);
}
colValsInit += divisionW;
}
for (int i=0 ;i < this.noRows;i++){
zeroRows.add(0);
}
for (int i=0 ;i < this.noRows;i++){
finalRows.add(finalH);
}
for (int i=0 ;i < this.noCols;i++){
zeroCols.add(0);
}
for (int i=0 ;i < this.noCols;i++){
finalCols.add(finalW);
}
for (int i = 0; i < zeroRows.size(); i++){
yMins.add(Math.max(zeroRows.get(i),rowVals.get(i)-rowsOverlapPixels));
}
for (int i = 0; i < zeroCols.size(); i++){
xMins.add(Math.max(zeroCols.get(i),colVals.get(i)-colsOverlapPixels));
}
for (int i = 0; i < finalRows.size(); i++){
yMaxs.add(Math.min(finalRows.get(i),rowVals.get(i+1)+rowsOverlapPixels));
}
for (int i = 0; i < finalCols.size(); i++){
xMaxs.add(Math.min(finalCols.get(i),colVals.get(i+1)+colsOverlapPixels));
}
for (int i = 0; i < yMins.size(); i++){
for (int j = 0 ; j < xMins.size();i++)
{
xMinMesh.add(xMins.get(j));
}
// xMinMesh.add(xMins);
}
// for (int i =0 ; i< xMins.size(); i++){
// for (int j = 0; j < yMins.size();i++){
// yMinMeshLine.add(yMins.get(j));
// yMinMesh.add(yMinMeshLine);
// yMinMeshLine.clear();
// }
// }
return xMinMesh;
}
}
package com.qualcomm.qti.snpe.imageclassifiers.detector;
import android.graphics.Bitmap;
import android.util.Log;
import com.qualcomm.qti.snpe.FloatTensor;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.TF8UserBufferTensor;
import com.qualcomm.qti.snpe.TensorAttributes;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
public class TensorUtils {
private static class Tf8Params {
int size;
int[] strides;
int stepExactly0;
float stepSize;
Tf8Params(int size, int[] strides) {
this.size = size;
this.strides = strides;
}
}
private static class Tf8Encoding {
float min;
float max;
float delta;
float offset;
}
private static final int TF8_SIZE = 1;
private static final int TF8_BITWIDTH = 8;
private static final int mStepExactly0 = 0;
private static final float mStepSize = 1.0f;
public static boolean prepareFloatInputs(NeuralNetwork neuralNetwork, String inputLayer,
FloatTensor inputTensor,
final Map<String, FloatTensor> inputs,
final Map<String, ByteBuffer> inputBuffers,
float[] inputValues,
Bitmap image) {
final int[] dimensions = inputTensor.getShape();
final boolean isGrayScale = (dimensions[dimensions.length - 1] == 1);
Mat frameCv = new Mat();//frame.getWidth(), frame.getHeight(), CvType.CV_8UC3);
Bitmap frame32 = image.copy(Bitmap.Config.ARGB_8888, true);//ismutable
Utils.bitmapToMat(frame32, frameCv);//frame32, frameCv);
Imgproc.cvtColor(frameCv , frameCv , 1);//COLOR_RGBA2RGB
//Mat imgARgb = new Mat(frameCv.rows(), frameCv.cols(), CV_32FC1);
frameCv.convertTo(frameCv, CvType.CV_32F);//, 1.0, 0);
Core.subtract(frameCv, new Scalar(123.0f, 117.0f, 104.0f), frameCv);
frameCv.get(0, 0, inputValues);
inputTensor.write(inputValues, 0, inputValues.length);
inputs.put(inputLayer, inputTensor);
return true;
}
public static boolean prepareTf8InputsWithMat(NeuralNetwork neuralNetwork, String inputLayer,
final Map<String, TF8UserBufferTensor> inputTensors,
final Map<String, ByteBuffer> inputBuffers,
float[] inputValues, Bitmap image) {
TensorAttributes inputAttributes = neuralNetwork.getTensorAttributes(inputLayer);
Tf8Params inputParams = resolveTf8Params(inputAttributes);
inputBuffers.put(inputLayer, ByteBuffer.allocateDirect(inputParams.size).order(ByteOrder.nativeOrder()));
//Get float[]
Mat frameCv = new Mat();//frame.getWidth(), frame.getHeight(), CvType.CV_8UC3);
Bitmap frame32 = image.copy(Bitmap.Config.ARGB_8888, true);//ismutable
Utils.bitmapToMat(frame32, frameCv);//frame32, frameCv);
Imgproc.cvtColor(frameCv , frameCv , 1);//COLOR_RGBA2RGB
frameCv.convertTo(frameCv, CvType.CV_32F);//, 1.0, 0);
Core.subtract(frameCv, new Scalar(123.0f, 117.0f, 104.0f), frameCv);
frameCv.get(0, 0, inputValues);
quantizeWithMat(frameCv, inputValues, inputBuffers.get(inputLayer), inputParams);
inputTensors.put(inputLayer, neuralNetwork.createTF8UserBufferTensor(
inputParams.size, inputParams.strides,
inputParams.stepExactly0, inputParams.stepSize,
inputBuffers.get(inputLayer)
));
return true;
}
public static boolean prepareTf8Inputs(NeuralNetwork neuralNetwork, String inputLayer,
final Map<String, TF8UserBufferTensor> inputTensors,
final Map<String, ByteBuffer> inputBuffers,
float[] inputValues, Bitmap image) {
TensorAttributes inputAttributes = neuralNetwork.getTensorAttributes(inputLayer);
Tf8Params inputParams = resolveTf8Params(inputAttributes);
inputBuffers.put(inputLayer, ByteBuffer.allocateDirect(inputParams.size).order(ByteOrder.nativeOrder()));
//Get float[]
// final int[] dimensions = inputAttributes.getDims();
// final boolean isGrayScale = (dimensions[dimensions.length - 1] == 1);
// if (!isGrayScale) {
// inputValues = loadRgbBitmapAsFloat(image);
// } else {
// inputValues = loadGrayScaleBitmapAsFloat(image);
// }
//Get float[] with opencvMat
Mat frameCv = new Mat();//frame.getWidth(), frame.getHeight(), CvType.CV_8UC3);
Bitmap frame32 = image.copy(Bitmap.Config.ARGB_8888, true);//ismutable
Utils.bitmapToMat(frame32, frameCv);//frame32, frameCv);
Imgproc.cvtColor(frameCv , frameCv , 1);//COLOR_RGBA2RGB
frameCv.convertTo(frameCv, CvType.CV_32F);//, 1.0, 0);
Core.subtract(frameCv, new Scalar(123.0f, 117.0f, 104.0f), frameCv);
frameCv.get(0, 0, inputValues);
quantize(inputValues, inputBuffers.get(inputLayer), inputParams);
inputTensors.put(inputLayer, neuralNetwork.createTF8UserBufferTensor(
inputParams.size, inputParams.strides,
inputParams.stepExactly0, inputParams.stepSize,
inputBuffers.get(inputLayer)
));
return true;
}
public static void prepareTf8Outputs(NeuralNetwork neuralNetwork, Set<String> outputLayers,
final Map<String, TF8UserBufferTensor> outputTensors,
final Map<String, ByteBuffer> outputBuffers) {
Iterator<String> outputIterator = outputLayers.iterator();
while (outputIterator.hasNext()) {
String outputLayer = outputIterator.next();
TensorAttributes outputAttributes = neuralNetwork.getTensorAttributes(outputLayer);
Tf8Params outputParams = resolveTf8Params(outputAttributes);
outputParams.stepExactly0 = mStepExactly0;
outputParams.stepSize = mStepSize;
outputBuffers.put(outputLayer, ByteBuffer.allocateDirect(outputParams.size).order(ByteOrder.nativeOrder()));
try {
outputTensors.put(outputLayer, neuralNetwork.createTF8UserBufferTensor(
outputParams.size, outputParams.strides,
outputParams.stepExactly0, outputParams.stepSize,
outputBuffers.get(outputLayer)));
} catch (Exception e) {
Log.d("HeadDetectorException", "" + e.getMessage());
}
}
}
public static float[] dequantize(TF8UserBufferTensor tensor, ByteBuffer buffer) {
final int outputSize = buffer.capacity();
final byte[] quantizedArray = new byte[outputSize];
buffer.get(quantizedArray);
final float[] dequantizedArray = new float[outputSize];
for (int i = 0; i < outputSize; i++) {
int quantizedValue = (int)quantizedArray[i] & 0xFF;
dequantizedArray[i] = tensor.getMin() + quantizedValue * tensor.getQuantizedStepSize();
}
return dequantizedArray;
}
private static Tf8Params resolveTf8Params(TensorAttributes attribute) {
int rank = attribute.getDims().length;
int[] strides = new int[rank];
strides[rank - 1] = TF8_SIZE;
for (int i = rank - 1; i > 0; i--) {
strides[i-1] = strides[i] * attribute.getDims()[i];
}
int bufferSize = TF8_SIZE;
for (int dim: attribute.getDims()) {
bufferSize *= dim;
}
return new Tf8Params(bufferSize, strides);
}
//Load rgb image as float
static float[] loadRgbBitmapAsFloat(Bitmap image) {
final int[] pixels = new int[image.getWidth() * image.getHeight()];
image.getPixels(pixels, 0, image.getWidth(), 0, 0,
image.getWidth(), image.getHeight());
final float[] pixelsBatched = new float[pixels.length * 3];
for (int y = 0; y < image.getHeight(); y++) {
for (int x = 0; x < image.getWidth(); x++) {
final int idx = y * image.getWidth() + x;
final int batchIdx = idx * 3;
final float[] rgb = extractColorChannels(pixels[idx]);
pixelsBatched[batchIdx] = rgb[0];
pixelsBatched[batchIdx + 1] = rgb[1];
pixelsBatched[batchIdx + 2] = rgb[2];
}
}
return pixelsBatched;
}
//Load gray scale bitmap as float
static float[] loadGrayScaleBitmapAsFloat(Bitmap image) {
final int[] pixels = new int[image.getWidth() * image.getHeight()];
image.getPixels(pixels, 0, image.getWidth(), 0, 0,
image.getWidth(), image.getHeight());
final float[] pixelsBatched = new float[pixels.length];
for (int y = 0; y < image.getHeight(); y++) {
for (int x = 0; x < image.getWidth(); x++) {
final int idx = y * image.getWidth() + x;
final int rgb = pixels[idx];
final float b = ((rgb) & 0xFF);
final float g = ((rgb >> 8) & 0xFF);
final float r = ((rgb >> 16) & 0xFF);
float grayscale = (float) (r * 0.3 + g * 0.59 + b * 0.11);
pixelsBatched[idx] = preProcess(grayscale);
}
}
return pixelsBatched;
}
private static float[] extractColorChannels(int pixel) {
float b = ((pixel) & 0xFF);
float g = ((pixel >> 8) & 0xFF);
float r = ((pixel >> 16) & 0xFF);
return new float[] {preProcess(r), preProcess(g), preProcess(b)};
}
private static float preProcess(float original) {
return (original - 128) / 128;
}
public static void quantizeWithMat(Mat cvMat, float[] src, ByteBuffer dst, Tf8Params tf8Params) {
Tf8Encoding encoding = getTf8EncodingWithMat(cvMat);
byte[] quantized = new byte[src.length];
for (int i = 0; i < src.length; i++) {
float data = Math.max(Math.min(src[i], encoding.max), encoding.min);
data = data / encoding.delta - encoding.offset;
quantized[i] = (byte) Math.round(data);
}
dst.put(quantized);
tf8Params.stepSize = encoding.delta;
tf8Params.stepExactly0 = Math.round(-encoding.min / encoding.delta);
}
public static void quantize(float[] src, ByteBuffer dst, Tf8Params tf8Params) {
Tf8Encoding encoding = getTf8Encoding(src);
byte[] quantized = new byte[src.length];
for (int i = 0; i < src.length; i++) {
float data = Math.max(Math.min(src[i], encoding.max), encoding.min);
data = data / encoding.delta - encoding.offset;
quantized[i] = (byte) Math.round(data);
}
dst.put(quantized);
tf8Params.stepSize = encoding.delta;
tf8Params.stepExactly0 = Math.round(-encoding.min / encoding.delta);
}
private static Tf8Encoding getTf8EncodingWithMat(Mat frameCv) {
Tf8Encoding encoding = new Tf8Encoding();
int num_steps = (int) Math.pow(2, TF8_BITWIDTH) - 1;
List<Mat> rgb = new ArrayList<>();
Float[] maxVals = new Float[3];
Float[] minVals = new Float[3];
Core.split(frameCv, rgb);
int i = 0;
for (Mat mat : rgb) {
Core.MinMaxLocResult result = Core.minMaxLoc(mat);
maxVals[i] = (float) result.maxVal;
minVals[i] = (float) result.minVal;
i++;
}
float newMax = Math.max(Collections.max(Arrays.asList(maxVals)), 0);
float newMin = Math.min(Collections.min(Arrays.asList(minVals)), 0);
float minRange = 0.1f;
newMax = Math.max(newMax, newMin + minRange);
encoding.delta = (newMax - newMin) / num_steps;
if (newMin < 0 && newMax > 0) {
float quantizedZero = Math.round(-newMin / encoding.delta);
quantizedZero = (float) Math.min(num_steps, Math.max(0.0, quantizedZero));
encoding.offset = -quantizedZero;
} else {
encoding.offset = Math.round(newMin / encoding.delta);
}
encoding.min = encoding.delta * encoding.offset;
encoding.max = encoding.delta * num_steps + encoding.min;
return encoding;
}
private static Tf8Encoding getTf8Encoding(float[] array) {
Tf8Encoding encoding = new Tf8Encoding();
int num_steps = (int) Math.pow(2, TF8_BITWIDTH) - 1;
float new_min = Math.min(getMin(array), 0);
float new_max = Math.max(getMax(array), 0);
float min_range = 0.1f;
new_max = Math.max(new_max, new_min + min_range);
encoding.delta = (new_max - new_min) / num_steps;
if (new_min < 0 && new_max > 0) {
float quantized_zero = Math.round(-new_min / encoding.delta);
quantized_zero = (float) Math.min(num_steps, Math.max(0.0, quantized_zero));
encoding.offset = -quantized_zero;
} else {
encoding.offset = Math.round(new_min / encoding.delta);
}
encoding.min = encoding.delta * encoding.offset;
encoding.max = encoding.delta * num_steps + encoding.min;
return encoding;
}
static float getMin(float[] array) {
float min = Float.MAX_VALUE;
for (float value : array) {
if (value < min) {
min = value;
}
}
return min;
}
static float getMax(float[] array) {
float max = Float.MIN_VALUE;
for (float value : array) {
if (value > max) {
max = value;
}
}
return max;
}
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.graphics.Bitmap;
import android.os.AsyncTask;
import android.util.Pair;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.imageclassifiers.Model;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController;
import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import java.util.Set;
public abstract class AbstractClassifyImageTask extends AsyncTask<Bitmap, Void, String[]> {
private static final String LOG_TAG = AbstractClassifyImageTask.class.getSimpleName();
private static final int FLOAT_SIZE = 4;
final String mInputLayer;
final String mOutputLayer;
private final ModelOverviewFragmentController mController;
final NeuralNetwork mNeuralNetwork;
final Model mModel;
final Bitmap mImage;
private FloatBuffer mMeanImage;
long mJavaExecuteTime = -1;
/**
 * Wires up the task's collaborators and resolves the single input/output
 * layer names of the network.
 *
 * @throws IllegalStateException if the network does not have exactly one
 *         input tensor and exactly one output tensor.
 */
AbstractClassifyImageTask(ModelOverviewFragmentController controller,
                          NeuralNetwork network, Bitmap image, Model model) {
    mModel = model;
    mImage = image;
    mNeuralNetwork = network;
    mController = controller;
    // This task only supports single-input / single-output networks.
    final Set<String> inputNames = mNeuralNetwork.getInputTensorsNames();
    final Set<String> outputNames = mNeuralNetwork.getOutputTensorsNames();
    if (inputNames.size() != 1 || outputNames.size() != 1) {
        throw new IllegalStateException("Invalid network input and/or output tensors.");
    }
    mInputLayer = inputNames.iterator().next();
    mOutputLayer = outputNames.iterator().next();
}
/**
 * Forwards the classification outcome to the controller on the UI thread:
 * success (with the measured Java-side execute time) when any labels were
 * produced, failure otherwise.
 */
@Override
protected void onPostExecute(String[] labels) {
    super.onPostExecute(labels);
    if (labels.length == 0) {
        mController.onClassificationFailed();
    } else {
        mController.onClassificationResult(labels, mJavaExecuteTime);
    }
}
/**
 * Loads a binary mean-image file into {@code mMeanImage} as a FloatBuffer.
 * If the file does not exist, this returns early and mMeanImage is left
 * unchanged (null on first call). If reading fails midway, the buffer is
 * replaced with a zero-filled one of the same size, so subtraction becomes
 * a no-op rather than using partial data.
 */
void loadMeanImageIfAvailable(File meanImage, final int imageSize) {
    ByteBuffer buffer = ByteBuffer.allocate(imageSize * FLOAT_SIZE)
            .order(ByteOrder.nativeOrder());
    if (!meanImage.exists()) {
        return;
    }
    FileInputStream fileInputStream = null;
    try {
        fileInputStream = new FileInputStream(meanImage);
        // Stream the file into the buffer in 1 KiB chunks.
        final byte[] chunk = new byte[1024];
        int read;
        while ((read = fileInputStream.read(chunk)) != -1) {
            buffer.put(chunk, 0, read);
        }
        buffer.flip();
    } catch (IOException e) {
        // Discard any partial read; fall back to an all-zero mean image.
        buffer = ByteBuffer.allocate(imageSize * FLOAT_SIZE);
    } finally {
        if (fileInputStream != null) {
            try {
                fileInputStream.close();
            } catch (IOException e) {
                // Ignore close failures.
            }
        }
    }
    mMeanImage = buffer.asFloatBuffer();
}
float[] loadRgbBitmapAsFloat(Bitmap image) {
final int[] pixels = new int[image.getWidth() * image.getHeight()];
image.getPixels(pixels, 0, image.getWidth(), 0, 0,
image.getWidth(), image.getHeight());
final float[] pixelsBatched = new float[pixels.length * 3];
for (int y = 0; y < image.getHeight(); y++) {
for (int x = 0; x < image.getWidth(); x++) {
final int idx = y * image.getWidth() + x;
final int batchIdx = idx * 3;
final float[] rgb = extractColorChannels(pixels[idx]);
pixelsBatched[batchIdx] = rgb[0];
pixelsBatched[batchIdx + 1] = rgb[1];
pixelsBatched[batchIdx + 2] = rgb[2];
}
}
return pixelsBatched;
}
float[] loadGrayScaleBitmapAsFloat(Bitmap image) {
final int[] pixels = new int[image.getWidth() * image.getHeight()];
image.getPixels(pixels, 0, image.getWidth(), 0, 0,
image.getWidth(), image.getHeight());
final float[] pixelsBatched = new float[pixels.length];
for (int y = 0; y < image.getHeight(); y++) {
for (int x = 0; x < image.getWidth(); x++) {
final int idx = y * image.getWidth() + x;
final int rgb = pixels[idx];
final float b = ((rgb) & 0xFF);
final float g = ((rgb >> 8) & 0xFF);
final float r = ((rgb >> 16) & 0xFF);
float grayscale = (float) (r * 0.3 + g * 0.59 + b * 0.11);
pixelsBatched[idx] = preProcess(grayscale);
}
}
return pixelsBatched;
}
Pair<Integer, Float>[] topK(int k, final float[] tensor) {
final boolean[] selected = new boolean[tensor.length];
final Pair<Integer, Float> topK[] = new Pair[k];
int count = 0;
while (count < k) {
final int index = top(tensor, selected);
selected[index] = true;
topK[count] = new Pair<>(index, tensor[index]);
count++;
}
return topK;
}
private int top(final float[] array, boolean[] selected) {
int index = 0;
float max = -1.f;
for (int i = 0; i < array.length; i++) {
if (selected[i]) {
continue;
}
if (array[i] > max) {
max = array[i];
index = i;
}
}
return index;
}
private float[] extractColorChannels(int pixel) {
String modelName = mModel.name;
float b = ((pixel) & 0xFF);
float g = ((pixel >> 8) & 0xFF);
float r = ((pixel >> 16) & 0xFF);
if (modelName.equals("inception_v3")) {
return new float[] {preProcess(r), preProcess(g), preProcess(b)};
} else if (modelName.equals("alexnet") && mMeanImage != null) {
return new float[] {preProcess(b), preProcess(g), preProcess(r)};
} else if (modelName.equals("googlenet") && mMeanImage != null) {
return new float[] {preProcess(b), preProcess(g), preProcess(r)};
} else {
return new float[] {preProcess(r), preProcess(g), preProcess(b)};
}
}
private float preProcess(float original) {
String modelName = mModel.name;
if (modelName.equals("inception_v3")) {
return (original - 128) / 128;
} else if (modelName.equals("alexnet") && mMeanImage != null) {
return original - mMeanImage.get();
} else if (modelName.equals("googlenet") && mMeanImage != null) {
return original - mMeanImage.get();
} else {
return original;
}
}
float getMin(float[] array) {
float min = Float.MAX_VALUE;
for (float value : array) {
if (value < min) {
min = value;
}
}
return min;
}
float getMax(float[] array) {
float max = Float.MIN_VALUE;
for (float value : array) {
if (value > max) {
max = value;
}
}
return max;
}
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.util.Pair;
import com.qualcomm.qti.snpe.FloatTensor;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.Tensor;
import com.qualcomm.qti.snpe.imageclassifiers.Model;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Classifies a bitmap through the float-tensor (ITENSOR) input/output path:
 * writes the pre-processed pixels into a {@link FloatTensor}, executes the
 * network, and returns the single best label with its score.
 */
public class ClassifyImageWithFloatTensorTask extends AbstractClassifyImageTask {

    private static final String LOG_TAG = ClassifyImageWithFloatTensorTask.class.getSimpleName();

    public ClassifyImageWithFloatTensorTask(ModelOverviewFragmentController controller,
                                            NeuralNetwork network, Bitmap image, Model model) {
        super(controller, network, image, model);
    }

    @Override
    protected String[] doInBackground(Bitmap... params) {
        final List<String> labels = new LinkedList<>();

        // Allocate the input tensor from the model's declared input shape.
        final FloatTensor inputTensor = mNeuralNetwork.createFloatTensor(
                mNeuralNetwork.getInputTensorsShapes().get(mInputLayer));
        loadMeanImageIfAvailable(mModel.meanImage, inputTensor.getSize());

        // A trailing dimension of 1 marks a single-channel (gray-scale) network.
        final int[] shape = inputTensor.getShape();
        final float[] pixelValues = (shape[shape.length - 1] == 1)
                ? loadGrayScaleBitmapAsFloat(mImage)
                : loadRgbBitmapAsFloat(mImage);
        inputTensor.write(pixelValues, 0, pixelValues.length);

        final Map<String, FloatTensor> inputs = new HashMap<>();
        inputs.put(mInputLayer, inputTensor);

        // Time only the execute call itself.
        final long startMs = SystemClock.elapsedRealtime();
        final Map<String, FloatTensor> outputs = mNeuralNetwork.execute(inputs);
        mJavaExecuteTime = SystemClock.elapsedRealtime() - startMs;

        // Pick the single best class from the expected output tensor.
        for (Map.Entry<String, FloatTensor> entry : outputs.entrySet()) {
            if (!entry.getKey().equals(mOutputLayer)) {
                continue;
            }
            final FloatTensor outputTensor = entry.getValue();
            final float[] scores = new float[outputTensor.getSize()];
            outputTensor.read(scores, 0, scores.length);
            for (Pair<Integer, Float> best : topK(1, scores)) {
                labels.add(mModel.labels[best.first]);
                labels.add(String.valueOf(best.second));
            }
        }

        releaseTensors(inputs, outputs);
        return labels.toArray(new String[labels.size()]);
    }

    /** Releases every tensor contained in each of the supplied maps. */
    @SafeVarargs
    private final void releaseTensors(Map<String, ? extends Tensor>... tensorMaps) {
        for (Map<String, ? extends Tensor> map : tensorMaps) {
            for (Tensor tensor : map.values()) {
                tensor.release();
            }
        }
    }
}
/*
* Copyright (c) 2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.graphics.Bitmap;
import android.os.SystemClock;
import android.util.Pair;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.TF8UserBufferTensor;
import com.qualcomm.qti.snpe.TensorAttributes;
import com.qualcomm.qti.snpe.UserBufferTensor;
import com.qualcomm.qti.snpe.imageclassifiers.Model;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
/**
 * Classifies a bitmap using 8-bit quantized (TF8) user-buffer tensors:
 * the float pixel data is quantized on the Java side into a direct byte
 * buffer, the network executes against user-supplied buffers, and the
 * quantized output is converted back to floats before top-K selection.
 */
public class ClassifyImageWithUserBufferTf8Task extends AbstractClassifyImageTask {
    private static final String LOG_TAG = ClassifyImageWithUserBufferTf8Task.class.getSimpleName();
    // One TF8 element occupies a single byte.
    private static final int TF8_SIZE = 1;
    // Bit width of the TF8 fixed-point encoding (2^8 - 1 quantization steps).
    private static final int TF8_BITWIDTH = 8;
    // Fixed quantization parameters assigned to the OUTPUT tensor before execution.
    private static final int mStepExactly0 = 0;
    private static final float mStepSize = 1.0f;
    public ClassifyImageWithUserBufferTf8Task(ModelOverviewFragmentController controller,
            NeuralNetwork network, Bitmap image, Model model) {
        super(controller, network, image, model);
    }
    @Override
    protected String[] doInBackground(Bitmap... params) {
        final List<String> result = new LinkedList<>();
        final Map<String, TF8UserBufferTensor> inputTensors = new HashMap<>();
        final Map<String, TF8UserBufferTensor> outputTensors = new HashMap<>();
        final Map<String, ByteBuffer> inputBuffers = new HashMap<>();
        final Map<String, ByteBuffer> outputBuffers = new HashMap<>();
        // Quantize the bitmap into the input buffer and wrap it in a tensor.
        boolean status = prepareInputs(inputTensors, inputBuffers);
        if (!status) {
            // Empty array signals failure to onPostExecute.
            return new String[0];
        }
        prepareOutputs(outputTensors, outputBuffers);
        final long javaExecuteStart = SystemClock.elapsedRealtime();
        status = mNeuralNetwork.execute(inputTensors, outputTensors);
        final long javaExecuteEnd = SystemClock.elapsedRealtime();
        if (!status) {
            return new String[0];
        }
        mJavaExecuteTime = javaExecuteEnd - javaExecuteStart;
        // Convert the quantized output bytes back to floats, then keep the top-1 label.
        float[] outputValues = dequantize(outputTensors.get(mOutputLayer), outputBuffers.get(mOutputLayer));
        for (Pair<Integer, Float> pair : topK(1, outputValues)) {
            result.add(mModel.labels[pair.first]);
            result.add(String.valueOf(pair.second));
        }
        String[] resultString = result.toArray(new String[result.size()]);
        releaseTensors(inputTensors, outputTensors);
        return resultString;
    }
    /**
     * Builds the quantized input buffer and its TF8 tensor.
     * Always returns true; the boolean exists for the caller's failure path.
     *
     * @param inputTensors receives the created tensor keyed by input layer name
     * @param inputBuffers receives the backing direct byte buffer
     */
    private boolean prepareInputs(final Map<String, TF8UserBufferTensor> inputTensors,
                                  final Map<String, ByteBuffer> inputBuffers) {
        TensorAttributes inputAttributes = mNeuralNetwork.getTensorAttributes(mInputLayer);
        Tf8Params inputParams = resolveTf8Params(inputAttributes);
        // Must be a direct, native-order buffer for zero-copy user-buffer execution.
        inputBuffers.put(mInputLayer, ByteBuffer.allocateDirect(inputParams.size).order(ByteOrder.nativeOrder()));
        loadMeanImageIfAvailable(mModel.meanImage, inputParams.size);
        // A trailing dimension of 1 marks a gray-scale network input.
        final int[] dimensions = inputAttributes.getDims();
        final boolean isGrayScale = (dimensions[dimensions.length -1] == 1);
        float[] imageBitmapAsFloat;
        if (!isGrayScale) {
            imageBitmapAsFloat = loadRgbBitmapAsFloat(mImage);
        } else {
            imageBitmapAsFloat = loadGrayScaleBitmapAsFloat(mImage);
        }
        // Quantize fills the buffer AND sets inputParams.stepSize/stepExactly0.
        quantize(imageBitmapAsFloat, inputBuffers.get(mInputLayer), inputParams);
        inputTensors.put(mInputLayer, mNeuralNetwork.createTF8UserBufferTensor(
                inputParams.size, inputParams.strides,
                inputParams.stepExactly0, inputParams.stepSize,
                inputBuffers.get(mInputLayer)));
        return true;
    }
    /**
     * Builds the output buffer and TF8 tensor using the fixed class-level
     * quantization parameters (mStepExactly0 / mStepSize).
     */
    private void prepareOutputs(final Map<String, TF8UserBufferTensor> outputTensors,
                                final Map<String, ByteBuffer> outputBuffers) {
        TensorAttributes outputAttributes = mNeuralNetwork.getTensorAttributes(mOutputLayer);
        Tf8Params outputParams = resolveTf8Params(outputAttributes);
        outputParams.stepExactly0 = mStepExactly0;
        outputParams.stepSize = mStepSize;
        outputBuffers.put(mOutputLayer, ByteBuffer.allocateDirect(outputParams.size).order(ByteOrder.nativeOrder()));
        outputTensors.put(mOutputLayer, mNeuralNetwork.createTF8UserBufferTensor(
                outputParams.size, outputParams.strides,
                outputParams.stepExactly0, outputParams.stepSize,
                outputBuffers.get(mOutputLayer)));
    }
    /** Releases every tensor in each supplied map. */
    @SafeVarargs
    private final void releaseTensors(Map<String, ? extends UserBufferTensor>... tensorMaps) {
        for (Map<String, ? extends UserBufferTensor> tensorMap: tensorMaps) {
            for (UserBufferTensor tensor: tensorMap.values()) {
                tensor.release();
            }
        }
    }
    /**
     * Quantizes {@code src} into {@code dst} as unsigned 8-bit values using a
     * per-image affine encoding, and records the resulting stepSize /
     * stepExactly0 in {@code tf8Params} for tensor creation.
     */
    private void quantize(float[] src, ByteBuffer dst, Tf8Params tf8Params) {
        Tf8Encoding encoding = getTf8Encoding(src);
        byte[] quantized = new byte[src.length];
        for (int i = 0; i < src.length; i++) {
            // Clamp to the representable range, then map to a step index.
            // offset is the negated zero-point, so this equals (data - min) / delta.
            float data = Math.max(Math.min(src[i], encoding.max), encoding.min);
            data = data / encoding.delta - encoding.offset;
            quantized[i] = (byte) Math.round(data);
        }
        dst.put(quantized);
        tf8Params.stepSize = encoding.delta;
        tf8Params.stepExactly0 = Math.round(-encoding.min / encoding.delta);
    }
    /**
     * Computes the TF8 affine encoding (delta, offset, min, max) for the data:
     * the range always includes zero, spans at least {@code min_range}, and is
     * divided into 2^TF8_BITWIDTH - 1 steps with an exactly-representable zero.
     */
    private Tf8Encoding getTf8Encoding(float[] array) {
        Tf8Encoding encoding = new Tf8Encoding();
        int num_steps = (int) Math.pow(2, TF8_BITWIDTH) - 1;
        float new_min = Math.min(getMin(array), 0);
        float new_max = Math.max(getMax(array), 0);
        float min_range = 0.1f;
        new_max = Math.max(new_max, new_min + min_range);
        encoding.delta = (new_max - new_min) / num_steps;
        if (new_min < 0 && new_max > 0) {
            // Snap the zero point onto a quantization step.
            float quantized_zero = Math.round(-new_min / encoding.delta);
            quantized_zero = (float) Math.min(num_steps, Math.max(0.0, quantized_zero));
            encoding.offset = -quantized_zero;
        } else {
            encoding.offset = Math.round(new_min / encoding.delta);
        }
        // Recompute min/max from the snapped offset so they align to the grid.
        encoding.min = encoding.delta * encoding.offset;
        encoding.max = encoding.delta * num_steps + encoding.min;
        return encoding;
    }
    /**
     * Converts the quantized output bytes back to floats using the tensor's
     * reported min and step size: value = min + step * quantizedByte.
     */
    private float[] dequantize(TF8UserBufferTensor tensor, ByteBuffer buffer) {
        final int outputSize = buffer.capacity();
        final byte[] quantizedArray = new byte[outputSize];
        buffer.get(quantizedArray);
        final float[] dequantizedArray = new float[outputSize];
        for (int i = 0; i < outputSize; i++) {
            // Mask to treat the byte as unsigned 0..255.
            int quantizedValue = (int)quantizedArray[i] & 0xFF;
            dequantizedArray[i] = tensor.getMin() + quantizedValue * tensor.getQuantizedStepSize();
        }
        return dequantizedArray;
    }
    /**
     * Derives the row-major byte strides and total buffer size (in bytes)
     * for a TF8 tensor with the given dimensions.
     */
    private Tf8Params resolveTf8Params(TensorAttributes attribute) {
        int rank = attribute.getDims().length;
        int[] strides = new int[rank];
        strides[rank - 1] = TF8_SIZE;
        for (int i = rank - 1; i > 0; i--) {
            strides[i-1] = strides[i] * attribute.getDims()[i];
        }
        int bufferSize = TF8_SIZE;
        for (int dim: attribute.getDims()) {
            bufferSize *= dim;
        }
        return new Tf8Params(bufferSize, strides);
    }
    /** Buffer size, strides and quantization parameters for one TF8 tensor. */
    private class Tf8Params {
        int size;          // total buffer size in bytes
        int[] strides;     // row-major byte strides
        int stepExactly0;  // quantized index representing 0.0
        float stepSize;    // value delta per quantization step
        Tf8Params(int size, int[] strides) {
            this.size = size;
            this.strides = strides;
        }
    }
    /** Affine TF8 encoding: real range [min, max], step delta, negated zero-point offset. */
    private class Tf8Encoding {
        float min;
        float max;
        float delta;
        float offset;
    }
}
/*
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.os.AsyncTask;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController;
import java.io.File;
/**
 * Decodes an image file off the UI thread and hands the resulting bitmap
 * back to the {@link ModelOverviewFragmentController}.
 */
public class LoadImageTask extends AsyncTask<File, Void, Bitmap> {

    private final ModelOverviewFragmentController mController;
    private final File mImageFile;

    public LoadImageTask(ModelOverviewFragmentController controller, final File imageFile) {
        mImageFile = imageFile;
        mController = controller;
    }

    @Override
    protected Bitmap doInBackground(File... params) {
        final String path = mImageFile.getAbsolutePath();
        return BitmapFactory.decodeFile(path);
    }

    @Override
    protected void onPostExecute(Bitmap bitmap) {
        super.onPostExecute(bitmap);
        // NOTE(review): decodeFile returns null on failure; presumably the
        // controller tolerates a null bitmap — confirm onBitmapLoaded's contract.
        mController.onBitmapLoaded(mImageFile, bitmap);
    }
}
/*
* Copyright (c) 2016-2021 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.content.Context;
import android.os.AsyncTask;
import android.util.Log;
import com.qualcomm.qti.snpe.imageclassifiers.Model;
import com.qualcomm.qti.snpe.imageclassifiers.ModelCatalogueFragmentController;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.LinkedHashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Set;
/**
 * Scans the app's external "models" directory on a background thread and
 * builds a {@link Model} descriptor for every model folder the controller
 * reports as available.
 */
public class LoadModelsTask extends AsyncTask<Void, Void, Set<Model>> {

    public static final String MODEL_DLC_FILE_NAME = "model.dlc";
    public static final String MODEL_MEAN_IMAGE_FILE_NAME = "mean_image.bin";
    public static final String LABELS_FILE_NAME = "labels.txt";
    public static final String IMAGES_FOLDER_NAME = "images";
    public static final String RAW_EXT = ".raw";
    public static final String UDO_PACKAGE = "UdoPackageReg.so";
    public static final String UDO_PACKAGE_DIR = "/udo/arm64-v8a";
    public static final String JPG_EXT = ".jpg";

    private static final String LOG_TAG = LoadModelsTask.class.getSimpleName();

    private final ModelCatalogueFragmentController mController;
    private final Context mContext;

    public LoadModelsTask(Context context, ModelCatalogueFragmentController controller) {
        // Hold the application context only — never a leakable Activity context.
        mContext = context.getApplicationContext();
        mController = controller;
    }

    @Override
    protected Set<Model> doInBackground(Void... params) {
        final Set<Model> result = new LinkedHashSet<>();
        final File modelsRoot = mContext.getExternalFilesDir("models");
        // getExternalFilesDir returns null when external storage is unavailable.
        if (modelsRoot != null) {
            result.addAll(createModels(modelsRoot));
        }
        return result;
    }

    @Override
    protected void onPostExecute(Set<Model> models) {
        mController.onModelsLoaded(models);
    }

    /** Builds a Model for each sub-directory whose name the controller lists as available. */
    private Set<Model> createModels(File modelsRoot) {
        final Set<Model> models = new LinkedHashSet<>();
        final Set<String> availableModels = mController.getAvailableModels();
        final File[] children = modelsRoot.listFiles();
        if (children == null) {
            // listFiles() returns null on I/O error or a non-directory path;
            // treat as "no models" instead of crashing with an NPE.
            return models;
        }
        for (File child : children) {
            if (!child.isDirectory() || !availableModels.contains(child.getName())) {
                continue;
            }
            try {
                models.add(createModel(child));
            } catch (IOException e) {
                Log.e(LOG_TAG, "Failed to load model from model directory.", e);
            }
        }
        return models;
    }

    /**
     * Assembles a Model descriptor from a single model directory: DLC file,
     * mean image, UDO package locations, sample images and label list.
     *
     * @throws IOException if the labels file cannot be read
     */
    private Model createModel(File modelDir) throws IOException {
        final Model model = new Model();
        model.name = modelDir.getName();
        model.file = new File(modelDir, MODEL_DLC_FILE_NAME);
        model.meanImage = new File(modelDir, MODEL_MEAN_IMAGE_FILE_NAME);
        final File images = new File(modelDir, IMAGES_FOLDER_NAME);
        // Paths to UDO libs: the registration package lives under the app's
        // files dir, the arm64 implementation libs under the model dir.
        String udoDir = mContext.getFilesDir().getPath() + "/" + model.name + "/udo/";
        File udoDirFile = new File(udoDir);
        model.udoConfig = new File(udoDirFile, UDO_PACKAGE);
        String udoDirPath = modelDir.getAbsolutePath() + UDO_PACKAGE_DIR;
        model.udoArmDir = new File(udoDirPath);
        if (images.isDirectory()) {
            model.rawImages = images.listFiles(new FileFilter() {
                @Override
                public boolean accept(File file) {
                    return file.getName().endsWith(RAW_EXT);
                }
            });
            model.jpgImages = images.listFiles(new FileFilter() {
                @Override
                public boolean accept(File file) {
                    return file.getName().endsWith(JPG_EXT);
                }
            });
        }
        model.labels = loadLabels(new File(modelDir, LABELS_FILE_NAME));
        return model;
    }

    /**
     * Reads the labels file, one label per line.
     *
     * @throws IOException if the file is missing or unreadable
     */
    private String[] loadLabels(File labelsFile) throws IOException {
        final List<String> list = new LinkedList<>();
        // try-with-resources: the original never closed this reader (fd leak).
        try (BufferedReader inputStream = new BufferedReader(
                new InputStreamReader(new FileInputStream(labelsFile)))) {
            String line;
            while ((line = inputStream.readLine()) != null) {
                list.add(line);
            }
        }
        return list.toArray(new String[list.size()]);
    }
}
/*
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
package com.qualcomm.qti.snpe.imageclassifiers.tasks;
import android.app.Application;
import android.os.AsyncTask;
import android.os.SystemClock;
import android.util.Log;
import com.qualcomm.qti.snpe.NeuralNetwork;
import com.qualcomm.qti.snpe.SNPE;
import com.qualcomm.qti.snpe.imageclassifiers.Model;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController;
import com.qualcomm.qti.snpe.imageclassifiers.ModelOverviewFragmentController.SupportedTensorFormat;
import java.io.File;
import java.io.IOException;
/**
 * Builds a SNPE {@link NeuralNetwork} from a model's DLC file on a background
 * thread, registering any UDO (user-defined operation) packages first, and
 * reports the network (or failure) to the controller.
 */
public class LoadNetworkTask extends AsyncTask<File, Void, NeuralNetwork> {

    private static final String LOG_TAG = LoadNetworkTask.class.getSimpleName();

    private final ModelOverviewFragmentController mController;
    private final Model mModel;
    private final Application mApplication;
    private final NeuralNetwork.Runtime mTargetRuntime;
    private final SupportedTensorFormat mTensorFormat;
    private boolean mUnsignedPD;
    /** Wall-clock duration of builder.build() in ms; -1 if the build never completed. */
    private long mLoadTime = -1;

    public LoadNetworkTask(final Application application,
                           final ModelOverviewFragmentController controller,
                           final Model model,
                           final NeuralNetwork.Runtime targetRuntime,
                           final SupportedTensorFormat tensorFormat,
                           boolean unsignedPD) {
        mApplication = application;
        mController = controller;
        mModel = model;
        mTargetRuntime = targetRuntime;
        mTensorFormat = tensorFormat;
        mUnsignedPD = unsignedPD;
    }

    @Override
    protected NeuralNetwork doInBackground(File... params) {
        NeuralNetwork network = null;
        try {
            if (mModel.udoConfig.exists()) {
                // Load every native UDO implementation library before
                // registering the UDO package with SNPE.
                final File[] udoLibs = mModel.udoArmDir.listFiles();
                if (udoLibs != null) {
                    // listFiles() returns null when the directory is missing or
                    // unreadable; the original code would NPE in the for-loop.
                    String udoDir = mModel.udoConfig.getParent();
                    for (final File file : udoLibs) {
                        System.load(udoDir + "/" + file.getName());
                    }
                }
                SNPE.addOpPackage(mApplication, mModel.udoConfig.getAbsolutePath());
            }
            final SNPE.NeuralNetworkBuilder builder = new SNPE.NeuralNetworkBuilder(mApplication)
                    .setDebugEnabled(false)
                    .setRuntimeOrder(mTargetRuntime)
                    .setModel(mModel.file)
                    .setCpuFallbackEnabled(true)
                    // User-supplied buffers are required for any non-float tensor format.
                    .setUseUserSuppliedBuffers(mTensorFormat != SupportedTensorFormat.FLOAT)
                    .setUnsignedPD(mUnsignedPD);
            if (mUnsignedPD) {
                builder.setRuntimeCheckOption(NeuralNetwork.RuntimeCheckOption.UNSIGNEDPD_CHECK);
            }
            final long start = SystemClock.elapsedRealtime();
            network = builder.build();
            final long end = SystemClock.elapsedRealtime();
            mLoadTime = end - start;
        } catch (IllegalStateException | IOException e) {
            Log.e(LOG_TAG, e.getMessage(), e);
        }
        // Null signals failure to onPostExecute.
        return network;
    }

    @Override
    protected void onPostExecute(NeuralNetwork neuralNetwork) {
        super.onPostExecute(neuralNetwork);
        if (neuralNetwork != null) {
            if (!isCancelled()) {
                mController.onNetworkLoaded(neuralNetwork, mLoadTime);
            } else {
                // The caller no longer wants this network; free it immediately.
                neuralNetwork.release();
            }
        } else {
            if (!isCancelled()) {
                mController.onNetworkLoadFailed();
            }
        }
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import android.graphics.Bitmap;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
import java.util.List;
/**
 * Detection output for one slice of a frame: the frame id, the boxes found
 * in that slice, and the original (un-sliced) bitmap.
 */
public class AIResult {

    private int frameId;
    private List<Bbox> listBox;
    private Bitmap originalBitmap;

    /**
     * @param frameId        identifier of the source frame
     * @param listBox        boxes detected in this slice
     * @param originalBitmap the full frame the slice was cut from
     */
    public AIResult(int frameId, List<Bbox> listBox, Bitmap originalBitmap) {
        this.frameId = frameId;
        this.listBox = listBox;
        this.originalBitmap = originalBitmap;
    }

    /** Frame identifier this result belongs to. */
    public int getFrameId() {
        return frameId;
    }

    public void setFrameId(int frameId) {
        this.frameId = frameId;
    }

    /** Boxes detected in this slice. */
    public List<Bbox> getListBox() {
        return listBox;
    }

    public void setListBox(List<Bbox> listBox) {
        this.listBox = listBox;
    }

    /** The full frame the slice was cut from. */
    public Bitmap getOriginalBitmap() {
        return originalBitmap;
    }

    public void setOriginalBitmap(Bitmap originalBitmap) {
        this.originalBitmap = originalBitmap;
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import static com.qualcomm.qti.snpe.imageclassifiers.thread.AiSlicingThread.TOTAL_SLICING_PER_IMAGE;
import static org.bytedeco.javacpp.Loader.getCacheDir;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.Log;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
import com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.LinkedBlockingDeque;
/**
 * Collects per-slice detection results. Once all {@code TOTAL_SLICING_PER_IMAGE}
 * slices of a frame have arrived, it merges their boxes with
 * {@link UnionMergeProcess}, draws the merged boxes on a copy of the original
 * frame and saves the image to the app cache directory.
 */
public class AISliceMergingThread extends Thread {

    /** Class constants. */
    private static final String LOGTAG = AISliceMergingThread.class.getSimpleName();
    private static final int MAX_QUEUE_SIZE = 20;
    private static final String AI_SLING_THREAD = "AI-merge thread";

    /** Per-frame accumulation of slice results, keyed by frame id. */
    private ConcurrentMap<Integer, CopyOnWriteArrayList<AIResult>> queueMergingResults;
    /** Incoming slice results, capacity-bounded at MAX_QUEUE_SIZE. */
    private LinkedBlockingDeque<AIResult> queueResults;
    private Context mContext;

    public AISliceMergingThread(Context mContext) {
        this.queueResults = new LinkedBlockingDeque<>(MAX_QUEUE_SIZE);
        this.queueMergingResults = new ConcurrentHashMap<>();
        this.mContext = mContext;
    }

    @Override
    public void run() {
        while (true) {
            // Short sleep so the poll loop yields the CPU.
            try {
                Thread.sleep(1);
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            if (queueResults.size() > 0) {
                try {
                    AIResult aiResult = queueResults.takeFirst();
                    int frameId = aiResult.getFrameId();
                    if (!queueMergingResults.containsKey(frameId)) {
                        queueMergingResults.put(frameId, new CopyOnWriteArrayList<AIResult>());
                    }
                    queueMergingResults.get(frameId).add(aiResult);
                    // Log the size, not the whole map (the original dumped the map).
                    Log.d(LOGTAG, "map size = " + queueMergingResults.size());
                    if (queueMergingResults.get(frameId).size() == TOTAL_SLICING_PER_IMAGE) {
                        CopyOnWriteArrayList<AIResult> combinedResults = queueMergingResults.get(frameId);
                        CopyOnWriteArrayList<Bbox> mergedBBoxes = new CopyOnWriteArrayList<Bbox>();
                        queueMergingResults.remove(frameId);
                        // Collect every box from every slice of this frame.
                        // (The original left this population step commented out,
                        // so UnionMergeProcess always received an empty list.)
                        for (AIResult sliceResult : combinedResults) {
                            mergedBBoxes.addAll(sliceResult.getListBox());
                        }
                        // Merge overlapping boxes across slices (IoS metric, 0.7 threshold).
                        UnionMergeProcess unionMerge =
                                new UnionMergeProcess((float) 0.7, "IOS", true, mergedBBoxes);
                        CopyOnWriteArrayList<Bbox> unionMergedBoxes = unionMerge.UnionMerging();
                        // Draw the merged boxes on a mutable copy of the full frame.
                        Bitmap originalImageMerge = aiResult.getOriginalBitmap();
                        final Bitmap bmpcopyMerge = originalImageMerge.copy(Bitmap.Config.ARGB_8888, true);
                        Canvas canvasMerge = new Canvas(bmpcopyMerge);
                        Paint paintMerge = new Paint();
                        paintMerge.setColor(Color.RED);
                        paintMerge.setStyle(Paint.Style.STROKE);
                        paintMerge.setStrokeWidth(1);
                        for (Bbox mBox : unionMergedBoxes) {
                            Rect r = new Rect((int) mBox.x1, (int) mBox.y1, (int) mBox.x2, (int) mBox.y2);
                            canvasMerge.drawRect(r, paintMerge);
                        }
                        Log.d(LOGTAG + "Union merging", "saving Union merged image");
                        String filenameMerge = System.currentTimeMillis() + "_" + frameId + "_Unionmerged";
                        savebitmap(bmpcopyMerge, filenameMerge);
                    }
                } catch (InterruptedException | IOException e) {
                    e.printStackTrace();
                }
            }
        }
    }

    /**
     * Queues a slice result, evicting the oldest pending result when full.
     *
     * @return true if the result was enqueued
     */
    public boolean addItem(AIResult aiResult) {
        // ">=" — the deque's capacity IS MAX_QUEUE_SIZE, so size() can never
        // exceed it; the original ">" check was unreachable and a full queue
        // silently dropped the NEW result instead of evicting the oldest
        // (AiSlicingThread.addSlicedImage already uses ">=").
        if (queueResults.size() >= MAX_QUEUE_SIZE) {
            try {
                queueResults.takeFirst();
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
        }
        return queueResults.offerLast(aiResult);
    }

    /** Compresses the bitmap to JPEG (quality 60) and writes it to the app cache dir. */
    public File savebitmap(Bitmap bmp, String filename) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        bmp.compress(Bitmap.CompressFormat.JPEG, 60, bytes);
        File f = new File(mContext.getCacheDir()
                + File.separator + filename + ".jpg");
        // Log the path actually written (the original logged javacpp
        // Loader.getCacheDir(), which is a different directory).
        Log.d(LOGTAG + "fpath", "file-path= " + f.getAbsolutePath());
        f.createNewFile();
        FileOutputStream fo = new FileOutputStream(f);
        fo.write(bytes.toByteArray());
        fo.close();
        return f;
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import static com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector.IMG_HEIGHT;
import static com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector.IMG_WIDTH;
import static org.bytedeco.javacpp.Loader.getCacheDir;
import android.app.Application;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.Rect;
import android.util.Log;
import com.qualcomm.qti.snpe.imageclassifiers.MainActivity;
import com.qualcomm.qti.snpe.imageclassifiers.R;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
import com.qualcomm.qti.snpe.imageclassifiers.detector.RetinaDetector;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.LinkedBlockingDeque;
public class AiSlicingThread extends Thread{
/** class's constance*/
private static final String LOGTAG = AiSlicingThread.class.getSimpleName();
private static final int MAX_QUEUE_SIZE = 20;
private static final String AI_SLICING_THREAD = "AI slicing thread";
public static final int TOTAL_SLICING_PER_IMAGE = 7;
/** class main attribute */
private LinkedBlockingDeque<ImageSlice> aiSlicesQueue;
private RetinaDetector mDetector;
private AISliceMergingThread aiSliceMergingThread;
private boolean isProcess = true;
private Context mContext;
/** Constructor 1: receive exist detector */
public AiSlicingThread(RetinaDetector mDetector, AISliceMergingThread aiSliceMergingThread) {
this.aiSlicesQueue = new LinkedBlockingDeque<>();
this.aiSliceMergingThread = aiSliceMergingThread;
this.mDetector = mDetector;
}
/** Constructor 2: using default detector */
public AiSlicingThread(Application application, Context context, AISliceMergingThread aiSliceMergingThread) {
this.aiSliceMergingThread = aiSliceMergingThread;
this.aiSlicesQueue = new LinkedBlockingDeque<>();
mContext = context;
this.aiSlicesQueue = new LinkedBlockingDeque<ImageSlice>();
this.mDetector = new RetinaDetector(context, application, R.raw.retina_mb_nosm_h288_w512_quantized);
this.setName(AI_SLICING_THREAD);
}
/** do loop slicing */
@Override
public void run() {
while (isProcess) {
/** need sleep to cpu scheduling*/
try {
Thread.sleep(1);
} catch (InterruptedException e) {
e.printStackTrace();
}
if (aiSlicesQueue.size() > 0){
try {
ImageSlice imageSlice = aiSlicesQueue.takeFirst();
Bitmap bmp = imageSlice.getBmp();
int frameId = imageSlice.getFrameId();
SliceCropBox sliceCropBox = imageSlice.getSliceCropBox();
/** Preprocess input image */
Bitmap croppedBmp1 = Bitmap.createBitmap(bmp,
sliceCropBox.getX(), sliceCropBox.getY(),
sliceCropBox.getWidth(), sliceCropBox.getHeight());
final float scaleX = IMG_WIDTH / (float) (croppedBmp1.getWidth());
final float scaleY = IMG_HEIGHT / (float) (croppedBmp1.getHeight());
final Matrix scalingMatrix = new Matrix();
scalingMatrix.postScale(scaleX, scaleY);
Bitmap resizedcroppedBmp1 = Bitmap.createBitmap(croppedBmp1,
0, 0,
croppedBmp1.getWidth(), croppedBmp1.getHeight(),scalingMatrix,false);
List<Bbox> detectedBoxes = startAIFlowDetect(mDetector, resizedcroppedBmp1, frameId,scaleX,scaleY,sliceCropBox.getX(),sliceCropBox.getY());
Log.d(LOGTAG +"_boxResultInfo", "box count= " + detectedBoxes.size());
// final Bitmap bmpcopy = bmp.copy(Bitmap.Config.ARGB_8888, true);
// Canvas canvas = new Canvas(bmpcopy);
//
// Paint paint = new Paint();
// //paint.setAlpha(0xA0); // the transparency
// paint.setColor(Color.RED); // color is red
// paint.setStyle(Paint.Style.STROKE); // stroke or fill or ...
// paint.setStrokeWidth(1); // the stroke width
// for (Bbox box0 : detectedBoxes){
// Log.d(LOGTAG + "singleBoxInfo", "co-ord= " + box0.x1 + " " + box0.x2 +" " + box0.y1 + " " + box0.y2 + " | conf = " + box0.conf);
// Rect r = new Rect((int)box0.x1, (int)box0.y1, (int)box0.x2, (int)box0.y2);
// canvas.drawRect(r, paint);
//
// }
// String filename = System.currentTimeMillis() + "_"+frameId + "_" + this.getName() ;
aiSliceMergingThread.addItem(new AIResult(frameId, detectedBoxes, bmp));
// Log.d(LOGTAG + "detecting slice", "saving slice image");
// savebitmap(bmpcopy, filename);
} catch (InterruptedException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
}
/** Call to destroy detector to end thread process */
if (mDetector != null) {
mDetector.close();
}
}
public boolean addSlicedImage(ImageSlice imageSlice){
if (aiSlicesQueue.size() >= MAX_QUEUE_SIZE){
try {
aiSlicesQueue.takeFirst();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
return aiSlicesQueue.offerLast(imageSlice);
}
/**
 * Runs the detector on a resized slice bitmap and maps the resulting boxes
 * back into the coordinate space of the full (un-sliced) frame.
 *
 * Bugfix: the original allocated an ARGB_8888 copy of the bitmap on every
 * call that was never used (its only consumer was commented-out debug
 * drawing), churning memory per slice; the copy and the dead debug code
 * are removed.
 *
 * @param mDetector detector instance used for inference
 * @param bmp       resized slice bitmap fed to the detector
 * @param frameId   id of the source frame (kept for logging/debug hooks)
 * @param scaleX    horizontal scale applied when the slice was resized
 * @param scaleY    vertical scale applied when the slice was resized
 * @param startX    x offset of the slice inside the full frame
 * @param startY    y offset of the slice inside the full frame
 * @return detected boxes translated into full-frame pixel coordinates
 * @throws IOException if detection fails with an I/O error
 */
private List<Bbox> startAIFlowDetect(RetinaDetector mDetector, Bitmap bmp, int frameId, float scaleX, float scaleY, int startX, int startY) throws IOException {
    List<Bbox> detectedBoxes = mDetector.detectFrame(bmp);
    // Undo the slice resize and re-apply the slice origin so each box is
    // expressed in the coordinates of the original full frame.
    for (Bbox box0 : detectedBoxes){
        box0.x1 = box0.x1 / scaleX + startX;
        box0.x2 = box0.x2 / scaleX + startX;
        box0.y1 = box0.y1 / scaleY + startY;
        box0.y2 = box0.y2 / scaleY + startY;
    }
    return detectedBoxes;
}
/**
 * Compresses the bitmap to JPEG (quality 60) and writes it into the app
 * cache directory as {@code <filename>.jpg}.
 *
 * Bugfix: the stream is now closed via try-with-resources (the original
 * leaked the FileOutputStream if write() threw), the bitmap is compressed
 * straight into the file instead of through an intermediate byte array,
 * and the log line reports the same path the file is actually written to
 * (the original logged getCacheDir() but wrote to mContext.getCacheDir()).
 *
 * @param bmp      bitmap to persist
 * @param filename base file name (without extension)
 * @return the written file
 * @throws IOException if the file cannot be created or written
 */
public File savebitmap(Bitmap bmp, String filename) throws IOException {
    File f = new File(mContext.getCacheDir(), filename + ".jpg");
    Log.d(LOGTAG + "fpath", "file-path= " + f.getAbsolutePath());
    try (FileOutputStream fo = new FileOutputStream(f)) {
        bmp.compress(Bitmap.CompressFormat.JPEG, 60, fo);
    }
    return f;
}
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import android.graphics.Bitmap;
import android.graphics.PointF;
/**
 * Value holder pairing one cropped slice of a camera frame with the crop
 * region it was cut from and the id of the originating frame.
 */
public class ImageSlice {
    private int frameId;               // id of the source frame
    private Bitmap bmp;                // pixel data of the slice
    private SliceCropBox sliceCropBox; // crop region within the full frame

    /**
     * @param frameId      id of the frame this slice came from
     * @param bmp          bitmap holding the slice pixels
     * @param sliceCropBox region of the full frame covered by this slice
     */
    public ImageSlice(int frameId, Bitmap bmp, SliceCropBox sliceCropBox) {
        this.frameId = frameId;
        this.bmp = bmp;
        this.sliceCropBox = sliceCropBox;
    }

    /** @return id of the originating frame */
    public int getFrameId() {
        return this.frameId;
    }

    public void setFrameId(int frameId) {
        this.frameId = frameId;
    }

    /** @return bitmap holding the slice pixels */
    public Bitmap getBmp() {
        return this.bmp;
    }

    public void setBmp(Bitmap bmp) {
        this.bmp = bmp;
    }

    /** @return crop region this slice covers in the full frame */
    public SliceCropBox getSliceCropBox() {
        return this.sliceCropBox;
    }

    public void setSliceCropBox(SliceCropBox sliceCropBox) {
        this.sliceCropBox = sliceCropBox;
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
/**
 * Base class for merge-style post-processing of detector predictions.
 * Holds the match threshold/metric configuration and decides whether two
 * boxes "match": overlap score above the threshold and, unless running
 * class-agnostic, identical labels.
 */
public class PostProcessPrediction {
    /** Strategy for computing the overlap score between two predictions. */
    public interface ICalculateMatch {
        public float run(Bbox mPred1, Bbox mPred2);
    }
    float matchThreshold;   // minimum overlap score for two boxes to match
    String matchMetric;     // overlap metric name; only "IOS" is implemented
    boolean classAgnostic;  // if true, labels are ignored when matching
    // NOTE(review): remains null for any metric other than "IOS", in which
    // case hasMatch() would throw NPE — confirm callers only pass "IOS".
    ICalculateMatch calculateMatch;

    /** Area of a box, computed as (x2 - x1) * (y2 - y1). */
    public float calculateArea(Bbox mBbox){
        return ((mBbox.x2 - mBbox.x1) * (mBbox.y2 - mBbox.y1));
    }

    /** Area of the intersection of two boxes; 0 when they do not overlap. */
    public float calculateIntersectionArea(Bbox mBbox1, Bbox mBbox2){
        float[] leftTop = new float[]{Math.max(mBbox1.x1, mBbox2.x1), Math.max(mBbox1.y1, mBbox2.y1)};
        float[] rightBottom = new float[]{Math.min(mBbox1.x2, mBbox2.x2), Math.min(mBbox1.y2, mBbox2.y2)};
        // Clamp negative extents to zero so disjoint boxes yield area 0.
        float[] widthHeight = new float[]{Math.max(rightBottom[0] - leftTop[0], 0), Math.max(rightBottom[1] - leftTop[1], 0)};
        return widthHeight[0] * widthHeight[1];
    }

    /** Intersection-over-smaller-area (IOS) score of two predictions. */
    public float calculateBoxIOS(Bbox mPred1, Bbox mPred2){
        float mArea1 = this.calculateArea(mPred1);
        float mArea2 = this.calculateArea(mPred2);
        float intersect = calculateIntersectionArea(mPred1, mPred2);
        float smallerArea = Math.min(mArea1, mArea2);
        return intersect / smallerArea;
    }

    /**
     * @return true when the two predictions overlap above the configured
     *         threshold and (unless class-agnostic) share a label
     */
    public boolean hasMatch (Bbox mPred1, Bbox mPred2){
        boolean thresholdCondition = this.calculateMatch.run(mPred1, mPred2) > this.matchThreshold;
        boolean categoryCondition = (mPred1.getLabel().equals(mPred2.getLabel())
                || this.classAgnostic);
        return thresholdCondition && categoryCondition;
    }

    /**
     * @param matchThreshold_ minimum overlap score for a match
     * @param matchMetric_    metric name; only "IOS" is currently supported
     * @param classAgnostic_  whether to ignore labels when matching
     */
    public PostProcessPrediction(float matchThreshold_, String matchMetric_, boolean classAgnostic_){
        this.matchThreshold = matchThreshold_;
        this.matchMetric = matchMetric_;
        this.classAgnostic = classAgnostic_;
        // Bugfix: compare string content with equals(); the original used
        // reference equality (==), which only works for interned literals.
        if ("IOS".equals(this.matchMetric)){
            this.calculateMatch = new ICalculateMatch() {
                @Override
                public float run(Bbox mPred1, Bbox mPred2) {
                    return calculateBoxIOS(mPred1, mPred2);
                }
            };
        }
    }
}
\ No newline at end of file
package com.qualcomm.qti.snpe.imageclassifiers.thread;
/**
 * Axis-aligned crop rectangle: top-left corner (x, y) plus width/height,
 * describing where a slice sits inside the full frame.
 */
public class SliceCropBox {
    int x;      // left edge, in pixels
    int y;      // top edge, in pixels
    int width;  // crop width, in pixels
    int height; // crop height, in pixels

    /**
     * @param x      left edge of the crop region
     * @param y      top edge of the crop region
     * @param width  width of the crop region
     * @param height height of the crop region
     */
    public SliceCropBox(int x, int y, int width, int height) {
        this.x = x;
        this.y = y;
        this.width = width;
        this.height = height;
    }

    /** @return left edge of the crop region */
    public int getX() {
        return this.x;
    }

    public void setX(int x) {
        this.x = x;
    }

    /** @return top edge of the crop region */
    public int getY() {
        return this.y;
    }

    public void setY(int y) {
        this.y = y;
    }

    /** @return width of the crop region */
    public int getWidth() {
        return this.width;
    }

    public void setWidth(int width) {
        this.width = width;
    }

    /** @return height of the crop region */
    public int getHeight() {
        return this.height;
    }

    public void setHeight(int height) {
        this.height = height;
    }
}
package com.qualcomm.qti.snpe.imageclassifiers.thread;
import com.qualcomm.qti.snpe.imageclassifiers.detector.Bbox;
import java.util.Collections;
import java.util.concurrent.CopyOnWriteArrayList;
/**
 * Greedy union-merge of overlapping predictions: repeatedly takes the
 * highest-ranked box, absorbs every box that matches it (per the metric
 * configured on PostProcessPrediction), and emits the merged result.
 */
class UnionMergeProcess extends PostProcessPrediction{
    CopyOnWriteArrayList<Bbox> sourceObjectPredictions;     // boxes still to process
    CopyOnWriteArrayList<Bbox> newSourceObjectPredictions;  // survivors of the current pass
    Bbox selectedObjectPredictions;                         // box currently being grown
    // NOTE(review): instance field — a second call to UnionMerging() would
    // append onto the previous results; confirm each instance is used once.
    CopyOnWriteArrayList<Bbox> selectedObjectPredictionsList = new CopyOnWriteArrayList<Bbox>();

    /**
     * @param matchThreshold_           minimum overlap score for a match
     * @param matchMetric_              metric name, e.g. "IOS"
     * @param classAgnostic_            whether to ignore labels when matching
     * @param sourceObjectPredictions_  boxes to merge (consumed by UnionMerging)
     */
    public UnionMergeProcess(float matchThreshold_, String matchMetric_, boolean classAgnostic_, CopyOnWriteArrayList<Bbox> sourceObjectPredictions_) {
        super(matchThreshold_, matchMetric_, classAgnostic_);
        this.sourceObjectPredictions = sourceObjectPredictions_;
    }

    /**
     * Merges all matching predictions and returns the merged set.
     *
     * Perf fix: the original re-sorted the whole list on every iteration of
     * the while loop. Filtering preserves relative order, so the remaining
     * list stays sorted — a single up-front sort is sufficient.
     */
    public CopyOnWriteArrayList<Bbox> UnionMerging(){
        // Sort once, highest-ranked first (relies on Bbox's Comparable order).
        Collections.sort(sourceObjectPredictions, Collections.reverseOrder());
        while(this.sourceObjectPredictions.size() > 0){
            // Take the current best box and scan the rest for matches.
            selectedObjectPredictions = sourceObjectPredictions.get(0);
            sourceObjectPredictions.remove(0);
            newSourceObjectPredictions = new CopyOnWriteArrayList<Bbox>();
            for (int i = 0; i < sourceObjectPredictions.size(); i++){
                Bbox candidate = sourceObjectPredictions.get(i);
                if (this.hasMatch(selectedObjectPredictions, candidate)){
                    // Absorb the match into the growing union box.
                    selectedObjectPredictions = this.mergeObjectPredictionPair(selectedObjectPredictions, candidate);
                }
                else{
                    newSourceObjectPredictions.add(candidate);
                }
            }
            sourceObjectPredictions = newSourceObjectPredictions;
            selectedObjectPredictionsList.add(selectedObjectPredictions);
        }
        return selectedObjectPredictionsList;
    }

    /** Merges two predictions into one: union box, max score, winner's label. */
    public Bbox mergeObjectPredictionPair(Bbox mPred1, Bbox mPred2){
        int[] mergedBoundingBox = this.getMergedBBox(mPred1, mPred2);
        float mergedScore = this.getMergedScore(mPred1, mPred2);
        String mergedCategory = this.getMergedCategory(mPred1, mPred2);
        return new Bbox(mergedBoundingBox[0], mergedBoundingBox[1], mergedBoundingBox[2], mergedBoundingBox[3], mergedScore, mergedCategory);
    }

    /** @return union bounding box of the two predictions as [x1, y1, x2, y2] */
    public int[] getMergedBBox(Bbox mPred1, Bbox mPred2){
        return this.calculateBoxUnion(mPred1, mPred2);
    }

    /** @return the higher of the two confidence scores */
    public float getMergedScore(Bbox mPred1, Bbox mPred2){
        return Math.max(mPred1.conf, mPred2.conf);
    }

    /** @return the label of the more confident prediction */
    public String getMergedCategory (Bbox mPred1, Bbox mPred2) {
        if (mPred1.conf > mPred2.conf){
            return mPred1.label;
        }
        else{
            return mPred2.label;
        }
    }

    /** Smallest axis-aligned box containing both inputs, as [x1, y1, x2, y2]. */
    public int[] calculateBoxUnion (Bbox Bbox1, Bbox Bbox2)
    {
        int[] Bbox = new int[]{(int) Math.min(Bbox1.x1, Bbox2.x1), (int) Math.min(Bbox1.y1, Bbox2.y1),
                (int) Math.max(Bbox1.x2, Bbox2.x2), (int) Math.max(Bbox1.y2, Bbox2.y2)
        };
        return Bbox;
    }
}
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/colorPrimaryDark"/>
<corners android:radius="15dp"/>
<padding android:left="30dp" android:right="30dp" android:top="0dp" android:bottom="0dp"/>
</shape>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<shape xmlns:android="http://schemas.android.com/apk/res/android" android:shape="rectangle">
<solid android:color="@color/colorAccent"/>
<padding android:left="10dp" android:right="10dp" android:top="10dp" android:bottom="10dp"/>
</shape>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<RelativeLayout
xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context="com.qualcomm.qti.snpe.imageclassifiers.MainActivity">
<FrameLayout
android:id="@+id/main_content"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</RelativeLayout>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
xmlns:tools="http://schemas.android.com/tools"
android:orientation="vertical"
android:padding="10dp"
android:background="@color/colorTextIcons">
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_marginBottom="10dp">
<LinearLayout
android:id="@+id/model_overview_labels_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_marginBottom="5dp"
android:layout_marginTop="5dp">
<TextView
android:id="@+id/model_overview_name_label"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/model_name"
android:textAppearance="@style/itemKey"/>
<TextView
android:id="@+id/model_overview_version_label"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/model_version"
android:textAppearance="@style/itemKey"/>
<TextView
android:id="@+id/model_overview_dimensions_label"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/model_dimensions"
android:textAppearance="@style/itemKey"/>
</LinearLayout>
<LinearLayout
android:id="@+id/model_overview_values_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/model_overview_labels_row"
android:layout_marginBottom="5dp">
<TextView
android:id="@+id/model_overview_name_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
tools:text="@string/model_name_placeholder"
android:textAppearance="@style/itemValue"/>
<TextView
android:id="@+id/model_overview_version_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
tools:text="@string/model_version_placeholder"
android:textAppearance="@style/itemValue"/>
<TextView
android:id="@+id/model_overview_dimensions_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
tools:text="@string/model_dimensions_placeholder"
android:textAppearance="@style/itemValue"/>
</LinearLayout>
<LinearLayout
android:id="@+id/model_builder_options_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:orientation="horizontal"
android:layout_below="@id/model_overview_values_row"
android:layout_marginBottom="5dp"
android:layout_marginTop="5dp">
<TextView
android:id="@+id/model_builder_runtime_option"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/builder_runtime"
android:textAppearance="@style/itemKey"/>
<TextView
android:id="@+id/model_builder_tensor_option"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/builder_tensor_format"
android:textAppearance="@style/itemKey"/>
<TextView
android:id="@+id/model_builder_output_option"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:text="@string/output_layers"
android:textAppearance="@style/itemKey"/>
</LinearLayout>
<LinearLayout
android:id="@+id/model_builder_values_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/model_builder_options_row"
android:layout_marginBottom="5dp">
<Spinner
android:id="@+id/model_builder_runtime_spinner"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:padding="0dp"
android:layout_gravity="start"/>
<Spinner
android:id="@+id/model_builder_tensor_spinner"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"/>
<Spinner
android:id="@+id/model_overview_layers_spinner"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"/>
</LinearLayout>
<Button
android:id="@+id/model_build_button"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_below="@id/model_builder_values_row"
android:layout_centerHorizontal="true"
android:layout_marginTop="10dp"
android:background="@drawable/button"
android:textColor="@color/colorPrimaryText"
android:text="@string/build_network"/>
</RelativeLayout>
<GridView
android:id="@+id/model_image_grid"
android:layout_width="match_parent"
android:layout_height="0dp"
android:layout_weight="1"
android:layout_marginBottom="10dp"
android:padding="5dp"
android:clickable="true"
android:drawSelectorOnTop="true"
android:focusable="true"
android:horizontalSpacing="0dp"
android:numColumns="auto_fit"
android:stretchMode="columnWidth"
android:verticalSpacing="10dp" />
<RelativeLayout
android:layout_width="match_parent"
android:layout_height="wrap_content">
<TextView
android:id="@+id/model_overview_classification_text"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:minHeight="50dp"
android:textAppearance="@android:style/TextAppearance.Medium"
android:background="@drawable/classification_background"
android:textColor="@color/colorPrimaryText"
android:text="@string/classification_hint"
android:gravity="center"/>
<LinearLayout
android:id="@+id/model_statistics_labels_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/model_overview_classification_text"
android:orientation="horizontal"
android:layout_marginBottom="5dp"
android:layout_marginTop="5dp">
<TextView
android:id="@+id/model_statistics_init_label"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center"
android:text="@string/statistic_init"
android:textAppearance="@style/itemKey"/>
<TextView
android:id="@+id/model_statistics_java_execute_label"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center"
android:text="@string/statistic_java_execute"
android:textAppearance="@style/itemKey"/>
</LinearLayout>
<LinearLayout
android:id="@+id/model_statistics_values_row"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_below="@id/model_statistics_labels_row"
android:layout_marginBottom="5dp">
<TextView
android:id="@+id/model_statistics_init_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center"
android:text="@string/not_available"
tools:text="@string/model_statistic_init_placeholder"
android:textAppearance="@style/itemValue"/>
<TextView
android:id="@+id/model_statistics_java_execute_text"
android:layout_width="0dp"
android:layout_height="wrap_content"
android:layout_weight="1"
android:gravity="center"
android:text="@string/not_available"
tools:text="@string/model_statistic_execute_placeholder"
android:textAppearance="@style/itemValue"/>
</LinearLayout>
</RelativeLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<ImageView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/model_image"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:minHeight="200dp"
android:scaleType="fitCenter">
</ImageView>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
android:background="@color/colorTextIcons">
<TextView
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:gravity="center"
android:layout_margin="15dp"
android:textSize="32sp"
android:textStyle="bold"
android:text="@string/models_header"/>
<ListView
android:id="@+id/models_list"
android:layout_width="match_parent"
android:layout_height="wrap_content"
/>
<TextView
android:background="@color/colorPrimary"
android:id="@+id/models_load_status"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:textAppearance="@android:style/TextAppearance.Large"
android:textColor="@color/colorAccent"
android:padding="5dp"
android:gravity="center"/>
<Switch
android:id="@+id/unsignedPD_switch"
android:text="UNSIGNEDPD : "
android:textAppearance="@android:style/TextAppearance.Large"
android:paddingStart="10dp"
android:paddingEnd="10dp"
android:minHeight="150dp"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<TextView xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/model_name"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:minHeight="50dp"
android:gravity="center_vertical"
android:paddingStart="10dp"
android:paddingEnd="10dp"
android:textAppearance="@android:style/TextAppearance.Large">
</TextView>
<?xml version="1.0" encoding="utf-8"?>
<!--
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<resources>
<color name="colorPrimary">#03A9F4</color>
<color name="colorPrimaryDark">#0288D1</color>
<color name="colorAccent">#FF9800</color>
<color name="colorPrimaryLight">#B3E5FC</color>
<color name="colorPrimaryText">#212121</color>
<color name="colorSecondaryText">#757575</color>
<color name="colorDivider">#BDBDBD</color>
<color name="colorTextIcons">#FFFFFF</color>
</resources>
<!--
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<resources>
<string name="app_name">SNPE Image Classifiers</string>
<string name="loading_models">Loading models...</string>
<string name="models_header"><u>Models</u></string>
<string name="model_extraction_failed">Failed to extract model.</string>
<string name="model_name">Model</string>
<string name="model_name_placeholder">Model A</string>
<string name="model_dimensions">Dimensions</string>
<string name="model_dimensions_placeholder">[X,Y,Z]</string>
<string name="model_load_failed">Failed to load model</string>
<string name="output_layers">Output Layers</string>
<string name="loading_network">Loading Network...</string>
<string name="model_not_loaded">Model not yet loaded!</string>
<string name="classification_failed">Classification failed.</string>
<string name="model_version">Version</string>
<string name="model_version_placeholder">N/A</string>
<string name="snpe_version">SNPE version: %1$s</string>
<string name="builder_runtime">Runtime</string>
<string name="builder_tensor_format">Tensor</string>
<string name="statistic_init">Net Init</string>
<string name="statistic_java_execute">Java Execute</string>
<string name="statistic_native_execute">Native Execute</string>
<string name="model_statistic_init_placeholder">12345 ms</string>
<string name="model_statistic_execute_placeholder">20 ms</string>
<string name="not_available">N/A</string>
<string name="build_network">Build network!</string>
<string name="classification_hint">Click on an image to classify</string>
</resources>
<!--
* Copyright (c) 2016-2018 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
-->
<resources>
<style name="AppTheme" parent="android:Theme.Material.Light.DarkActionBar">
<item name="android:colorPrimary">@color/colorPrimary</item>
<item name="android:colorPrimaryDark">@color/colorPrimaryDark</item>
<item name="android:colorAccent">@color/colorAccent</item>
</style>
<style name="itemKey">
<item name="android:textColor">@color/colorPrimaryText</item>
<item name="android:textSize">18sp</item>
</style>
<style name="itemValue">
<item name="android:textColor">@color/colorSecondaryText</item>
<item name="android:textSize">14sp</item>
</style>
</resources>
/*
* Copyright (c) 2016, 2017 Qualcomm Technologies, Inc.
* All Rights Reserved.
* Confidential and Proprietary - Qualcomm Technologies, Inc.
*/
// Root Gradle build: shared repositories and the Android Gradle plugin.
buildscript {
    repositories {
        // NOTE(review): JCenter was sunset in 2021 (read-only); consider
        // migrating to mavenCentral() when upgrading the build.
        jcenter()
        google()
    }
    dependencies {
        // Android Gradle plugin version used by all modules.
        classpath 'com.android.tools.build:gradle:3.5.0'
    }
}
// Dependency repositories applied to every module in the build.
allprojects {
    repositories {
        jcenter()
        google()
    }
}
// `gradlew clean` removes the root build output directory.
task clean(type: Delete) {
    delete rootProject.buildDir
}
#
# Copyright (c) 2016, 2017 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
org.gradle.jvmargs=-Xms1024m -Xmx2048m
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-5.6.4-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
#!/usr/bin/env bash
##############################################################################
##
## Gradle start up script for UN*X
##
##############################################################################
# Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
DEFAULT_JVM_OPTS=""
APP_NAME="Gradle"
APP_BASE_NAME=`basename "$0"`
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD="maximum"
warn ( ) {
echo "$*"
}
die ( ) {
echo
echo "$*"
echo
exit 1
}
# OS specific support (must be 'true' or 'false').
cygwin=false
msys=false
darwin=false
case "`uname`" in
CYGWIN* )
cygwin=true
;;
Darwin* )
darwin=true
;;
MINGW* )
msys=true
;;
esac
# Attempt to set APP_HOME
# Resolve links: $0 may be a link
PRG="$0"
# Need this for relative symlinks.
while [ -h "$PRG" ] ; do
ls=`ls -ld "$PRG"`
link=`expr "$ls" : '.*-> \(.*\)$'`
if expr "$link" : '/.*' > /dev/null; then
PRG="$link"
else
PRG=`dirname "$PRG"`"/$link"
fi
done
SAVED="`pwd`"
cd "`dirname \"$PRG\"`/" >/dev/null
APP_HOME="`pwd -P`"
cd "$SAVED" >/dev/null
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
# Determine the Java command to use to start the JVM.
if [ -n "$JAVA_HOME" ] ; then
if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
# IBM's JDK on AIX uses strange locations for the executables
JAVACMD="$JAVA_HOME/jre/sh/java"
else
JAVACMD="$JAVA_HOME/bin/java"
fi
if [ ! -x "$JAVACMD" ] ; then
die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
else
JAVACMD="java"
which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
Please set the JAVA_HOME variable in your environment to match the
location of your Java installation."
fi
# Increase the maximum file descriptors if we can.
if [ "$cygwin" = "false" -a "$darwin" = "false" ] ; then
MAX_FD_LIMIT=`ulimit -H -n`
if [ $? -eq 0 ] ; then
if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
MAX_FD="$MAX_FD_LIMIT"
fi
ulimit -n $MAX_FD
if [ $? -ne 0 ] ; then
warn "Could not set maximum file descriptor limit: $MAX_FD"
fi
else
warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
fi
fi
# For Darwin, add options to specify how the application appears in the dock
if $darwin; then
GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
fi
# For Cygwin, switch paths to Windows format before running java
if $cygwin ; then
APP_HOME=`cygpath --path --mixed "$APP_HOME"`
CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
JAVACMD=`cygpath --unix "$JAVACMD"`
# We build the pattern for arguments to be converted via cygpath
ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
SEP=""
for dir in $ROOTDIRSRAW ; do
ROOTDIRS="$ROOTDIRS$SEP$dir"
SEP="|"
done
OURCYGPATTERN="(^($ROOTDIRS))"
# Add a user-defined pattern to the cygpath arguments
if [ "$GRADLE_CYGPATTERN" != "" ] ; then
OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
fi
# Now convert the arguments - kludge to limit ourselves to /bin/sh
i=0
for arg in "$@" ; do
CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
else
eval `echo args$i`="\"$arg\""
fi
i=$((i+1))
done
case $i in
(0) set -- ;;
(1) set -- "$args0" ;;
(2) set -- "$args0" "$args1" ;;
(3) set -- "$args0" "$args1" "$args2" ;;
(4) set -- "$args0" "$args1" "$args2" "$args3" ;;
(5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
(6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
(7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
(8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
(9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
esac
fi
# Split up the JVM_OPTS And GRADLE_OPTS values into an array, following the shell quoting and substitution rules
function splitJvmOpts() {
JVM_OPTS=("$@")
}
eval splitJvmOpts $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS
JVM_OPTS[${#JVM_OPTS[*]}]="-Dorg.gradle.appname=$APP_BASE_NAME"
exec "$JAVACMD" "${JVM_OPTS[@]}" -classpath "$CLASSPATH" org.gradle.wrapper.GradleWrapperMain "$@"
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
if "%@eval[2+2]" == "4" goto 4NT_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
goto execute
:4NT_args
@rem Get arguments from the 4NT Shell from JP Software
set CMD_LINE_ARGS=%$
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega
## This file must *NOT* be checked into Version Control Systems,
# as it contains information specific to your local configuration.
#
# Location of the SDK. This is only used by Gradle.
# For customization when using a Version Control System, please read the
# header note.
#Thu Sep 23 09:21:16 ICT 2021
sdk.dir=/home/quannm/Android/Sdk
/*
 * Copyright (c) 2016 Qualcomm Technologies, Inc.
 * All Rights Reserved.
 * Confidential and Proprietary - Qualcomm Technologies, Inc.
 */
// Register the application module with the Gradle build.
include(':app')
#
# Copyright (c) 2016, 2018 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
#############################################################
# Alexnet setup
#############################################################
# Stages the AlexNet model (preferring the quantized DLC when present),
# the ILSVRC labels, the mean image and the sample images into
# alexnet.zip, then installs it under app/src/main/res/raw/.
mkdir -p alexnet
mkdir -p alexnet/images
cd alexnet
cp -R ../../../../models/alexnet/data/cropped/*.jpg images
FLOAT_DLC="../../../../models/alexnet/dlc/bvlc_alexnet.dlc"
QUANTIZED_DLC="../../../../models/alexnet/dlc/bvlc_alexnet_quantized.dlc"
# Prefer the quantized model when it has been generated.
# Expansions are quoted so paths containing spaces do not word-split.
if [ -f "${QUANTIZED_DLC}" ]; then
    cp -R "${QUANTIZED_DLC}" model.dlc
else
    cp -R "${FLOAT_DLC}" model.dlc
fi
cp -R ../../../../models/alexnet/data/ilsvrc_2012_labels.txt labels.txt
cp -R ../../../../models/alexnet/data/ilsvrc_2012_mean_cropped.bin mean_image.bin
zip -r alexnet.zip ./*
# -p: do not fail when res/raw/ already exists (e.g. on a re-run);
# matches the inception_v3 setup script.
mkdir -p ../app/src/main/res/raw/
cp alexnet.zip ../app/src/main/res/raw/
cd ..
rm -rf ./alexnet
#
# Copyright (c) 2018, 2019 Qualcomm Technologies, Inc.
# All Rights Reserved.
# Confidential and Proprietary - Qualcomm Technologies, Inc.
#
#############################################################
# Inception V3 setup
#############################################################
# Stages the Inception V3 model (preference order: UDO quantized > UDO >
# quantized > float), the labels, sample images and — when built — the
# UDO runtime libraries into inception_v3.zip, then installs it under
# app/src/main/res/raw/.
mkdir -p inception_v3
mkdir -p inception_v3/images
cd inception_v3
cp -R ../../../../models/inception_v3/data/cropped/*.jpg images
FLOAT_DLC="../../../../models/inception_v3/dlc/inception_v3.dlc"
QUANTIZED_DLC="../../../../models/inception_v3/dlc/inception_v3_quantized.dlc"
UDO_DLC="../../../../models/inception_v3/dlc/inception_v3_udo.dlc"
UDO_QUANTIZED_DLC="../../../../models/inception_v3/dlc/inception_v3_udo_quantized.dlc"
UDO_PACKAGE_PATH="../../../../models/inception_v3/SoftmaxUdoPackage/libs/arm64-v8a/"
# The two DSP paths carry one extra '../' because they are only
# dereferenced below after 'cd udo', i.e. one directory level deeper.
UDO_DSP_PACKAGE_PATH="../../../../../models/inception_v3/SoftmaxUdoPackage/libs/dsp_v60/"
UDO_HTP_DSP_PACKAGE_PATH="../../../../../models/inception_v3/udo_dsp/SoftmaxUdoPackage/libs/dsp_v68/"
# Pick the best available model. Expansions are quoted so paths with
# spaces do not word-split inside the tests.
if [ -f "${UDO_QUANTIZED_DLC}" ]; then
    cp -R "${UDO_QUANTIZED_DLC}" model.dlc
elif [ -f "${UDO_DLC}" ]; then
    cp -R "${UDO_DLC}" model.dlc
else
    if [ -f "${QUANTIZED_DLC}" ]; then
        cp -R "${QUANTIZED_DLC}" model.dlc
    else
        cp -R "${FLOAT_DLC}" model.dlc
    fi
fi
# Bundle the UDO runtime libraries when the package has been built.
if [ -d "${UDO_PACKAGE_PATH}" ]; then
    # -p: tolerate directories left over from a previous (re-)run.
    mkdir -p udo
    cd udo
    mkdir -p arm64-v8a
    mkdir -p dsp
    cp -R ../"${UDO_PACKAGE_PATH}"/* ./arm64-v8a/
    mv ./arm64-v8a/libUdoSoftmaxUdoPackageReg.so ./arm64-v8a/UdoPackageReg.so
    # Prefer the HTP (v68) DSP libraries over the v60 ones.
    if [ -d "${UDO_HTP_DSP_PACKAGE_PATH}" ]; then
        cp -R "${UDO_HTP_DSP_PACKAGE_PATH}"/* ./dsp/
    elif [ -d "${UDO_DSP_PACKAGE_PATH}" ]; then
        cp -R "${UDO_DSP_PACKAGE_PATH}"/* ./dsp/
    fi
    # Drop libraries that must not be packaged with the UDO bundle.
    rm -rf ./arm64-v8a/libc++_shared.so
    rm -rf ./arm64-v8a/libOpenCL.so
    cd ../
fi
cp -R ../../../../models/inception_v3/data/imagenet_slim_labels.txt labels.txt
zip -r inception_v3.zip ./*
mkdir -p ../app/src/main/res/raw/
cp inception_v3.zip ../app/src/main/res/raw/
cd ..
rm -rf ./inception_v3
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment