diff --git a/.gitignore b/.gitignore
index 37a513f..fa33526 100644
--- a/.gitignore
+++ b/.gitignore
@@ -113,3 +113,4 @@ dmypy.json
 # Hub exports
 **/*.mlmodel
 **/*.tflite
+apps/android/SuperResolution/src/main/res/values/models.xml
diff --git a/apps/android/SuperResolution/build.gradle b/apps/android/SuperResolution/build.gradle
index 8fb765e..826271e 100644
--- a/apps/android/SuperResolution/build.gradle
+++ b/apps/android/SuperResolution/build.gradle
@@ -28,9 +28,7 @@ android {
     }

     preBuild.doFirst {
-        if (!file("./src/main/assets/" + project.properties['superresolution_tfLiteModelAsset']).exists()) {
-            throw new RuntimeException(missingModelErrorMsg)
-        }
+        generateModelList()

         for (int i = 1; i <= 2; ++i) {
             String filename = "./src/main/assets/images/Sample${i}.jpg"
@@ -61,3 +59,34 @@ dependencies {
 if (System.getProperty("user.dir") != project.rootDir.path) {
     throw new RuntimeException("This project should be opened from the `android` directory (parent of SuperResolution directory), NOT the SuperResolution directory.")
 }
+
+
+def generateModelList() {
+    def assetsDir = file("${projectDir}/src/main/assets")
+    def outputDir = file("${projectDir}/src/main/res/values")
+    def outputFile = file("${outputDir}/models.xml")
+    if (!outputDir.exists()) {
+        throw new GradleException("res directory does not exist: ${outputDir}")
+    }
+    if (!assetsDir.exists()) {
+        throw new GradleException("assets directory does not exist: ${assetsDir}")
+    }
+
+    def files = []
+    if (assetsDir.exists()) {
+        files = assetsDir.listFiles().findAll { it.name.endsWith('.tflite') || it.name.endsWith('.bin') }.collect { it.name }
+    }
+
+    def xmlContent = """<?xml version="1.0" encoding="utf-8"?>
+<resources>
+    <string-array name="model_files">
+"""
+    files.each { fileName ->
+        xmlContent += "        <item>${fileName}</item>\n"
+    }
+    xmlContent += """    </string-array>
+</resources>
+"""
+    outputFile.text = xmlContent
+
+}
diff --git a/apps/android/SuperResolution/src/main/java/com/quicinc/superresolution/MainActivity.java b/apps/android/SuperResolution/src/main/java/com/quicinc/superresolution/MainActivity.java
index d2ae0fa..9d4f78b 100644
--- a/apps/android/SuperResolution/src/main/java/com/quicinc/superresolution/MainActivity.java
+++ b/apps/android/SuperResolution/src/main/java/com/quicinc/superresolution/MainActivity.java
@@ -15,6 +15,8 @@
 import android.os.Handler;
 import android.os.Looper;
 import android.provider.MediaStore;
+import android.text.TextUtils;
+import android.util.Log;
 import android.view.View;
 import android.widget.AdapterView;
 import android.widget.ArrayAdapter;
@@ -51,7 +53,7 @@ public class MainActivity extends AppCompatActivity {
     ImageView selectedImageView;
     TextView inferenceTimeView;
     TextView predictionTimeView;
-    Spinner imageSelector;
+    Spinner imageSelector, modelSelector;
     Button predictionButton;
     ActivityResultLauncher<Intent> selectImageResultLauncher;
     private final String fromGalleryImageSelectorOption = "From Gallery";
@@ -62,6 +64,8 @@ public class MainActivity extends AppCompatActivity {
             "Sample2.jpg",
             fromGalleryImageSelectorOption};

+    private String[] modelSelectorOptions;
+
     // Inference Elements
     Bitmap selectedImage = null; // Raw image, not resized
     private SuperResolution defaultDelegateUpscaler;
@@ -91,6 +95,7 @@ protected void onCreate(Bundle savedInstanceState) {
         allDelegatesButton = (RadioButton)findViewById(R.id.defaultDelegateRadio);
         imageSelector = (Spinner) findViewById((R.id.imageSelector));
+        modelSelector = (Spinner) findViewById((R.id.modelSelector));
         inferenceTimeView = (TextView)findViewById(R.id.inferenceTimeResultText);
         predictionTimeView = (TextView)findViewById(R.id.predictionTimeResultText);
         predictionButton = (Button)findViewById(R.id.runModelButton);
@@ -122,6 +127,26 @@ public void onItemSelected(AdapterView<?> parent, View view, int position, long
             public void onNothingSelected(AdapterView<?> parent) { }
         });

+        // Setup Model Selector Dropdown
+        modelSelectorOptions = getResources().getStringArray(R.array.model_files);
+        ArrayAdapter<String> modelAdapter = new ArrayAdapter<>(this, android.R.layout.simple_spinner_item, modelSelectorOptions);
+        modelAdapter.setDropDownViewResource(android.R.layout.simple_spinner_dropdown_item);
+        modelSelector.setAdapter(modelAdapter);
+        modelSelector.setOnItemSelectedListener(new AdapterView.OnItemSelectedListener() {
+            @Override
+            public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
+                // Style the selected spinner item text
+                ((TextView) view).setTextColor(getResources().getColor(R.color.white));
+                ((TextView) view).setEllipsize(TextUtils.TruncateAt.END);
+
+                // Exit the UI thread and instantiate the model in the background.
+                String modelName = parent.getItemAtPosition(position).toString();
+                createTFLiteUpscalerAsync(modelName);
+            }
+
+            @Override
+            public void onNothingSelected(AdapterView<?> parent) { }
+        });
         // Setup Image Selection from Phone Gallery
         selectImageResultLauncher = registerForActivityResult(
                 new ActivityResultContracts.StartActivityForResult(),
@@ -155,9 +180,6 @@ public void onNothingSelected(AdapterView<?> parent) { }
         // Setup button callback
         predictionButton.setOnClickListener((view) -> updatePredictionDataAsync());

-        // Exit the UI thread and instantiate the model in the background.
-        createTFLiteUpscalerAsync();
-
         // Enable image selection
         enableImageSelector();
         enableDelegateSelectionButtons();
@@ -176,12 +198,15 @@ void setInferenceUIEnabled(boolean enabled) {
             predictionButton.setAlpha(0.5f);
             imageSelector.setEnabled(false);
             imageSelector.setAlpha(0.5f);
+            modelSelector.setEnabled(false);
+            modelSelector.setAlpha(0.5f);
             cpuOnlyButton.setEnabled(false);
             allDelegatesButton.setEnabled(false);
         } else if (cpuOnlyUpscaler != null && defaultDelegateUpscaler != null && selectedImage != null) {
             predictionButton.setEnabled(true);
             predictionButton.setAlpha(1.0f);
             enableImageSelector();
+            enableModelSelector();
             enableDelegateSelectionButtons();
         }
     }
@@ -193,6 +218,13 @@ void enableImageSelector() {
         imageSelector.setEnabled(true);
         imageSelector.setAlpha(1.0f);
     }
+    /**
+     * Enable the model selector UI spinner.
+     */
+    void enableModelSelector() {
+        modelSelector.setEnabled(true);
+        modelSelector.setAlpha(1.0f);
+    }

     /**
      * Enable the image selector UI radio buttons.
@@ -327,9 +359,11 @@ void updatePredictionDataAsync() {
      * Loading the TF Lite model takes time, so this is done asynchronously to the main UI thread.
      * Disables the inference UI during load and reenables it afterwards.
      */
-    void createTFLiteUpscalerAsync() {
+    void createTFLiteUpscalerAsync(final String tfLiteModelAsset) {
         if (defaultDelegateUpscaler != null || cpuOnlyUpscaler != null) {
-            throw new RuntimeException("Classifiers were already created");
+            if (defaultDelegateUpscaler != null) defaultDelegateUpscaler.close();
+            if (cpuOnlyUpscaler != null) cpuOnlyUpscaler.close();
+//            throw new RuntimeException("Classifiers were already created");
         }
         setInferenceUIEnabled(false);
@@ -337,7 +371,6 @@ void createTFLiteUpscalerAsync() {
         backgroundTaskExecutor.execute(() -> {
             // Create two upscalers.
             // One uses the default set of delegates (can access NPU, GPU, CPU), and the other uses only XNNPack (CPU).
-            String tfLiteModelAsset = this.getResources().getString(R.string.tfLiteModelAsset);
             try {
                 defaultDelegateUpscaler = new SuperResolution(
                         this,
@@ -352,6 +385,7 @@
             } catch (IOException | NoSuchAlgorithmException e) {
                 throw new RuntimeException(e.getMessage());
             }
+            Log.i("createTFLiteUpscalerAsync", "Model load finished: " + tfLiteModelAsset);

             mainLooperHandler.post(() -> setInferenceUIEnabled(true));
         });
diff --git a/apps/android/SuperResolution/src/main/res/layout/main_activity.xml b/apps/android/SuperResolution/src/main/res/layout/main_activity.xml
index f17b2cf..afff999 100644
--- a/apps/android/SuperResolution/src/main/res/layout/main_activity.xml
+++ b/apps/android/SuperResolution/src/main/res/layout/main_activity.xml
@@ -29,6 +29,39 @@
 [hunk body lost in extraction: the 33 added lines introduce the model-selection UI in main_activity.xml, including the Spinner that MainActivity looks up as R.id.modelSelector]
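Note: for reference, below is a minimal sketch of the res/values/models.xml resource that generateModelList() writes at build time (which is why the file is added to .gitignore). The asset file names are hypothetical examples; the model_files string-array name matches what MainActivity reads via R.array.model_files to populate the model spinner.

    <?xml version="1.0" encoding="utf-8"?>
    <resources>
        <!-- One <item> per .tflite/.bin file found under src/main/assets; names below are illustrative only -->
        <string-array name="model_files">
            <item>quicksrnet_small.tflite</item>
            <item>xlsr.tflite</item>
        </string-array>
    </resources>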