Commit eae278ba authored by 姜天宇

feat: add GPU inference

parent 309f64be
......@@ -3,8 +3,8 @@ plugins {
id 'org.jetbrains.kotlin.android'
}
def APP_VERSION_CODE = 1000202
def APP_VERSION_NAME = "1.0.2.2"
def APP_VERSION_CODE = 1000203
def APP_VERSION_NAME = "1.0.2.3"
android {
namespace 'com.wmdigit.cateringdetect'
......
......@@ -4,6 +4,7 @@ import android.app.ProgressDialog;
import android.content.Context;
import android.os.Bundle;
import android.view.View;
import android.view.WindowManager;
import android.view.inputmethod.InputMethodManager;
import androidx.activity.OnBackPressedCallback;
......@@ -60,6 +61,11 @@ public abstract class BaseFragment extends Fragment {
private void initProgressDialog() {
mProgressDialog = new ProgressDialog(requireContext());
mProgressDialog.setCanceledOnTouchOutside(false);
mProgressDialog.setCancelable(false);
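// FLAG_FULLSCREEN hides the status bar while the loading dialog is showing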
mProgressDialog.getWindow().setFlags(
WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN
);
}
/**
......
......@@ -17,5 +17,13 @@ public enum MessageType {
/**
* 通过商品编码删除所有向量
*/
DEL_ALL_FEATURES_BY_PRODUCT_CODE
DEL_ALL_FEATURES_BY_PRODUCT_CODE,
/**
* 开始加载GPU
*/
GPU_LOAD_START,
/**
* GPU初始化完成
*/
GPU_LOAD_END
}
package com.wmdigit.common.model.message;
import com.wmdigit.common.base.model.BaseMessage;
import com.wmdigit.common.enums.MessageType;
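/**
 * Posted on the EventBus when GPU model initialization has completed.
 */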
public class GpuLoadEndMessage extends BaseMessage<String> {
public GpuLoadEndMessage(String payload) {
super(MessageType.GPU_LOAD_END, payload);
}
}
package com.wmdigit.common.model.message;
import com.wmdigit.common.base.model.BaseMessage;
import com.wmdigit.common.enums.MessageType;
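/**
 * Posted on the EventBus when GPU model initialization starts.
 */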
public class GpuLoadStartMessage extends BaseMessage<String> {
public GpuLoadStartMessage(String payload) {
super(MessageType.GPU_LOAD_START, payload);
}
}
......@@ -2,6 +2,7 @@ package com.wmdigit.common.view.spinner;
import android.content.Context;
import android.util.AttributeSet;
import android.util.Log;
import android.view.LayoutInflater;
import android.view.MotionEvent;
import android.view.View;
......@@ -33,7 +34,7 @@ public class MySpinner extends ConstraintLayout {
private Spinner spinner;
private static InverseBindingListener inverseBindingListener;
private InverseBindingListener inverseBindingListener;
public MySpinner(@NonNull Context context) {
super(context);
......@@ -72,6 +73,7 @@ public class MySpinner extends ConstraintLayout {
@Override
public void onItemSelected(AdapterView<?> parent, View view, int position, long id) {
if (inverseBindingListener != null && touchSpinnerTime > 0){
// Log.d("Spinner", "onItemSelected1触发" + position);
inverseBindingListener.onChange();
}
}
......@@ -133,7 +135,7 @@ public class MySpinner extends ConstraintLayout {
@BindingAdapter(value = "selectedIndexAttrChanged")
public static void setSelectedIndexChangedListener(MySpinner view, InverseBindingListener listener){
if (listener != null){
inverseBindingListener = listener;
view.inverseBindingListener = listener;
}
}
......
......@@ -94,4 +94,7 @@
<string name="delete_success">删除成功</string>
<string name="delete_failed_empty_product_code">删除失败,商品编码为空</string>
<string name="loading_gpu">GPU初始化中</string>
<string name="loading_gpu_content">首次初始化会花费5左右分钟时间,请耐心等待</string>
</resources>
\ No newline at end of file
......@@ -7,9 +7,28 @@ void* handle;
*/
extern "C"
JNIEXPORT jint JNICALL
Java_com_wmdigit_core_catering_dish_DishDetection_init(JNIEnv *env, jobject thiz) {
LOGD("初始化DET");
return DETFEA_Init(nullptr, nullptr, DETFEA_CPU, &handle);
Java_com_wmdigit_core_catering_dish_DishDetection_init(JNIEnv *env, jobject thiz, jboolean useGpu, jstring cachePath1, jstring cachePath2) {
int ret = -1;
LOGD("初始化det");
DETFAE_INIT_INFO initInfo;
const char *jpath1 = env->GetStringUTFChars(cachePath1, nullptr);
const char *jpath2 = env->GetStringUTFChars(cachePath2, nullptr);
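// Pick the inference backend: OpenCL when GPU inference is requested, CPU otherwise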
if (useGpu){
initInfo.device_type = DETFEA_OPENCL;
}
else{
initInfo.device_type = DETFEA_CPU;
}
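// precision_level 2 = Precision_Low (see the DETFAE_INIT_INFO field comment)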
initInfo.precision_level = 2;
initInfo.cache_dir1 = jpath1;
initInfo.cache_dir2 = jpath2;
// LOGD("InitDetFea cache path: %s", jpath);
ret = DETFAE_Init(initInfo, &handle);
LOGD("InitDet: %x", ret);
env->ReleaseStringUTFChars(cachePath1, jpath1);
env->ReleaseStringUTFChars(cachePath2, jpath2);
return ret;
}
/**
......
......@@ -4,9 +4,26 @@ void* handle;
extern "C"
JNIEXPORT jint JNICALL
Java_com_wmdigit_core_catering_plate_PlateDetection_init(JNIEnv *env, jobject thiz) {
Java_com_wmdigit_core_catering_plate_PlateDetection_init(JNIEnv *env, jobject thiz, jboolean useGpu, jstring cachePath1, jstring cachePath2) {
int ret = -1;
LOGD("初始化餐盘识别模型");
return DETFEA_COLOR_Init(nullptr, nullptr, DETFEA_COLOR_CPU, &handle);
DETFEA_COLOR_INIT_INFO initInfo;
const char *jpath1 = env->GetStringUTFChars(cachePath1, nullptr);
const char *jpath2 = env->GetStringUTFChars(cachePath2, nullptr);
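// Pick the inference backend: OpenCL when GPU inference is requested, CPU otherwise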
if (useGpu){
initInfo.device_type = DETFEA_COLOR_OPENCL;
}
else{
initInfo.device_type = DETFEA_COLOR_CPU;
}
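// precision_level 2 = Precision_Low (see the DETFEA_COLOR_INIT_INFO field comment)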
initInfo.precision_level = 2;
initInfo.cache_dir1 = jpath1;
initInfo.cache_dir2 = jpath2;
ret = DETFEA_COLOR_Init(initInfo, &handle);
LOGD("InitDetFeaColor: %x", ret);
env->ReleaseStringUTFChars(cachePath1, jpath1);
env->ReleaseStringUTFChars(cachePath2, jpath2);
return ret;
}
extern "C"
......
......@@ -5,6 +5,7 @@
#ifndef CATERINGDETECT_CATERING_DISH_DETECTION_H
#define CATERINGDETECT_CATERING_DISH_DETECTION_H
#include <jni.h>
#include "libdet.h"
#include "libdetfea.h"
#include "image_tools.h"
//#include <OpenCL/opencl.h>
......
......@@ -6,6 +6,7 @@
#define CATERINGDETECT_CATERING_PLATE_DETECTION_H
#include <jni.h>
#include "libdet.h"
#include "libdetfea_color.h"
#include "image_tools.h"
......
#pragma once
#ifndef _DET_LIB_H_
#define _DET_LIB_H_
#include <vector>
#include "opencv2/opencv.hpp"
#define DET_MAX_OBJ_NUM 15
#define DET_ALL_OK 0x00000000 //
#define DET_OUTPUT_SHAPE_ERROR 0x10040001 // output tensor shape is wrong
#define DET_INPUT_IMAGE_EMPTY 0x10040002 // input image is empty
#define DET_HANDLE_NULL 0x10040003 // input handle is null
#define DET_OUTPUT_NULL 0x10040004 // output struct pointer is null
#define DET_SESSION_NULL 0x10040005 //
#define DET_INIT_MODEL_PATH_NOT_EXIST 0x10040006 // model path does not exist
#define DET_CREATE_NET_FAILED 0x10040007 // failed to create the network model
#define DET_GET_NET_INPUT_FAILED 0x10040008 // failed to get the input tensor
#define DET_GET_NET_OUTPUT_13_FAILED 0x10040009 // failed to get the output tensor (13)
#define DET_GET_NET_OUTPUT_26_FAILED 0x1004000A // failed to get the output tensor (26)
#define DET_GET_NET_OUTPUT_52_FAILED 0x1004000B // failed to get the output tensor (52)
#define DET_OUTPUT_13_SHAPE_ERROR 0x1004000C // output tensor (13) shape does not match the configured shape
#define DET_OUTPUT_26_SHAPE_ERROR 0x1004000D // output tensor (26) shape does not match the configured shape
#define DET_OUTPUT_52_SHAPE_ERROR 0x10040010 // output tensor (52) shape does not match the configured shape
#define DET_GET_NET_SESSION_FAILED 0x10040011 // failed to get the network session
#define DET_FREE_SESSION_FAILED 0x10040012 // failed to free the session
#define DET_INPUT_SHAPE_ERROR 0x10040013 // input tensor shape does not match the configured shape
typedef struct _DET_INPUT_
{
cv::Mat img;
}DET_INPUT;
typedef struct _DET_SINGLE_OUTPUT_
{
float x1;
float y1;
float x2;
float y2;
float prob;
int id;
int label;
}DET_SINGLE_OUTPUT;
typedef struct _DET_OUTPUT_
{
std::vector < DET_SINGLE_OUTPUT> output_list;
}DET_OUTPUT;
// Device types available to the detection algorithm
typedef enum _DET_DEVICE_
{
DET_CPU = 0x0000, // CPU
DET_OPENCL = 0x0001,
DET_VULKAN = 0x0002,
DET_OPENGL = 0x0003,
}DET_DEVICE;
typedef struct _DET_INIT_INFO_
{
int precision_level = 0; // Precision_Normal = 0, Precision_High=1, Precision_Low=2
int mem_level = 0; // Memory_Normal = 0, Memory_High=1, Memory_Low=2
int pow_level = 0; // Power_Normal = 0, Power_High=1, Power_Low=2
const char* model_path = NULL;
const char* cache_dir = NULL;
DET_DEVICE device_type;
}DET_INIT_INFO;
/***************************************************************************************************
* Function   : initialization
* Parameters :
*     DET_INIT_INFO init_info - I  init options (model_path may be left NULL to use the built-in model)
*     void** det_handle       - O  handle
* Return     : error code
***************************************************************************************************/
int DET_Init(DET_INIT_INFO init_info,void** det_handle);
/***************************************************************************************************
* Function   : detection
* Parameters :
*     DET_INPUT in_img            - I  input image
*     DET_OUTPUT* clsretri_output - O  detection results
*     void* handle                - I  handle
* Return     : error code
***************************************************************************************************/
int DET_Process(DET_INPUT in_img, DET_OUTPUT* clsretri_output, void* handle);
/***************************************************************************************************
* Function   : release the handle
* Parameters :
*     void** handle - I  handle
* Return     : error code
***************************************************************************************************/
int DET_Release(void** handle);
#endif
\ No newline at end of file
......@@ -45,10 +45,22 @@ typedef struct _DETFEA_OUTPUT_
typedef enum _DETFEA_DEVICE_
{
DETFEA_CPU = 0x0000, // CPU
DETFEA_GPU = 0x0001, // GPU
DETFEA_OPENCL = 0x0001,
DETFEA_VULKAN = 0x0002,
DETFEA_OPENGL = 0x0003,
}DETFEA_DEVICE;
typedef struct _DETFAE_INIT_INFO_
{
int precision_level = 0; // Precision_Normal = 0, Precision_High=1, Precision_Low=2
int mem_level = 0; // Memory_Normal = 0, Memory_High=1, Memory_Low=2
int pow_level = 0; // Power_Normal = 0, Power_High=1, Power_Low=2
const char* model_path1 = NULL;
const char* model_path2 = NULL;
const char* cache_dir1 = NULL;
const char* cache_dir2 = NULL;
DETFEA_DEVICE device_type;
}DETFAE_INIT_INFO;
/***************************************************************************************************
* Function   : initialization
......@@ -58,10 +70,7 @@ typedef enum _DETFEA_DEVICE_
* void** detfea_handle - O  handle
* Return     : error code
***************************************************************************************************/
int DETFEA_Init(const char* model_path1,
const char* model_path2,
DETFEA_DEVICE device_type,
void** handle);
int DETFAE_Init(DETFAE_INIT_INFO init_info,void** detfea_handle);
/***************************************************************************************************
......@@ -105,4 +114,4 @@ int DETFEA_GetDetIOUThres(float &value, void* handle);
int DETFEA_SetDetConfThres(float value, void* handle);
int DETFEA_GetDetConfThres(float& value, void* handle);
#endif
\ No newline at end of file
#endif
......@@ -10,16 +10,16 @@
#define DETFEA_COLOR_FEAT_DIM 160
#define DETFEA_COLOR_ALL_OK 0x00000000 //
#define DETFEA_COLOR_HANDLE_NULL 0x10070001 // 输入句柄为空
#define DETFEA_COLOR_INPUT_IMAGE_EMPTY 0x10070002 // 输入图像为空
#define DETFEA_COLOR_OUTPUT_NULL 0x10070003 // 输入的输出结构体为空
#define DETFEA_COLOR_HANDLE_NULL 0x10070001 // input handle is null
#define DETFEA_COLOR_INPUT_IMAGE_EMPTY 0x10070002 // input image is empty
#define DETFEA_COLOR_OUTPUT_NULL 0x10070003 // output struct pointer is null
typedef struct _DETFEA_COLOR_INPUT_
{
cv::Mat img;
bool dump_debug_crop_image=false; // 是否打开dump debug图
unsigned long int pts=0; // 输入时间戳,用于dump图时命名不同的输入图片
bool dump_debug_crop_image=false; // whether to dump debug crop images
unsigned long int pts=0; // input timestamp, used to give dumped images distinct file names
}DETFEA_COLOR_INPUT;
......@@ -41,48 +41,57 @@ typedef struct _DETFEA_COLOR_OUTPUT_
}DETFEA_COLOR_OUTPUT;
// 定义算法的识别设
// Device types available to the algorithm
typedef enum _DETFEA_COLOR_DEVICE_
{
DETFEA_COLOR_CPU = 0x0000, // CPU
DETFEA_COLOR_GPU = 0x0001, // GPU
DETFEA_COLOR_OPENCL = 0x0001,
DETFEA_COLOR_VULKAN = 0x0002,
DETFEA_COLOR_OPENGL = 0x0003,
}DETFEA_COLOR_DEVICE;
typedef struct _DETFEA_COLOR_INIT_INFO_
{
int precision_level = 0; // Precision_Normal = 0, Precision_High=1, Precision_Low=2
int mem_level = 0; // Memory_Normal = 0, Memory_High=1, Memory_Low=2
int pow_level = 0; // Power_Normal = 0, Power_High=1, Power_Low=2
const char* model_path1 = NULL;
const char* model_path2 = NULL;
const char* cache_dir1 = NULL;
const char* cache_dir2 = NULL;
DETFEA_COLOR_DEVICE device_type;
}DETFEA_COLOR_INIT_INFO;
/***************************************************************************************************
* 功 能: 初始化
* 参 数:
* const char* model_path - I 模型路径(这个可以设为NULL,表示用内置的模型)
* DETFEA_COLOR_DEVICE device_name - I 设备类型
* void** detfea_handle - O 句柄
* 返回值: 错误码
* Function   : initialization
* Parameters :
*     DETFEA_COLOR_INIT_INFO init_info - I  init options (model paths may be left NULL to use the built-in models)
*     void** detfea_color_handle       - O  handle
* Return     : error code
***************************************************************************************************/
int DETFEA_COLOR_Init(const char* model_path1,
const char* model_path2,
DETFEA_COLOR_DEVICE device_type,
void** handle);
int DETFEA_COLOR_Init(DETFEA_COLOR_INIT_INFO init_info,void** detfea_color_handle);
/***************************************************************************************************
* 功 能: 识别
* 参 数:
* DETFEA_COLOR_INPUT in_img - I 输入图片
* DETFEA_COLOR_OUTPUT* detfea_output - O 返回识别结果
* void* handle - I 句柄
* 返回值: 错误码
* Function   : detection
* Parameters :
*     DETFEA_COLOR_INPUT in_img                - I  input image
*     DETFEA_COLOR_OUTPUT* detfea_color_output - O  detection results
*     void* handle                             - I  handle
* Return     : error code
***************************************************************************************************/
int DETFEA_COLOR_Process(DETFEA_COLOR_INPUT in_img, DETFEA_COLOR_OUTPUT* detfea_output, void* handle);
int DETFEA_COLOR_Process(DETFEA_COLOR_INPUT in_img, DETFEA_COLOR_OUTPUT* detfea_color_output, void* handle);
/***************************************************************************************************
* 功 能: 释放句柄
* 参 数:
* void** handle - I 句柄
* 返回值: 错误码
* Function   : release the handle
* Parameters :
*     void** handle - I  handle
* Return     : error code
***************************************************************************************************/
int DETFEA_COLOR_Release(void** handle);
#endif
\ No newline at end of file
#endif
......@@ -8,6 +8,8 @@ import android.graphics.Paint;
import com.elvishew.xlog.XLog;
import com.wmdigit.common.model.CropValueDTO;
import com.wmdigit.common.model.message.GpuLoadEndMessage;
import com.wmdigit.common.model.message.GpuLoadStartMessage;
import com.wmdigit.core.catering.dish.DishDetection;
import com.wmdigit.core.catering.model.TargetDetectResult;
import com.wmdigit.core.catering.plate.PlateDetection;
......@@ -16,6 +18,8 @@ import com.wmdigit.data.disk.repository.DiskRepository;
import com.wmdigit.data.mmkv.repository.AiLocalRepository;
import com.wmdigit.data.mmkv.repository.CropLocalRepository;
import org.greenrobot.eventbus.EventBus;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
......@@ -110,11 +114,15 @@ public class TargetDetectionRepository {
// 使用Observable异步初始化目标检测
Observable.create(emitter -> {
initializeComplete.set(false);
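// Tell the UI that model loading has started so it can show the GPU-loading dialog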
EventBus.getDefault().post(new GpuLoadStartMessage(""));
targetDetection.initTargetDetection();
emitter.onNext(true);
}).observeOn(Schedulers.io())
.subscribeOn(AndroidSchedulers.mainThread())
.subscribe(success -> initializeComplete.set(true), error -> {
}).subscribeOn(Schedulers.io())
.observeOn(AndroidSchedulers.mainThread())
.subscribe(success -> {
initializeComplete.set(true);
EventBus.getDefault().post(new GpuLoadEndMessage(""));
}, error -> {
// 初始化失败时设置标志为false并记录错误日志
initializeComplete.set(false);
XLog.e(error);
......@@ -132,6 +140,9 @@ public class TargetDetectionRepository {
* @return
*/
public TargetDetectResult processImage(Bitmap bitmap, boolean isNeedCrop){
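// Bail out while the model is still initializing (e.g. GPU kernels compiling)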
if(!initializeComplete.get()){
return null;
}
TargetDetectResult result;
// 先将图片裁剪
if (isNeedCrop && CropLocalRepository.getInstance().getHasCropped()){
......
......@@ -3,6 +3,7 @@ package com.wmdigit.core.catering.dish;
import android.graphics.Bitmap;
import com.elvishew.xlog.XLog;
import com.wmdigit.core.CoreModule;
import com.wmdigit.core.catering.TargetDetection;
import com.wmdigit.core.catering.TargetDetectionRepository;
import com.wmdigit.core.catering.model.TargetDetectResult;
......@@ -25,7 +26,7 @@ public class DishDetection implements TargetDetection {
* JNI初始化目标检测算法
* @return
*/
private native int init();
private native int init(boolean useGpu, String cachePath1, String cachePath2);
/**
* JNI推理图片
......@@ -51,7 +52,9 @@ public class DishDetection implements TargetDetection {
@Override
public void initTargetDetection(){
synchronized (syncLock) {
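// Per-model cache files handed to the native init; the first GPU init can take ~5 minutes, and these are presumably reused to speed up later launches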
int ret = init();
String cachePath1 = CoreModule.getAppContext().getExternalFilesDir("cache").getAbsolutePath() + "/opengl_dish.cache";
String cachePath2 = CoreModule.getAppContext().getExternalFilesDir("cache").getAbsolutePath() + "/opengl_dish2.cache";
int ret = init(true, cachePath1, cachePath2);
XLog.i("菜品识别算法初始化结果:%s", ret);
}
}
......
......@@ -3,6 +3,7 @@ package com.wmdigit.core.catering.plate;
import android.graphics.Bitmap;
import com.elvishew.xlog.XLog;
import com.wmdigit.core.CoreModule;
import com.wmdigit.core.catering.TargetDetection;
import com.wmdigit.core.catering.TargetDetectionRepository;
import com.wmdigit.core.catering.model.TargetDetectResult;
......@@ -25,7 +26,7 @@ public class PlateDetection implements TargetDetection {
* JNI初始化目标检测算法
* @return
*/
private native int init();
private native int init(boolean useGpu, String cachePath1, String cachePath2);
/**
* JNI推理图片
......@@ -50,7 +51,9 @@ public class PlateDetection implements TargetDetection {
@Override
public void initTargetDetection() {
synchronized (syncLock) {
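// Same pattern as DishDetection: cache file paths for the native GPU init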
int ret = init();
String cachePath1 = CoreModule.getAppContext().getExternalFilesDir("cache").getAbsolutePath() + "/opengl_plate1.cache";
String cachePath2 = CoreModule.getAppContext().getExternalFilesDir("cache").getAbsolutePath() + "/opengl_plate2.cache";
int ret = init(true, cachePath1, cachePath2);
XLog.i("餐盘识别算法初始化结果:%s", ret);
}
}
......
......@@ -24,7 +24,7 @@ v1.0.2.1 2025/04/22 1.特征表增加字段记录图片地址,数据库版本
v1.0.2.2 2025/04/25 1.UvcCamera从mjpeg改为yuv
2.增加推理时的日志
3.修改索引库默认阈值为0.77
v1.0.2.3 2025/04/25 1.todo 增加GPU推理
v1.0.2.3 2025/04/25 1.增加GPU推理
2.增加纠错功能
3.学习页增加重置背景功能
......
......@@ -15,9 +15,9 @@ import com.wmdigit.common.base.viewholder.BaseViewHolder;
import com.wmdigit.common.model.ProductsVO;
import com.wmdigit.setting.R;
import com.wmdigit.setting.adapter.diff.ProductsDiffUtil;
import com.wmdigit.setting.model.DelAllFeaturesByProductCodeMessage;
import com.wmdigit.setting.model.message.DelAllFeaturesByProductCodeMessage;
import com.wmdigit.setting.model.LocalImage;
import com.wmdigit.setting.model.DelOneFeatureByPathMessage;
import com.wmdigit.setting.model.message.DelOneFeatureByPathMessage;
import org.greenrobot.eventbus.EventBus;
......
......@@ -13,8 +13,8 @@ import com.wmdigit.common.view.keyboard.EnglishAndNumberKeyboard;
import com.wmdigit.setting.R;
import com.wmdigit.setting.adapter.ProductsAdapter;
import com.wmdigit.setting.databinding.FragmentDataLearningBinding;
import com.wmdigit.setting.model.DelAllFeaturesByProductCodeMessage;
import com.wmdigit.setting.model.DelOneFeatureByPathMessage;
import com.wmdigit.setting.model.message.DelAllFeaturesByProductCodeMessage;
import com.wmdigit.setting.model.message.DelOneFeatureByPathMessage;
import com.wmdigit.setting.viewmodel.DataLearningViewModel;
import com.wmdigit.setting.viewmodel.SettingViewModel;
......@@ -56,10 +56,8 @@ public class DataLearningFragment extends BaseMvvmFragment<DataLearningViewModel
});
// 观察页码变化
mViewModel.currentPage.observe(this, page -> {
if (!mViewModel.currentPage.getValue().equals(page)) {
mViewModel.onPageChangedEvent(page);
productsAdapter.diffAndUpdate();
}
mViewModel.onPageChangedEvent(page);
productsAdapter.diffAndUpdate();
});
// 观察学习模式
settingViewModel.getModeOnLearningPage().observe(this, mode->{
......
package com.wmdigit.setting.fragment;
import android.widget.ArrayAdapter;
import com.wmdigit.common.model.SimpleUsbDevice;
import com.wmdigit.common.base.mvvm.BaseMvvmFragment;
import com.wmdigit.core.catering.TargetDetectionRepository;
import com.wmdigit.network.bean.response.AppVersionDTO;
import com.wmdigit.setting.R;
import com.wmdigit.setting.databinding.FragmentSystemInfoBinding;
import com.wmdigit.common.model.message.GpuLoadEndMessage;
import com.wmdigit.common.model.message.GpuLoadStartMessage;
import com.wmdigit.setting.viewmodel.SystemInfoViewModel;
import com.wmdigit.upgrade.AppUpgradePopupWindow;
import org.greenrobot.eventbus.EventBus;
import org.greenrobot.eventbus.Subscribe;
import org.greenrobot.eventbus.ThreadMode;
/**
* 系统信息页
* @author dizi
......@@ -24,7 +28,9 @@ public class SystemInfoFragment extends BaseMvvmFragment<SystemInfoViewModel, Fr
@Override
protected void initObserve() {
// 识别模式监听
mViewModel.aiMode.observe(this, position-> mViewModel.saveAiMode(position));
mViewModel.aiMode.observe(this, position-> {
mViewModel.saveAiMode(position);
});
// 监听HTTP请求信息
mViewModel.httpToast.observe(this, this::showToast);
// 监听升级消息
......@@ -57,6 +63,20 @@ public class SystemInfoFragment extends BaseMvvmFragment<SystemInfoViewModel, Fr
return SystemInfoViewModel.class;
}
@Override
public void onResume() {
super.onResume();
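// Register for GPU load events and sync the loading dialog with the current init state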
EventBus.getDefault().register(this);
// 展示加载框
showGpuLoadingProgress();
}
@Override
public void onPause() {
super.onPause();
EventBus.getDefault().unregister(this);
}
private void appUpgrade(AppVersionDTO appVersionDTO){
AppUpgradePopupWindow appUpgradePopupWindow = new AppUpgradePopupWindow(getActivity())
.setVersionName(appVersionDTO.getVersionName())
......@@ -64,4 +84,28 @@ public class SystemInfoFragment extends BaseMvvmFragment<SystemInfoViewModel, Fr
.setDlUrl(appVersionDTO.getDownloadUrl());
appUpgradePopupWindow.showWindow();
}
private void showGpuLoadingProgress(){
if (!TargetDetectionRepository.getInstance().isInitializeComplete()){
mProgressDialog.setTitle(com.wmdigit.common.R.string.loading_gpu);
mProgressDialog.setMessage(getString(com.wmdigit.common.R.string.loading_gpu_content));
mProgressDialog.show();
}
else {
if (mProgressDialog.isShowing()){
mProgressDialog.dismiss();
}
}
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onGpuLoadStart(GpuLoadStartMessage message){
// 展示加载框
showGpuLoadingProgress();
}
@Subscribe(threadMode = ThreadMode.MAIN)
public void onGpuLoadEnd(GpuLoadEndMessage message){
if (mProgressDialog.isShowing()){
mProgressDialog.dismiss();
}
}
}
\ No newline at end of file
package com.wmdigit.setting.model;
package com.wmdigit.setting.model.message;
import com.wmdigit.common.base.model.BaseMessage;
import com.wmdigit.common.enums.MessageType;
......
package com.wmdigit.setting.model;
package com.wmdigit.setting.model.message;
import com.wmdigit.common.base.model.BaseMessage;
import com.wmdigit.common.enums.MessageType;
......
......@@ -136,7 +136,7 @@ public class DataLearningViewModel extends BaseViewModel {
}
else{
// 纠错模式
pageSize = 8;
pageSize = 5;
}
// 根据当前关键词,重新查询数据
onKeywordsChangedEvent(keywords.getValue());
......@@ -286,7 +286,7 @@ public class DataLearningViewModel extends BaseViewModel {
products.addAll(ProductsRepository.getInstance().queryByKeywords(keyword, page, pageSize));
}
else{
products.addAll(ProductsRepository.getInstance().queryProductsAndImgPathsByKeywords(keyword, 1, pageSize));
products.addAll(ProductsRepository.getInstance().queryProductsAndImgPathsByKeywords(keyword, page, pageSize));
}
for (ProductsVO vo : products){
vo.setLayoutType(learningMode);
......