Browse Source

Merge HDMI-in functionality into the existing APK

tags/A.1.0.0.0_20230714_alpha
tangmingjie 2 years ago
parent
commit
0b9d10c505
48 changed files with 5985 additions and 219 deletions
  1. +1 -2  .idea/gradle.xml
  2. +25 -10  app/build.gradle
  3. BIN  app/libs/framework.jar
  4. BIN  app/libs/hdmiV1_0.jar
  5. +13 -1  app/src/main/AndroidManifest.xml
  6. +7 -0  app/src/main/assets/FaceConfig.xml
  7. BIN  app/src/main/assets/faces.db3
  8. +7 -2  app/src/main/java/com/aispeech/nativedemo/DDSService.java
  9. +12 -0  app/src/main/java/com/aispeech/nativedemo/DuiApplication.java
  10. +810 -31  app/src/main/java/com/aispeech/nativedemo/MainActivity.java
  11. +417 -18  app/src/main/java/com/aispeech/nativedemo/MainActivity2.java
  12. +23 -14  app/src/main/java/com/aispeech/nativedemo/asr/observer/DuiMessageObserver.java
  13. +79 -0  app/src/main/java/com/aispeech/nativedemo/camera/AutoFitTextureView.java
  14. +771 -0  app/src/main/java/com/aispeech/nativedemo/camera/Camera2HelperRgb.java
  15. +49 -0  app/src/main/java/com/aispeech/nativedemo/camera/Camera2Listener.java
  16. +92 -36  app/src/main/java/com/aispeech/nativedemo/camera/CameraController.java
  17. +658 -0  app/src/main/java/com/aispeech/nativedemo/camera/ImageUtil.kt
  18. +965 -0  app/src/main/java/com/aispeech/nativedemo/camera/MainActivity.kt
  19. +8 -1  app/src/main/java/com/aispeech/nativedemo/config/Config.java
  20. +15 -5  app/src/main/java/com/aispeech/nativedemo/dds/DDSManager.java
  21. +7 -12  app/src/main/java/com/aispeech/nativedemo/face/FaceManager.java
  22. +6 -3  app/src/main/java/com/aispeech/nativedemo/network/ws/DigiWebSocketServer.java
  23. +2 -0  app/src/main/java/com/aispeech/nativedemo/network/ws/MessageUtils.java
  24. +131 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/HdmiService.java
  25. +751 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/RockchipCamera2.java
  26. +27 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/util/DataUtils.java
  27. +23 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/util/JniCameraCall.java
  28. +95 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/util/SystemPropertiesProxy.java
  29. +447 -0  app/src/main/java/com/aispeech/nativedemo/rockchip/widget/RoundMenu.java
  30. +2 -1  app/src/main/java/com/aispeech/nativedemo/shape/ShapeManager.java
  31. +36 -0  app/src/main/java/com/aispeech/nativedemo/utils/ResourceUtils.java
  32. +44 -8  app/src/main/java/com/aispeech/nativedemo/widget/AlertWindowView.java
  33. +10 -0  app/src/main/jni/Android.mk
  34. +2 -0  app/src/main/jni/Application.mk
  35. +199 -0  app/src/main/jni/native.cpp
  36. BIN  app/src/main/res/drawable/ic_close.png
  37. +19 -11  app/src/main/res/layout/activity_main.xml
  38. +39 -30  app/src/main/res/layout/activity_main_2.xml
  39. +27 -0  app/src/main/res/layout/activity_main_chip.xml
  40. +107 -0  app/src/main/res/layout/activity_rockchip_camera2.xml
  41. +12 -0  app/src/main/res/values/attrs.xml
  42. +16 -0  app/src/main/res/values/themes.xml
  43. +3 -21  build.gradle
  44. +4 -3  gradle.properties
  45. +2 -3  gradle/wrapper/gradle-wrapper.properties
  46. +15 -0  settings.gradle
  47. +7 -7  test/build.gradle
  48. BIN  test/src/main/assets/vad_modify.bin

+1 -2  .idea/gradle.xml

@@ -5,11 +5,10 @@
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="testRunner" value="GRADLE" />
<option name="disableWrapperSourceDistributionNotification" value="true" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleHome" value="$PROJECT_DIR$/../gradle/gradle-7.5-all" />
<option name="gradleJvm" value="Embedded JDK" />
<option name="gradleJvm" value="jbr-11" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />


+25 -10  app/build.gradle

@@ -6,13 +6,20 @@ android {

defaultConfig {
applicationId "com.aispeech.nativedemo"
minSdkVersion 24
minSdkVersion 26
targetSdkVersion 30
versionCode 1
versionName "1.0"

sourceSets {
main { // this block is required; if your .so files live under libs/armeabi/, setting srcDirs to 'libs' below is enough
jniLibs.srcDirs = ['libs']
}
}

ndk {
// supported ABIs for the bundled native .so libraries
abiFilters 'armeabi', 'armeabi-v7a', 'arm64-v8a'//, 'x86', , 'x86_64'
abiFilters 'armeabi-v7a', 'arm64-v8a'//, 'x86', , 'x86_64'
}
multiDexEnabled true
vectorDrawables.useSupportLibrary = true
@@ -34,7 +41,7 @@ android {

buildTypes {
release {
minifyEnabled true
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.pro'
signingConfig signingConfigs.release
}
@@ -47,6 +54,12 @@ android {
}
}

externalNativeBuild {
ndkBuild {
path file('src/main/jni/Android.mk')
}
}

compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
@@ -70,15 +83,15 @@ configurations {
}
}

repositories {
flatDir {
dirs 'libs', '../test/libs'//this way we can find the .aar file in libs folder
}
}
//repositories {
// flatDir {
// dirs 'libs', '../test/libs'//this way we can find the .aar file in libs folder
// }
//}

dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
implementation fileTree(dir: '../test/libs', include: ['*.jar', '*.aar'])
implementation fileTree(dir: 'libs', excludes: ['framework.jar'], include: ['*.jar', '*.aar'])
implementation fileTree(dir: '../test/libs', excludes: ['framework.jar'], include: ['*.jar', '*.aar'])
//noinspection GradleCompatible
implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'androidx.recyclerview:recyclerview:1.2.1'
@@ -107,4 +120,6 @@ dependencies {
api 'com.squareup.retrofit2:adapter-rxjava:2.1.0'
implementation 'org.apache.xmlrpc:xmlrpc-client:3.1.3'

implementation files('libs/hdmiV1_0.jar')
compileOnly files('libs/framework.jar')
}
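
A note on the dependency changes above: hdmiV1_0.jar is packaged into the APK via implementation, while framework.jar is compileOnly and excluded from the fileTree, since the matching classes already exist on the device and must not be bundled twice. Compiling against framework.jar is typically done to reach hidden platform APIs such as android.os.SystemProperties; this commit's SystemPropertiesProxy is not shown in this excerpt, so the following reflection-based helper is only a hedged sketch of what such a proxy usually looks like (it needs no framework.jar at all):

// Hypothetical sketch; the real rockchip/util/SystemPropertiesProxy.java in this commit may differ.
import java.lang.reflect.Method;

public final class SystemPropertiesProxy {
    private SystemPropertiesProxy() {}

    // Reads a system property through reflection so the hidden class is resolved at runtime.
    public static String get(String key, String def) {
        try {
            Class<?> sp = Class.forName("android.os.SystemProperties");
            Method get = sp.getMethod("get", String.class, String.class);
            return (String) get.invoke(null, key, def);
        } catch (Exception e) {
            return def; // fall back to the default on any reflection failure
        }
    }
}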

BIN  app/libs/framework.jar


BIN  app/libs/hdmiV1_0.jar


+13 -1  app/src/main/AndroidManifest.xml

@@ -20,6 +20,8 @@
<uses-permission android:name="android.permission.SYSTEM_ALERT_WINDOW" />

<uses-permission android:name="android.permission.ACCESS_FINE_LOCATION" />
<uses-permission android:name="android.permission.ACCESS_COARSE_LOCATION" />
<uses-permission android:name="android.permission.MANAGE_MEDIA_PROJECTION" />

<application
android:name=".DuiApplication"
@@ -41,7 +43,7 @@
android:screenOrientation="portrait">

</activity>
<activity android:name=".ui.LauncherActivity">
<activity android:name=".ui.LauncherActivity" android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />

@@ -54,6 +56,16 @@
<service android:name="org.eclipse.paho.android.service.MqttService" />

<activity android:name=".music.PlayerActivity" />

<service
android:name=".rockchip.HdmiService"
android:exported="true">
<intent-filter>
<action android:name="com.android.rockchip.camera2.HdmiService" />

<category android:name="android.intent.category.DEFAULT" />
</intent-filter>
</service>
</application>

</manifest>
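
Since HdmiService is exported with an explicit intent-filter action, it can be bound by action string plus package name, which is exactly the shape of the bindService call left commented out in the activities later in this diff. A minimal sketch of that bind from inside an Activity, assuming only standard Android APIs (the ServiceConnection body is illustrative):

// Sketch: bind the exported HdmiService declared above by its action string.
Intent hdmiService = new Intent("com.android.rockchip.camera2.HdmiService");
hdmiService.setPackage(getPackageName()); // an explicit package is required for service intents
bindService(hdmiService, new ServiceConnection() {
    @Override public void onServiceConnected(ComponentName name, IBinder binder) { /* bound */ }
    @Override public void onServiceDisconnected(ComponentName name) { /* connection lost */ }
}, Context.BIND_AUTO_CREATE);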

+7 -0  app/src/main/assets/FaceConfig.xml

@@ -0,0 +1,7 @@
<?xml version='1.0' encoding='utf-8' standalone='yes' ?>
<map>
<string name="Face.NOW_TS">0</string>
<string name="DeviceInfo.DEV_ID">ROBOT2_PAD2</string>
<string name="DeviceInfo.DEV_SN">TESTPAD22</string>
<string name="DeviceInfo.AUTH_CODE">da45281f771ce5f08b8ac9d5799fe38d</string>
</map>

BIN  app/src/main/assets/faces.db3


+7 -2  app/src/main/java/com/aispeech/nativedemo/DDSService.java

@@ -22,6 +22,7 @@ import com.aispeech.dui.dds.DDS;
import com.aispeech.dui.dds.DDSAuthListener;
import com.aispeech.dui.dds.DDSConfig;
import com.aispeech.dui.dds.DDSInitListener;
import com.aispeech.dui.dds.DDSMode;
import com.aispeech.dui.dds.agent.tts.TTSEngine;
import com.aispeech.dui.dds.exceptions.DDSNotInitCompleteException;
import com.aispeech.nativedemo.log.Logger;
@@ -119,12 +120,13 @@ public class DDSService extends Service {
DDS.getInstance().setAudioDebug(true);
try {
DDS.getInstance().getAgent().getTTSEngine().setMode(TTSEngine.LOCAL);
DDS.getInstance().getAgent().getTTSEngine().setMode(DDSMode.TTS_SILENCE);
// DDS.getInstance().getAgent().getTTSEngine().setSpeaker("zhilingfp");
DDS.getInstance().getAgent().getTTSEngine().setSpeaker("hqqiaf","hqqiaf_lstm_210909.bin");
} catch (DDSNotInitCompleteException e) {
throw new RuntimeException(e);
}
// startAsrListening();
// startTtsListening();
}

@Override
@@ -262,6 +264,8 @@ public class DDSService extends Service {
config.addConfig(DDSConfig.K_AUDIO_CHANNEL_CONF, AudioFormat.CHANNEL_IN_MONO);
config.addConfig(DDSConfig.K_AUDIO_SAMPLERATE, 32000);

// config.addConfig(DDSConfig.K_VAD_BIN, "vad_modify.bin");

// config.addConfig(DDSConfig.K_MIC_ARRAY_AEC_CFG, "/data/aec.bin"); // absolute disk path of the mic-array AEC resource; the developer must ensure the file exists at this path
// config.addConfig(DDSConfig.K_MIC_ARRAY_BEAMFORMING_CFG, "/data/beamforming.bin"); // absolute disk path of the mic-array beamforming resource; the developer must ensure the file exists at this path

@@ -304,8 +308,9 @@ public class DDSService extends Service {

private long mTime = 0;

public void startAsrListening() {
public void startTtsListening() {
try {
DDS.getInstance().getAgent().getTTSEngine().setMode(DDSMode.TTS_SILENCE);
DDS.getInstance().getAgent().getTTSEngine().setListener(new TTSEngine.Callback() {
@Override
public void beginning(String ttsId) {
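
The TTS change above is the heart of this file's diff: the engine moves from local playback (TTSEngine.LOCAL) to DDSMode.TTS_SILENCE with a custom speaker voice, so dialogues still produce nlg text but play no audio locally; DuiMessageObserver (later in this diff) forwards that text to the digital-human frontend instead. Condensed verbatim from the lines above:

// Condensed from the diff: silence local TTS so the avatar frontend does the speaking.
try {
    DDS.getInstance().getAgent().getTTSEngine().setMode(DDSMode.TTS_SILENCE);
    DDS.getInstance().getAgent().getTTSEngine().setSpeaker("hqqiaf", "hqqiaf_lstm_210909.bin");
} catch (DDSNotInitCompleteException e) {
    throw new RuntimeException(e);
}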


+12 -0  app/src/main/java/com/aispeech/nativedemo/DuiApplication.java

@@ -6,9 +6,11 @@ import android.util.Log;

import com.aispeech.dui.dds.DDS;
import com.aispeech.dui.dds.DDSErrorListener;
import com.aispeech.nativedemo.config.Config;
import com.aispeech.nativedemo.face.FaceManager;
import com.aispeech.nativedemo.log.Logger;
import com.aispeech.nativedemo.network.DigiNetworkManager;
import com.aispeech.nativedemo.utils.ResourceUtils;
import com.facebook.drawee.backends.pipeline.Fresco;

import org.json.JSONObject;
@@ -30,6 +32,7 @@ public class DuiApplication extends Application {
FaceManager.getInstance(this).initEngine();
DigiNetworkManager.getManager().init(this);
Logger.init();
// copyDbAndConfig();
}

public static Context getContext() {
@@ -107,4 +110,13 @@ public class DuiApplication extends Application {
}
return sb.toString();
}

/**
* Copy the bundled database and config file out of assets.
*/
private void copyDbAndConfig(){
String filePath = getExternalFilesDir(null).getAbsolutePath();
ResourceUtils.copyFromAssets(this, Config.CONFIG_NAME, filePath + Config.CONFIG_PATH);
ResourceUtils.copyFromAssets(this, Config.DB_NAME, filePath + Config.DB_PATH);
}
}
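
The new copyDbAndConfig above (left disabled in onCreate) seeds the app's external files directory with the bundled faces.db3 and FaceConfig.xml. ResourceUtils.copyFromAssets is added by this commit (+36 lines) but not shown in this excerpt, so this is only a plausible sketch matching the (Context, assetName, destPath) call shape used above:

// Hedged sketch of an assets-copy helper; the committed ResourceUtils may differ.
import android.content.Context;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;

public class ResourceUtils {
    public static void copyFromAssets(Context context, String assetName, String destPath) {
        File dest = new File(destPath);
        if (dest.exists()) return; // keep an existing copy untouched
        dest.getParentFile().mkdirs();
        try (InputStream in = context.getAssets().open(assetName);
             OutputStream out = new FileOutputStream(dest)) {
            byte[] buf = new byte[8192];
            int n;
            while ((n = in.read(buf)) > 0) out.write(buf, 0, n);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}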

+810 -31  app/src/main/java/com/aispeech/nativedemo/MainActivity.java

@@ -1,7 +1,9 @@
package com.aispeech.nativedemo;

import androidx.annotation.NonNull;
import androidx.annotation.Nullable;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.core.app.ActivityCompat;
import androidx.recyclerview.widget.LinearLayoutManager;
import androidx.recyclerview.widget.RecyclerView;

@@ -14,21 +16,40 @@ import android.content.IntentFilter;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.Color;
import android.graphics.ImageFormat;
import android.graphics.PixelFormat;
import android.graphics.SurfaceTexture;
import android.graphics.drawable.Drawable;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.net.ConnectivityManager;
import android.net.NetworkCapabilities;
import android.net.NetworkRequest;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.RemoteException;
import android.os.SystemClock;
import android.provider.Settings;
import android.text.TextUtils;
import android.util.AttributeSet;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
@@ -63,6 +84,9 @@ import com.aispeech.nativedemo.network.ws.WebSocketManager;
import com.aispeech.nativedemo.observer.DuiCommandObserver;
import com.aispeech.nativedemo.observer.DuiNativeApiObserver;
import com.aispeech.nativedemo.observer.DuiUpdateObserver;
import com.aispeech.nativedemo.rockchip.HdmiService;
import com.aispeech.nativedemo.rockchip.RockchipCamera2;
import com.aispeech.nativedemo.rockchip.util.DataUtils;
import com.aispeech.nativedemo.update.callback.DownloadListener;
import com.aispeech.nativedemo.update.core.DownloadManager;
import com.aispeech.nativedemo.utils.CommandExecution;
@@ -77,38 +101,474 @@ import com.aispeech.nativedemo.widget.CameraTextureView;
import com.lenovo.lefacesdk.LefaceEngine;
import com.lenovo.lefacesdk.MultiAtt;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import java.util.Timer;
import java.util.TimerTask;

import rockchip.hardware.hdmi.V1_0.IHdmi;
import rockchip.hardware.hdmi.V1_0.IHdmiCallback;

public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCallback,
DuiMessageObserver.MessageCallback{
DuiMessageObserver.MessageCallback, CameraController.ImageReaderView, FaceManager.UpdateCallback{
private static final String TAG = "MainActivity";

private MyReceiver mInitReceiver;// broadcast receiver for init events

private boolean isForeground = false;
private long startTime = 0;
private MyReceiver mInitReceiver;
private DuiMessageObserver mMessageObserver = new DuiMessageObserver();// message observer
private DuiCommandObserver mCommandObserver = new DuiCommandObserver();// command observer
private DuiNativeApiObserver mNativeApiObserver = new DuiNativeApiObserver();// native API callback observer
private DuiUpdateObserver mUpdateObserver = new DuiUpdateObserver();// DDS update observer

private NetworkStatusCallback listener;
private CameraTextureView mTextureView;
private TextView mRectView;
private TextView mRectFaceView;
private Button stopWeb;
private Button startWeb;
private Button stopApp;
private Button reboot;
private Button close;
private Button open;
private Button stopClient;
private Button startClient;
private RecyclerView webListRV;
private Bitmap textureBitmap;
private WebView mWebView;
public static MainActivity instance;

private boolean isForeground = false;
private long startTime = 0;

private String HANDLE_THREAD_NAME = "CameraBackgroundThread";
private HandlerThread backgroundThread;
private Handler backgroundHandler;
private int surfaceTextureUpdated = 0;
public static int selectType = 1;
public static MainActivity instance;
private LefaceEngine mEngine;

private final String ACCESS_KEY = "01679f4feaae046d90866bba2a0996978fd5ac39d0784a902cae15862af6f9bc";
private final String SECRET_KEY = "6250ddad98c6642ff160338fd97bd05c9cc757f4c6179fc872641913a9d85c72";

private void initView() {
mTextureView = findViewById(R.id.texture);
// mTextureView.setSurfaceTextureListener(new TextureView.SurfaceTextureListener() {
// @Override
// public void onSurfaceTextureAvailable(@NonNull SurfaceTexture surface, int width, int height) {
// Log.d(TAG, "onSurfaceTextureAvailable");
// mCameraController.openCamera(MainActivity.this, width, height);
// }
//
// @Override
// public void onSurfaceTextureSizeChanged(@NonNull SurfaceTexture surface, int width, int height) {
// Log.d(TAG, "onSurfaceTextureSizeChanged");
// }
//
// @Override
// public boolean onSurfaceTextureDestroyed(@NonNull SurfaceTexture surface) {
// Log.d(TAG, "onSurfaceTextureDestroyed");
// return false;
// }
//
// @Override
// public void onSurfaceTextureUpdated(@NonNull SurfaceTexture surface) {
//// Log.d(TAG, "onSurfaceTextureUpdated");
// surfaceTextureUpdated = 0;
// }
// });
mRectView = findViewById(R.id.my_view);
mRectFaceView = findViewById(R.id.my_face_view);

// webListRV = findViewById(R.id.web_list_rec);
// webListRV.setLayoutManager(new LinearLayoutManager(mContext));
// WebListRecAdapter webListRecAdapter = new WebListRecAdapter(Utils.websocketList, mContext);
// webListRV.setAdapter(webListRecAdapter);

stopWeb = findViewById(R.id.stop_web_socket);
stopWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mWSManager.destroyWebSocketServer();
// List<Emp> empList = EmpDbHelper.getInstance().getAll();
// String feature = "";
// for (Emp emp : empList) {
// feature += emp.toString() + "\n";
// }
// mWSManager.sendMsg(feature);
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "今天天气真好");
WebSocketManager.getInstance(MainActivity.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
startWeb = findViewById(R.id.start_web_socket);
startWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mWSManager.createWebSocketServer();
startHdmi();
}
});
stopApp = findViewById(R.id.kill_app);
stopApp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
killApp();
}
});
reboot = findViewById(R.id.reboot);
reboot.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// rebootDev();
// selectType = 1;
// reboot.setText("ChatGpt已选择");
// open.setText("选择GLM-6B");
// JSONObject jo = new JSONObject();
// try {
// jo.put("type", "animation");
// jo.put("data", "waveHand");
// WebSocketManager.getInstance(MainActivity.this).sendMsg(jo.toString());
// } catch (JSONException e) {
// throw new RuntimeException(e);
// }
startHdmi();
// JSONObject jo = new JSONObject();
// try {
// jo.put("type", "djBackground");
// jo.put("data", "https://www.baidu.com/img/PCtm_d9c8750bed0b3c7d089fa7d55720d6cf.png");
// WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
// } catch (JSONException e) {
// throw new RuntimeException(e);
// }
}
});
close = findViewById(R.id.close_camera);
close.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mCameraController.closeCamera();
// mTextureView = null;
// List<Skill> skillList = SkillDbHelper.getInstance().getAll();
// String str = "";
// for (Skill skill : skillList) {
// str += skill.toString();
// }
// mWSManager.sendMsg(str);
// try {
// DDS.getInstance().getAgent().stopDialog();
// } catch (DDSNotInitCompleteException e) {
// throw new RuntimeException(e);
// }
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showBothHandFlat");
WebSocketManager.getInstance(MainActivity.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
open = findViewById(R.id.open_camera);
open.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mTextureView.refreshDrawableState();
// mCameraController.initTexture(mTextureView);

// selectType = 2;
// reboot.setText("选择ChatGpt");
// open.setText("GLM-6B已选择");

// new Thread(new Runnable() {
// @Override
// public void run() {
// String downloadUrl = "http://39.107.77.235:9090/dgmeta/app-release.apk";
// DownloadManager.downloadAPK(MainActivity.this, downloadUrl, null, new DownloadListener() {
// @Override
// public void onCheckerDownloading(int progress) {
// close.setText("下载进度:" + progress);
// }
//
// @Override
// public void onCheckerDownloadSuccess(File file) {
// close.setText("下载完成");
// }
//
// @Override
// public void onCheckerDownloadFail() {
// close.setText("下载失败");
// }
//
// @Override
// public void onCheckerStartDownload() {
// close.setText("开始下载");
// }
// });
// }
// }).start();
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showRightHandFlat");
WebSocketManager.getInstance(MainActivity.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
stopClient = findViewById(R.id.stop_client);
stopClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// DigiWebSocketClient.Release();
//timer.cancel();
// EmpDbHelper.getInstance().deleteAll();
// Utils.userFaces.clear();
// SkillDbHelper.getInstance().deleteAll();
// ModelDbHelper.getInstance().deleteAll();
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "缔智元是一家缔造数字人员工的科技企业,致力于综合图像识别、自然语言交互、知识图谱、超写实3D渲染、物联网等前沿技术,助力企业的数字化与智能化变革");
WebSocketManager.getInstance(MainActivity.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
startClient = findViewById(R.id.start_client);
startClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// DigiWebSocketClient.connect(Utils.getIP(getApplicationContext()));
// sendWebsocketMsg();
// InitializeDbHelper.getInstance().delete(null);
}
});
mWebView = findViewById(R.id.webview);
mWebView.setBackgroundColor(0);
if(mWebView.getBackground() != null){
mWebView.getBackground().setAlpha(0);
}
// obtain the WebSettings instance
WebSettings webSettings = mWebView.getSettings();

// JavaScript must be enabled if the loaded page interacts with JS
webSettings.setJavaScriptEnabled(true);

// fit the page to the screen; these two work together
webSettings.setUseWideViewPort(true); // scale content to the WebView's width
webSettings.setLoadWithOverviewMode(true); // zoom out to fit the screen

// zoom behavior
webSettings.setSupportZoom(true); // enable zoom (default true; prerequisite for the next call)
webSettings.setBuiltInZoomControls(true); // built-in zoom controls; if false the WebView cannot zoom
webSettings.setDisplayZoomControls(false); // hide the native zoom widgets

// other details
webSettings.setCacheMode(WebSettings.LOAD_NO_CACHE); // disable the WebView cache
webSettings.setAllowFileAccess(true); // allow file access
webSettings.setJavaScriptCanOpenWindowsAutomatically(true); // let JS open new windows
webSettings.setLoadsImagesAutomatically(true); // load images automatically
webSettings.setDefaultTextEncodingName("utf-8");// default text encoding
webSettings.setDomStorageEnabled(true);

mWebView.loadUrl("39.107.77.235:48087");
}

private WebSocketManager mWSManager;
private FaceManager mFaceManager;
private CameraController mCameraController;

private void initManager() {
mWSManager = WebSocketManager.getInstance(getApplicationContext());
mFaceManager = FaceManager.getInstance(getApplicationContext());
mFaceManager.setCallback(this);
mCameraController = CameraController.getInstance(getApplicationContext());
mCameraController.initTexture(mTextureView);
mCameraController.setImageReaderView(this);
MqttManager.getInstance(this).init();
}

private void updateLeFaceModel() {
if (backgroundHandler != null) {
backgroundHandler.post(() -> {
try {
if (mEngine == null) {
mEngine = new LefaceEngine(getApplicationContext(), ACCESS_KEY, SECRET_KEY);
LefaceEngine.InitializationResult mInitResult = mEngine.initialize();
Log.d(TAG, "engine initialization result:" + mInitResult.toString());
if (mInitResult == LefaceEngine.InitializationResult.SUCCESS) {
mEngine.initializeMultiDetector();
mEngine.initializeRecognizer();
}
}
} catch (IOException e) {
Log.e(TAG, "Failed to initialize an image classifier.", e);
}
});
}
}

private int mDoa = 0;
private int fa_x = 0;
private int bd_x = 0;
private long mDoaTime = 0;
@Override
public void updateRect(MultiAtt result, String trackIds) {
runOnUiThread(new Runnable() {
@Override
public void run() {
if (result == null) {
mRectView.setVisibility(View.INVISIBLE);
mRectFaceView.setVisibility(View.INVISIBLE);
return;
}
// int width = result.hd_fa > 0 ? result.fa_w : result.bd_w;
// int height = result.hd_fa > 0 ? result.fa_h : result.bd_h;
// int x1 = result.hd_fa > 0 ? result.fa_x1 : result.bd_x1;
// int y1 = result.hd_fa > 0 ? result.fa_y1 : result.bd_y1;
if (result.bd_x1 == 0 && result.bd_y1 == 0 && result.bd_x2 == 0 && result.bd_y2 == 0) {
mRectView.setVisibility(View.INVISIBLE);
return;
}
fa_x = (result.fa_x1 + result.fa_x2) / 2;
bd_x = (result.bd_x1 + result.bd_x2) / 2;

private TextView mKillApp;
private TextView reboot;
int width = result.bd_w;
int height = result.bd_h;
int x1 = result.bd_x1;
int y1 = result.bd_y1;
ViewGroup.LayoutParams textureParams = mRectView.getLayoutParams();
textureParams.height = height;
textureParams.width = width;
mRectView.setVisibility(View.VISIBLE);
mRectView.setX(x1);
mRectView.setY(y1);
mRectView.setBackgroundColor(Color.GREEN);
mRectView.setLayoutParams(textureParams);
Drawable xml = (Drawable) getResources().getDrawable(R.drawable.my_view_background);
mRectView.setBackground(xml);
mRectView.setRotationY(180);
mRectView.setText(trackIds);

int fwidth = result.fa_w;
int fheight = result.fa_h;
int fx1 = result.fa_x1;
int fy1 = result.fa_y1;
ViewGroup.LayoutParams textureParams2 = mRectFaceView.getLayoutParams();
textureParams2.height = fheight;
textureParams2.width = fwidth;
mRectFaceView.setVisibility(View.VISIBLE);
mRectFaceView.setX(fx1);
mRectFaceView.setY(fy1);
mRectFaceView.setBackgroundColor(Color.GREEN);
mRectFaceView.setLayoutParams(textureParams2);
Drawable xml2 = (Drawable) getResources().getDrawable(R.drawable.my_view_background);
mRectFaceView.setBackground(xml2);
mRectFaceView.setRotationY(180);

if(SystemClock.uptimeMillis() - mDoaTime > 1000){
mDoaTime = SystemClock.uptimeMillis();
int doa = 0;
if(bd_x != 0 && bd_x > 100){
doa = 90 - (1080 - bd_x) / 25;
}
if(Math.abs(mDoa - doa) >= 5){
if(doa > 50 && doa < 130){
mDoa = doa;
try {
DDS.getInstance().getAgent().getWakeupEngine().setWakeupDoa(doa);
MessageUtils.sendDoa("define", doa);
} catch (DDSNotInitCompleteException e) {
throw new RuntimeException(e);
}
}
}
}
}
});
}

@Override
public void toView(Bitmap image) {
textureBitmap = image;
}

private void killApp() {
mWSManager.destroyWebSocketServer();
android.os.Process.killProcess(android.os.Process.myPid());
}

private void rebootDev() {
String[] commons = new String[1];
commons[0] = "reboot";
CommandExecution.CommandResult result = CommandExecution.execCommand(commons, true);
}

private void startBackgroundThread() {
backgroundThread = new HandlerThread(HANDLE_THREAD_NAME);
backgroundThread.start();
backgroundThread.setPriority(10);
backgroundHandler = new Handler(backgroundThread.getLooper());
updateLeFaceModel();
if (Utils.userFaces == null || Utils.userFaces.isEmpty()) {
List<Emp> empList = EmpDbHelper.getInstance().getAll();
for (Emp emp : empList) {
Utils.userFaces.put(emp.id + "_1", emp.dfeatures);
}
}
// if (Utils.userLogs == null || Utils.userLogs.isEmpty()) {
// Utils.userLogs = DbHelper.Instance().loadAllLogs();
// }
backgroundHandler.post(periodicDetect);
}

private final Runnable periodicDetect = new Runnable() {
@Override
public void run() {
try {
Log.e(TAG, "-----------------detect face----------------");
textureBitmap = mTextureView.getBitmap();
List<MultiAtt> results = mFaceManager.detectMultiAtt(textureBitmap);
if (results != null && results.size() > 0) {
mFaceManager.filterPersonForFeatExtract(results);

// remove stale tracked persons
mFaceManager.deleteExpirePerson();

boolean isContinue = mFaceManager.featExtract(results, textureBitmap);
if(isContinue){
DDSManager.getInstance().wakeUpDDSDialog();
mFaceManager.confirmCurrentPerson(textureBitmap);
}
// if(!mFaceManager.hasPerson()){
// DDSManager.getInstance().stopDDSDialog();
// }
}
Log.e(TAG, "---------------thread execute over-------------");
backgroundHandler.post(periodicDetect);
} catch (Exception e) {
e.printStackTrace();
}
}
};

@Override
protected void onCreate(Bundle savedInstanceState) {
@@ -122,27 +582,16 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
DatabaseImpl.getDatabase();
checkPermissions();
initConfig();

initView();
initManager();
initDDS();

mKillApp = findViewById(R.id.kill_app);
mKillApp.setOnClickListener(new View.OnClickListener(){

@Override
public void onClick(View v) {
suspendedArea();
}
});

reboot = findViewById(R.id.reboot);
reboot.setOnClickListener(new View.OnClickListener(){

@Override
public void onClick(View v) {
startActivityForResult(new Intent(Settings.ACTION_MANAGE_OVERLAY_PERMISSION, Uri.parse("package:" + getPackageName())), 1);
}
});
startBackgroundThread();

mAssignCameraId = getIntent().getStringExtra(DataUtils.EXTRA_ASSIGN_CAMERA_ID);
rootView = (ConstraintLayout) findViewById(R.id.root_view);
//textureView = findViewById(R.id.texture);
// startHdmi();
}

@Override
@@ -164,6 +613,8 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
// moveTaskToBack(true);
// }
// }, 1000);

mPaused = false;
}

@Override
@@ -230,8 +681,6 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
}
}



@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
switch (requestCode) {
@@ -244,6 +693,14 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
Toast.makeText(this, "权限被禁用", Toast.LENGTH_SHORT).show();
}
break;
case REQUEST_CAMERA_PERMISSION:
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
// close the app
Toast.makeText(MainActivity.this, "Sorry!!!, you can't use this app without granting permission",
Toast.LENGTH_LONG).show();
// finish();
}
break;
default:
break;
}
@@ -355,8 +812,8 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
display.getSize(outSize);*/

// set position and size as needed
layoutParams.width = 1600;//1920 960 480
layoutParams.height = 3200;//1080 540 270
layoutParams.width = 1080;//1920 960 480
layoutParams.height = 1920;//1080 540 270
layoutParams.x = 0;
layoutParams.y = 0;
return layoutParams;
@@ -372,4 +829,326 @@ public class MainActivity extends Activity implements DuiUpdateObserver.UpdateCa
mHelper.show(mSuspension);
}

private TextureView textureViewHdmi;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}

protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension = new Size(3840, 2160);
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
private HdmiService mHdmiService;
private ConstraintLayout rootView;
private boolean mPaused = false;
private String mAssignCameraId;

class HdmiCallback extends IHdmiCallback.Stub {
public HdmiCallback() {
}

public void onConnect(String cameraId) throws RemoteException {
Log.e(TAG, "onConnect" + cameraId);
openCamera();
}

public void onFormatChange(String cameraId, int width, int height) throws RemoteException {
Log.e(TAG, "onFormatChange" + cameraId);
closeCamera();
imageDimension = new Size(width, height);
openCamera();
}

public void onDisconnect(String cameraId) throws RemoteException {
Log.e(TAG, "onDisconnect" + cameraId);
closeCamera();
}
}

HdmiCallback mHdmiCallback;

private void fullScreen() {
getWindow().getDecorView().getRootView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LOW_PROFILE);
}

private void createTextureView() {
Log.d(TAG, "recreatTextureview");
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i(TAG, "textureView remove");
textureViewHdmi = (TextureView) findViewById(R.id.texture_hdmi);
textureViewHdmi.setSurfaceTextureListener(textureListener);
}
});
}

TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
// open your camera here
Log.d(TAG, "onSurfaceTextureAvailable");
openCamera();
// Intent hdmiService = new Intent(RockchipCamera2.this, HdmiService.class);
// hdmiService.setPackage(getPackageName());
// bindService(hdmiService, conn, Context.BIND_AUTO_CREATE);
}

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.d(TAG, "onSurfaceTextureSizeChanged");
// Transform your captured image size according to the surface width and height
}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.d(TAG, "onSurfaceTextureDestroyed");
return true;
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Log.d(TAG,"onSurfaceTextureUpdated");
}
};

private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
// This is called when the camera is open
Log.d(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}

@Override
public void onDisconnected(CameraDevice camera) {
Log.d(TAG, "onDisconnected");
cameraDevice.close();
}

@Override
public void onError(CameraDevice camera, int error) {
Log.i(TAG, "onError");
cameraDevice.close();
cameraDevice = null;
}
};

protected void startHdmiBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}

protected void createCameraPreview() {
try {
Log.d(TAG, "createCameraPreview");
SurfaceTexture texture = textureViewHdmi.getSurfaceTexture();
assert texture != null;
Log.d(TAG, "imageDimension.getWidth()=" + imageDimension.getWidth() + ",imageDimension.getHeight()="
+ imageDimension.getHeight());
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
Log.d(TAG, "onConfigured");
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}

@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed");
Toast.makeText(MainActivity.this, "Configuration failed", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

private void openCamera() {
String getHdmiDeviceId = "";
try {
IHdmi service = IHdmi.getService(true);
getHdmiDeviceId = service.getHdmiDeviceId();
service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.i(TAG, "openCamera start getHdmiDeviceId=" + getHdmiDeviceId);
try {
if (manager.getCameraIdList().length == 0) {
Log.i(TAG, "openCamera length == 0");
return;
}
boolean haveHDMI = false;
String hdmiCameraId = "";
String alternativeId = "";// fallback cameraId
for (String cameraId : manager.getCameraIdList()) {
Log.i(TAG, "cameraId:" + cameraId);
if (TextUtils.isEmpty(mAssignCameraId)) {
if (cameraId.equals(getHdmiDeviceId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "haveHDMI cameraId:" + cameraId);
}
} else if (!cameraId.equals(getHdmiDeviceId)) {
alternativeId = cameraId;
if (cameraId.equals(mAssignCameraId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "have switch HDMI cameraId:" + cameraId);
break;
}
}
}
if (!haveHDMI) {
return;
}
CameraCharacteristics characteristics = manager.getCameraCharacteristics(hdmiCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
//imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
Log.d(TAG, "supported stream size: " + size.toString());
imageDimension = size;
}
Log.d(TAG, "current hdmi input size:" + imageDimension.toString());
if (ActivityCompat.checkSelfPermission(this,
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(hdmiCameraId, stateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
Log.i(TAG, "openCamera end");
}


protected void updatePreview() {
if (null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
Log.d(TAG, "updatePreview");
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

private void closeCamera() {
Log.d(TAG, "closeCamera");
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}

// @Override
// protected void onPause() {
// Log.d(TAG, "onPause");
// mPaused = true;
// super.onPause();
// try {
// IHdmi service = IHdmi.getService(true);
// service.unregisterListener((IHdmiCallback) mHdmiCallback);
// } catch (RemoteException e) {
// e.printStackTrace();
// }
// closeCamera();
// // JniCameraCall.closeDevice();
// stopBackgroundThread();
// if (textureView != null) {
// rootView.removeView(textureView);
// textureView = null;
// }
// }

void startHdmi(){
mHdmiCallback = new HdmiCallback();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
Log.d(TAG, "remove take pic button");
createTextureView();
assert textureViewHdmi != null;
// Add permission for camera and let user grant the permission
// if (ActivityCompat.checkSelfPermission(this,
// Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
// && ActivityCompat.checkSelfPermission(this,
// Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
// ActivityCompat.requestPermissions(MainActivity.this,
// new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},
// REQUEST_CAMERA_PERMISSION);
// return;
// }
// fullScreen();

if (textureViewHdmi == null) {
// JniCameraCall.openDevice();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
createTextureView();
}
startHdmiBackgroundThread();
}

}
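
Two details in the new HDMI/face code above are easy to miss. First, updateRect turns the tracked body's horizontal center on the mirrored 1080-px-wide preview into a wake-up direction of arrival, doa = 90 - (1080 - bd_x) / 25, throttled to once per second and only forwarded when it moves by at least 5 degrees and lands inside (50, 130). A worked instance of that integer arithmetic:

// Worked example of the DOA mapping in updateRect (constants taken from the diff above).
int bd_x = 540;                            // body centered on the 1080-px-wide preview
int doa = 90 - (1080 - bd_x) / 25;         // 90 - (540 / 25) = 90 - 21 = 69 degrees
boolean forwarded = doa > 50 && doa < 130; // 69 is in range, so setWakeupDoa(69) is called

Second, openCamera treats the HDMI port as an ordinary Camera2 device: it asks the Rockchip HAL for getHdmiDeviceId(), matches that id (or the mAssignCameraId override) against CameraManager's id list, and ends up with the last size the HAL advertises because the for loop overwrites imageDimension on every iteration.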

+417 -18  app/src/main/java/com/aispeech/nativedemo/MainActivity2.java

@@ -1,12 +1,53 @@
package com.aispeech.nativedemo;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.core.app.ActivityCompat;

import com.aispeech.nativedemo.network.ws.WebSocketManager;
import com.aispeech.nativedemo.rockchip.HdmiService;
import com.aispeech.nativedemo.rockchip.util.DataUtils;
import com.aispeech.nativedemo.upload.UploadManager;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.File;
import java.util.Arrays;

import rockchip.hardware.hdmi.V1_0.IHdmi;
import rockchip.hardware.hdmi.V1_0.IHdmiCallback;

public class MainActivity2 extends Activity{
private static final String TAG = "MainActivity";
private Button stopWeb;
@@ -17,84 +58,442 @@ public class MainActivity2 extends Activity{
private Button open;
private Button stopClient;
private Button startClient;
private WebView mWebView;


private WebSocketManager mWSManager;
private UploadManager mUploadManager;

private TextureView textureViewHdmi;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}

protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension = new Size(3840, 2160);
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
private HdmiService mHdmiService;
private ConstraintLayout rootView;
private boolean mPaused = false;
private String mAssignCameraId;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main_2);
initView();
initManager();

mAssignCameraId = getIntent().getStringExtra(DataUtils.EXTRA_ASSIGN_CAMERA_ID);
// rootView = (ConstraintLayout) findViewById(R.id.root_view);
//textureView = findViewById(R.id.texture);
mHdmiCallback = new HdmiCallback();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
Log.d(TAG, "remove take pic button");
createTextureView();
assert textureViewHdmi != null;
// Add permission for camera and let user grant the permission
// if (ActivityCompat.checkSelfPermission(this,
// Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
// && ActivityCompat.checkSelfPermission(this,
// Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
// ActivityCompat.requestPermissions(MainActivity.this,
// new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},
// REQUEST_CAMERA_PERMISSION);
// return;
// }
// fullScreen();

if (textureViewHdmi == null) {
// JniCameraCall.openDevice();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
createTextureView();
}
startHdmiBackgroundThread();
}


private void initManager() {
mUploadManager = UploadManager.getInstance(this.getApplicationContext());
mWSManager = WebSocketManager.getInstance(getApplicationContext());
}

private void initView() {
stopWeb = findViewById(R.id.stop_web_socket);
stopWeb = findViewById(R.id.stop_web_socket2);
stopWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mWSManager.destroyWebSocketServer();
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "缔智元是一家缔造数字人员工的科技企业,致力于综合图像识别、自然语言交互、知识图谱、超写实3D渲染、物联网等前沿技术,助力企业的数字化与智能化变革");
WebSocketManager.getInstance(MainActivity2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
startWeb = findViewById(R.id.start_web_socket);
startWeb = findViewById(R.id.start_web_socket2);
startWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

}
});
stopApp = findViewById(R.id.kill_app);
stopApp = findViewById(R.id.kill_app2);
stopApp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

killApp();
}
});
reboot = findViewById(R.id.reboot);
reboot = findViewById(R.id.reboot2);
reboot.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
new Thread(new Runnable() {
@Override
public void run() {
// mUploadManager.uploadFile("","");
}
}).start();

JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showBothHandFlat");
WebSocketManager.getInstance(MainActivity2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
close = findViewById(R.id.close_camera);
close = findViewById(R.id.close_camera2);
close.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "waveHand");
WebSocketManager.getInstance(MainActivity2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
open = findViewById(R.id.open_camera);
open = findViewById(R.id.open_camera2);
open.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showRightHandFlat");
WebSocketManager.getInstance(MainActivity2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
stopClient = findViewById(R.id.stop_client);
stopClient = findViewById(R.id.stop_client2);
stopClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

}
});
startClient = findViewById(R.id.start_client);
startClient = findViewById(R.id.start_client2);
startClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "今天天气真好");
WebSocketManager.getInstance(MainActivity2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
mWebView = findViewById(R.id.webview2);
mWebView.setBackgroundColor(0);
if(mWebView.getBackground() != null){
mWebView.getBackground().setAlpha(0);
}
// obtain the WebSettings instance
WebSettings webSettings = mWebView.getSettings();

// JavaScript must be enabled if the loaded page interacts with JS
webSettings.setJavaScriptEnabled(true);

// fit the page to the screen; these two work together
webSettings.setUseWideViewPort(true); // scale content to the WebView's width
webSettings.setLoadWithOverviewMode(true); // zoom out to fit the screen

// zoom behavior
webSettings.setSupportZoom(true); // enable zoom (default true; prerequisite for the next call)
webSettings.setBuiltInZoomControls(true); // built-in zoom controls; if false the WebView cannot zoom
webSettings.setDisplayZoomControls(false); // hide the native zoom widgets

// other details
webSettings.setCacheMode(WebSettings.LOAD_NO_CACHE); // disable the WebView cache
webSettings.setAllowFileAccess(true); // allow file access
webSettings.setJavaScriptCanOpenWindowsAutomatically(true); // let JS open new windows
webSettings.setLoadsImagesAutomatically(true); // load images automatically
webSettings.setDefaultTextEncodingName("utf-8");// default text encoding
webSettings.setDomStorageEnabled(true);

mWebView.loadUrl("39.107.77.235:48085");
}

private void killApp() {
android.os.Process.killProcess(android.os.Process.myPid());
}

class HdmiCallback extends IHdmiCallback.Stub {
public HdmiCallback() {
}

public void onConnect(String cameraId) throws RemoteException {
Log.e(TAG, "onConnect" + cameraId);
openCamera();
}

public void onFormatChange(String cameraId, int width, int height) throws RemoteException {
Log.e(TAG, "onFormatChange" + cameraId);
closeCamera();
imageDimension = new Size(width, height);
openCamera();
}

public void onDisconnect(String cameraId) throws RemoteException {
Log.e(TAG, "onDisconnect" + cameraId);
closeCamera();
}
}

HdmiCallback mHdmiCallback;

private void fullScreen() {
getWindow().getDecorView().getRootView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LOW_PROFILE);
}

private void createTextureView() {
Log.d(TAG, "recreatTextureview");
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i(TAG, "textureView remove");
textureViewHdmi = (TextureView) findViewById(R.id.texture_hdmi2);
textureViewHdmi.setSurfaceTextureListener(textureListener);
}
});
}

TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
// open your camera here
Log.d(TAG, "onSurfaceTextureAvailable");
openCamera();
// Intent hdmiService = new Intent(RockchipCamera2.this, HdmiService.class);
// hdmiService.setPackage(getPackageName());
// bindService(hdmiService, conn, Context.BIND_AUTO_CREATE);
}

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.d(TAG, "onSurfaceTextureSizeChanged");
// Transform your captured image size according to the surface width and height
}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.d(TAG, "onSurfaceTextureDestroyed");
return true;
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Log.d(TAG,"onSurfaceTextureUpdated");
}
};

private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
// This is called when the camera is open
Log.d(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}

@Override
public void onDisconnected(CameraDevice camera) {
Log.d(TAG, "onDisconnected");
cameraDevice.close();
}

@Override
public void onError(CameraDevice camera, int error) {
Log.i(TAG, "onError");
cameraDevice.close();
cameraDevice = null;
}
};

protected void startHdmiBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

private void openCamera() {
String getHdmiDeviceId = "";
try {
IHdmi service = IHdmi.getService(true);
getHdmiDeviceId = service.getHdmiDeviceId();
service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.i(TAG, "openCamera start getHdmiDeviceId=" + getHdmiDeviceId);
try {
if (manager.getCameraIdList().length == 0) {
Log.i(TAG, "openCamera length == 0");
return;
}
boolean haveHDMI = false;
String hdmiCameraId = "";
String alternativeId = "";// fallback cameraId
for (String cameraId : manager.getCameraIdList()) {
Log.i(TAG, "cameraId:" + cameraId);
if (TextUtils.isEmpty(mAssignCameraId)) {
if (cameraId.equals(getHdmiDeviceId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "haveHDMI cameraId:" + cameraId);
}
} else if (!cameraId.equals(getHdmiDeviceId)) {
alternativeId = cameraId;
if (cameraId.equals(mAssignCameraId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "have switch HDMI cameraId:" + cameraId);
break;
}
}
}
if (!haveHDMI) {
return;
}
CameraCharacteristics characteristics = manager.getCameraCharacteristics(hdmiCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
//imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
Log.d(TAG, "supported stream size: " + size.toString());
imageDimension = size;
}
Log.d(TAG, "current hdmi input size:" + imageDimension.toString());
if (ActivityCompat.checkSelfPermission(this,
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}
manager.openCamera(hdmiCameraId, stateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
Log.i(TAG, "openCamera end");
}


protected void updatePreview() {
if (null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
}
Log.d(TAG, "updatePreview");
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

private void closeCamera() {
Log.d(TAG, "closeCamera");
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}

protected void createCameraPreview() {
try {
Log.d(TAG, "createCameraPreview");
SurfaceTexture texture = textureViewHdmi.getSurfaceTexture();
assert texture != null;
Log.d(TAG, "imageDimension.getWidth()=" + imageDimension.getWidth() + ",imageDimension.getHeight()="
+ imageDimension.getHeight());
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
Log.d(TAG, "onConfigured");
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}

@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed");
Toast.makeText(MainActivity2.this, "Configuration failed", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
}
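
MainActivity2 repeats the HDMI path of MainActivity almost verbatim; only the view ids differ (texture_hdmi2, webview2, and the *2 buttons). The shared hot-plug lifecycle, condensed into one sketch using only calls that appear in these diffs (openCamera, closeCamera, and imageDimension are the activity members defined above):

// Hot-plug flow shared by both activities: HDMI-in is a Camera2 device whose
// connect/format-change/disconnect events arrive via the Rockchip HDMI HAL.
IHdmi service = IHdmi.getService(true);
service.registerListener(new IHdmiCallback.Stub() {
    @Override public void onConnect(String cameraId) throws RemoteException {
        openCamera();                          // cable plugged in: start the preview
    }
    @Override public void onFormatChange(String cameraId, int width, int height) throws RemoteException {
        closeCamera();                         // source changed mode: reopen at the new size
        imageDimension = new Size(width, height);
        openCamera();
    }
    @Override public void onDisconnect(String cameraId) throws RemoteException {
        closeCamera();                         // cable pulled: release the device
    }
});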

+23 -14  app/src/main/java/com/aispeech/nativedemo/asr/observer/DuiMessageObserver.java

@@ -1,6 +1,7 @@
package com.aispeech.nativedemo.asr.observer;

import android.os.Environment;
import android.text.TextUtils;
import android.util.Log;

import com.aispeech.dui.dds.DDS;
@@ -11,6 +12,8 @@ import com.aispeech.nativedemo.log.Logger;
import com.aispeech.nativedemo.mqtt.MqttManager;
import com.aispeech.nativedemo.network.ws.MessageUtils;
import com.aispeech.nativedemo.config.Config;
import com.aispeech.nativedemo.network.ws.WebSocketManager;
import com.aispeech.nativedemo.shape.ShapeManager;
import com.aispeech.nativedemo.widget.pageview.utils.JSONUtils;
import com.google.gson.Gson;

@@ -70,21 +73,16 @@ public class DuiMessageObserver implements MessageObserver {
String nlg = jsonObject.optString("nlg");
Log.e(Tag, display);
Log.e(Tag, nlg);
// try {
// jsonObject.put("nlg", readText);
// jsonObject.put("display", readText);
// } catch (JSONException e) {
// e.printStackTrace();
// }
String readText = nlg;
// if(FaceManager.getInstance(DuiApplication.getContext()).hasPerson()){
if(JSONUtils.isJson(display)){
String readText = MessageUtils.sendSkill(display);
try {
jsonObject.put("nlg", readText);
jsonObject.put("display", readText);
} catch (JSONException e) {
e.printStackTrace();
}
readText = MessageUtils.sendSkill(display);
// try {
// jsonObject.put("nlg", readText);
// jsonObject.put("display", readText);
// } catch (JSONException e) {
// e.printStackTrace();
// }
}
// else{
// try {
@@ -109,6 +107,18 @@ public class DuiMessageObserver implements MessageObserver {
// e.printStackTrace();
// }
// }
if(!TextUtils.isEmpty(readText)){
try {
JSONObject jo = new JSONObject();
jo.put("type", "djTtsText");
jo.put("data", readText);
WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
jsonObject.put("nlg", "");
jsonObject.put("display", "");
} catch (JSONException e) {
e.printStackTrace();
}
}
}
return jsonObject;
}
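The hunk above forwards the NLG text to the connected WebSocket peer instead of speaking it locally. A minimal sketch of the envelope it builds (assuming org.json, as the diff itself uses; the helper class name is hypothetical, not part of this commit):

import org.json.JSONException;
import org.json.JSONObject;

public final class TtsTextMessage {
    // Builds the {"type":"djTtsText","data":...} envelope sent via WebSocketManager.sendMsg().
    public static String build(String readText) throws JSONException {
        JSONObject jo = new JSONObject();
        jo.put("type", "djTtsText");
        jo.put("data", readText);
        return jo.toString();
    }
}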
@@ -131,7 +141,6 @@ public class DuiMessageObserver implements MessageObserver {
switch (message) {
case "context.output.text":
MessageUtils.sendChatMessage(data);
// ShapeManager.getInstance().start(txt);
break;
case "context.input.text":
// if(FaceManager.getInstance(DuiApplication.getContext()).hasPerson()){


+ 79
- 0
app/src/main/java/com/aispeech/nativedemo/camera/AutoFitTextureView.java View file

@@ -0,0 +1,79 @@
package com.aispeech.nativedemo.camera;


import android.annotation.SuppressLint;
import android.content.Context;
import android.util.AttributeSet;
import android.view.TextureView;

/**
* A {@link TextureView} that can be adjusted to a specified aspect ratio.
*/
public class AutoFitTextureView extends TextureView {

private int mRatioWidth = 0;
private int mRatioHeight = 0;

private double mRequestedAspect;


public AutoFitTextureView(Context context) {
this(context, null);
}

public AutoFitTextureView(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}

public AutoFitTextureView(Context context, AttributeSet attrs, int defStyle) {
super(context, attrs, defStyle);
this.mRequestedAspect = -1.0D;
}

/**
* Sets the aspect ratio for this view. The size of the view will be measured based on the ratio
* calculated from the parameters. Note that the actual sizes of the parameters don't matter, that
* is, calling setAspectRatio(2, 3) and setAspectRatio(4, 6) yield the same result.
*
* @param width Relative horizontal size
* @param height Relative vertical size
*/
public void setAspectRatio(int width, int height) {
if (width <= 0 || height <= 0) {
throw new IllegalArgumentException("Size must be positive.");
}
mRatioWidth = width;
mRatioHeight = height;
// Keep the aspect ratio used by onMeasure() in sync with the requested ratio.
mRequestedAspect = (double) width / (double) height;
requestLayout();
}

@SuppressLint("WrongConstant")
@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
if (this.mRequestedAspect > 0.0D) {
int initialWidth = MeasureSpec.getSize(widthMeasureSpec);
int initialHeight = MeasureSpec.getSize(heightMeasureSpec);
int horizPadding = this.getPaddingLeft() + this.getPaddingRight();
int vertPadding = this.getPaddingTop() + this.getPaddingBottom();
initialWidth -= horizPadding;
initialHeight -= vertPadding;
double viewAspectRatio = (double)initialWidth / (double)initialHeight;
double aspectDiff = this.mRequestedAspect / viewAspectRatio - 1.0D;
if (Math.abs(aspectDiff) > 0.01D) {
if (aspectDiff > 0.0D) {
initialHeight = (int)((double)initialWidth / this.mRequestedAspect);
} else {
initialWidth = (int)((double)initialHeight * this.mRequestedAspect);
}

initialWidth += horizPadding;
initialHeight += vertPadding;
widthMeasureSpec = MeasureSpec.makeMeasureSpec(initialWidth, MeasureSpec.EXACTLY);
heightMeasureSpec = MeasureSpec.makeMeasureSpec(initialHeight, MeasureSpec.EXACTLY);
}
}
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
// setMeasuredDimension(640,400);
}

}
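A minimal usage sketch for this view (hypothetical caller code; the view id and previewSize variable are assumptions). Once the camera preview size is known, feed its dimensions to setAspectRatio() so onMeasure() can letterbox accordingly:

// Hypothetical wiring inside an Activity; previewSize comes from the camera setup.
AutoFitTextureView textureView = findViewById(R.id.texture_view);
if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_LANDSCAPE) {
    textureView.setAspectRatio(previewSize.getWidth(), previewSize.getHeight());
} else {
    // Portrait: swap so the view ratio matches the rotated sensor output.
    textureView.setAspectRatio(previewSize.getHeight(), previewSize.getWidth());
}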

+ 771
- 0
app/src/main/java/com/aispeech/nativedemo/camera/Camera2HelperRgb.java View file

@@ -0,0 +1,771 @@
//package com.aispeech.nativedemo.camera;
//
//import android.annotation.SuppressLint;
//import android.content.Context;
//import android.content.res.Configuration;
//import android.graphics.Bitmap;
//import android.graphics.ImageFormat;
//import android.graphics.Matrix;
//import android.graphics.Point;
//import android.graphics.RectF;
//import android.graphics.SurfaceTexture;
//import android.hardware.camera2.CameraAccessException;
//import android.hardware.camera2.CameraCaptureSession;
//import android.hardware.camera2.CameraCharacteristics;
//import android.hardware.camera2.CameraDevice;
//import android.hardware.camera2.CameraManager;
//import android.hardware.camera2.CaptureRequest;
//import android.hardware.camera2.params.StreamConfigurationMap;
//import android.media.Image;
//import android.media.ImageReader;
//import android.os.Handler;
//import android.os.HandlerThread;
//import android.text.TextUtils;
//import android.util.Log;
//import android.util.Range;
//import android.util.Size;
//import android.view.Surface;
//import android.view.TextureView;
//
//import androidx.annotation.NonNull;
//
//
//import com.lenovo.faceheadtrackingdemo.MineApplication;
//
//import java.util.ArrayList;
//import java.util.Arrays;
//import java.util.List;
//import java.util.Objects;
//import java.util.concurrent.Semaphore;
//import java.util.concurrent.TimeUnit;
//
//public class Camera2HelperRgb {
// private static final String TAG = "Camera2Helper";
//
// private static final String RGBCAMERA = "0";
// private static final String IRCAMERA = "1";
//
//
// private String mCameraId;
// private String specificCameraId;
// private Camera2Listener camera2Listener;
// private AutoFitTextureView mTextureView;
// private int rotation;
// private Point previewViewSize;
// private Point specificPreviewSize;
// private boolean isMirror;
// private Context context;
// /**
// * A {@link CameraCaptureSession } for camera preview.
// */
// private CameraCaptureSession mCaptureSession;
//
// /**
// * A reference to the opened {@link CameraDevice}.
// */
// private CameraDevice mCameraDevice;
//
// private Size mPreviewSize;
//
// private Camera2HelperRgb(Builder builder) {
// mTextureView = builder.previewDisplayView;
// specificCameraId = builder.specificCameraId;
// camera2Listener = builder.camera2Listener;
// rotation = builder.rotation;
// previewViewSize = builder.previewViewSize;
// specificPreviewSize = builder.previewSize;
// isMirror = builder.isMirror;
// context = builder.context;
// if (isMirror) {
// mTextureView.setScaleX(-1);
// }
// }
//
// private int getCameraOri(int rotation, String cameraId) {
// int degrees = rotation * 90;
// switch (rotation) {
// case Surface.ROTATION_0:
// degrees = 0;
// break;
// case Surface.ROTATION_90:
// degrees = 90;
// break;
// case Surface.ROTATION_180:
// degrees = 180;
// break;
// case Surface.ROTATION_270:
// degrees = 270;
// break;
// default:
// break;
// }
//
// int result;
// if ("0".equals(cameraId)) {
// result = (mSensorOrientation + degrees) % 360;
// result = (360 - result) % 360;
// } else {
// result = (mSensorOrientation - degrees + 360) % 360;
//
// }
// return result;
// }
//
// private final TextureView.SurfaceTextureListener mSurfaceTextureListener
// = new TextureView.SurfaceTextureListener() {
//
// @Override
// public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
// Log.i(TAG, "onSurfaceTextureAvailable: ");
// openCamera();
// }
//
// @Override
// public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
// Log.i(TAG, "onSurfaceTextureSizeChanged: ");
// configureTransform(width, height);
// }
//
// @Override
// public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
// Log.i(TAG, "onSurfaceTextureDestroyed: ");
// return true;
// }
//
// @Override
// public void onSurfaceTextureUpdated(SurfaceTexture texture) {
// }
//
// };
//
// private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
//
// @Override
// public void onOpened(@NonNull CameraDevice cameraDevice) {
// Log.i(TAG, "onOpened: ");
// // This method is called when the camera is opened. We start camera preview here.
// mCameraOpenCloseLock.release();
// mCameraDevice = cameraDevice;
// createCameraPreviewSession();
// if (camera2Listener != null) {
// camera2Listener.onCameraOpened(cameraDevice, mCameraId, mPreviewSize, getCameraOri(rotation, mCameraId), isMirror);
// }
// }
//
// @Override
// public void onDisconnected(@NonNull CameraDevice cameraDevice) {
// Log.i(TAG, "onDisconnected: ");
// mCameraOpenCloseLock.release();
// cameraDevice.close();
// mCameraDevice = null;
// if (camera2Listener != null) {
// camera2Listener.onCameraClosed();
// }
// }
//
// @Override
// public void onError(@NonNull CameraDevice cameraDevice, int error) {
// Log.i(TAG, "onError: ");
// mCameraOpenCloseLock.release();
// cameraDevice.close();
// mCameraDevice = null;
//
// if (camera2Listener != null) {
// camera2Listener.onCameraError(new Exception("error occurred, code is " + error));
// }
// }
//
// };
// private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
//
// @Override
// public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// Log.i(TAG, "onConfigured: ");
// // The camera is already closed
// if (null == mCameraDevice) {
// return;
// }
//
// // When the session is ready, we start displaying the preview.
// mCaptureSession = cameraCaptureSession;
// try {
// mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
// new CameraCaptureSession.CaptureCallback() {
// }, mBackgroundHandler);
// } catch (CameraAccessException e) {
// e.printStackTrace();
// }
// }
//
// @Override
// public void onConfigureFailed(
// @NonNull CameraCaptureSession cameraCaptureSession) {
// Log.i(TAG, "onConfigureFailed: ");
// if (camera2Listener != null) {
// camera2Listener.onCameraError(new Exception("configureFailed"));
// }
// }
// };
// /**
// * An additional thread for running tasks that shouldn't block the UI.
// */
// private HandlerThread mBackgroundThread;
//
// /**
// * A {@link Handler} for running tasks in the background.
// */
// private Handler mBackgroundHandler;
//
// private ImageReader mImageReader;
// private int mRgbCount = 0;
//
// /**
// * Camera callback for the Huawei pad
// */
// private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
// = new ImageReader.OnImageAvailableListener() {
//
// @Override
// public void onImageAvailable(ImageReader reader) {
// Log.e(TAG, "onImageAvailable: mCameraId:" + mCameraId);
// Log.e(TAG, "onImageAvailable: ThreadName:" + Thread.currentThread().getName());
//// try {
//// Thread.sleep(200);
// long l1 = System.currentTimeMillis();
// Image image = reader.acquireLatestImage();
//// Log.e(TAG, "onImageAvailable: RGB图像格式:" + image.getFormat());
//// Bitmap mBitmap = ImageUtil.INSTANCE.getBitmapFromImage(image);
// if (image != null) {
// Bitmap mBitmap = ImageUtil.INSTANCE.imageToBitmap(image, "RGB");
// long l2 = System.currentTimeMillis();
// Log.e(TAG, "onImageAvailable: RGB成像时间:" + (l2 - l1));
// camera2Listener.onPreview(mBitmap);
// image.close();
// }
//// } catch (InterruptedException e) {
//// e.printStackTrace();
//// }
// }
// };
//
//// /**
//// * Camera callback for the Firefly pad
//// */
//// private final ImageReader.OnImageAvailableListener mOnImageAvailableListener
//// = new ImageReader.OnImageAvailableListener() {
////
//// @Override
//// public void onImageAvailable(ImageReader reader) {
//// Log.e(TAG, "onImageAvailable: mCameraId:" + mCameraId);
//// Log.e(TAG, "onImageAvailable: ThreadName:" + Thread.currentThread().getName());
//// try {
//// Thread.sleep(200);
//// long l1 = System.currentTimeMillis();
//// Image image = reader.acquireNextImage();
//// Bitmap mBitmap = ImageUtil.INSTANCE.getBitmapFromImage(image);
//// Bitmap mRotateBitmap = ImageUtil.INSTANCE.rotateBitmap(mBitmap, -180f);
//// long l2 = System.currentTimeMillis();
//// Log.e(TAG, "onImageAvailable: RGB成像时间:" + (l2 - l1));
//// if (MainActivity.Companion.getMIsStartSave()) {
//// //开始保存图像
//// Objects.requireNonNull(MineApplication.Companion.getMainHandler()).post(() -> {
//// Log.e(TAG, "onImageAvailable: 保存RGB图片的ThreadName:" + Thread.currentThread().getName());
//// mRgbCount++;
//// if (mRotateBitmap != null) {
//// ImageUtil.INSTANCE.saveBitmapRGB(context, mRotateBitmap, MainActivity.Companion.getMShotNumber(), "RGB_" + mRgbCount);
//// }
//// });
////
//// } else {
//// mRgbCount = 0;
//// }
//// camera2Listener.onPreview(mRotateBitmap);
//// image.close();
//// } catch (InterruptedException e) {
//// e.printStackTrace();
//// }
//// }
//// };
//
// /**
// * {@link CaptureRequest.Builder} for the camera preview
// */
// private CaptureRequest.Builder mPreviewRequestBuilder;
//
//
// /**
// * A {@link Semaphore} to prevent the app from exiting before closing the camera.
// */
// private Semaphore mCameraOpenCloseLock = new Semaphore(1);
//
//
// /**
// * Orientation of the camera sensor
// */
// private int mSensorOrientation;
//
// private Size getBestSupportedSize(List<Size> sizes) {
//// Size[] tempSizes = sizes.toArray(new Size[0]);
//// Arrays.sort(tempSizes, new Comparator<Size>() {
//// @Override
//// public int compare(Size o1, Size o2) {
//// if (o1.getWidth() > o2.getWidth()) {
//// return -1;
//// } else if (o1.getWidth() == o2.getWidth()) {
//// return o1.getHeight() > o2.getHeight() ? -1 : 1;
//// } else {
//// return 1;
//// }
//// }
//// });
//// sizes = Arrays.asList(tempSizes);
//// Size bestSize = sizes.get(0);
//// float previewViewRatio;
//// if (previewViewSize != null) {
//// previewViewRatio = (float) previewViewSize.x / (float) previewViewSize.y;
//// } else {
//// previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
//// }
////
//// if (previewViewRatio > 1) {
//// previewViewRatio = 1 / previewViewRatio;
//// }
//// for (Size s : sizes) {
//// if (specificPreviewSize != null && specificPreviewSize.x == s.getWidth() && specificPreviewSize.y == s.getHeight()) {
//// return s;
//// }
//// if (s.getWidth() > MAX_PREVIEW_WIDTH || s.getHeight() > MAX_PREVIEW_HEIGHT
//// || s.getWidth() < MIN_PREVIEW_WIDTH || s.getHeight() < MIN_PREVIEW_HEIGHT) {
//// continue;
//// }
//// if (Math.abs((s.getHeight() / (float) s.getWidth()) - previewViewRatio) < Math.abs(bestSize.getHeight() / (float) bestSize.getWidth() - previewViewRatio)) {
//// bestSize = s;
//// }
//// }
//
//// float aspectRatio = Float.valueOf(maxWidth) / maxHeight;
//// for(Size size : sizes){
//// if(Float.valueOf(size.getWidth()/ size.getHeight()) == aspectRatio && size.getHeight() <= maxHeight && size.getWidth() <= maxWidth){
//// return size;
//// }
//// }
//// for(Size size : sizes){
//// if(size.getWidth() == 2560 && size.getHeight() == 1440){
//// return size;
//// }
//// }
// return new Size(1280, 960);
// }
//
// public synchronized void start() {
// if (mCameraDevice != null) {
// return;
// }
// startBackgroundThread();
//
// // When the screen is turned off and turned back on, the SurfaceTexture is already
// // available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// // a camera and start preview from here (otherwise, we wait until the surface is ready in
// // the SurfaceTextureListener).
// if (mTextureView.isAvailable()) {
// openCamera();
// } else {
// mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
// }
// }
//
// public synchronized void stop() {
// if (mCameraDevice == null) {
// return;
// }
// closeCamera();
// stopBackgroundThread();
// }
//
// public void release() {
// stop();
// mTextureView = null;
// camera2Listener = null;
// context = null;
// }
//
// private void setUpCameraOutputs(CameraManager cameraManager) {
// try {
// if (configCameraParams(cameraManager, specificCameraId)) {
//// return;
// }
//
//// for (String cameraId : cameraManager.getCameraIdList()) {
//// Log.e("for", "cameraId == " + cameraId);
//// if (configCameraParams(cameraManager, cameraId)) {
//// return;
//// }
//// }
// } catch (CameraAccessException e) {
// e.printStackTrace();
// } catch (NullPointerException e) {
// // Currently an NPE is thrown when the Camera2API is used but not supported on the
// // device this code runs on.
//
// if (camera2Listener != null) {
// camera2Listener.onCameraError(e);
// }
// }
// }
//
// private static Range<Integer>[] fpsRanges;
//
// private boolean configCameraParams(CameraManager manager, String cameraId) throws CameraAccessException {
//
// try {
// CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
// StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
// if (map == null) {
// return false;
// }
// for (String d : manager.getCameraIdList()) {
// Log.e("for_cameraId", "cameraId == " + d);
// }
// // FPS ranges supported by this camera
// fpsRanges = characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES);
// Log.e("FPS", "SYNC_MAX_LATENCY_PER_FRAME_CONTROL: " + Arrays.toString(fpsRanges));
//
// List<Size> list = Arrays.asList(map.getOutputSizes(SurfaceTexture.class));
//
// for (int i = 0; i < list.size(); i++) {
// Log.e("tag", "index: " + i + " 相机的预览列表: " + list.get(i).getWidth() + "x" + list.get(i).getHeight());
// }
//
//
// mPreviewSize = getBestSupportedSize(new ArrayList<Size>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
//
// mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
// ImageFormat.YUV_420_888, 5);
//
// mImageReader.setOnImageAvailableListener(
// mOnImageAvailableListener, mBackgroundHandler);
//
// //noinspection ConstantConditions
// mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
// this.mCameraId = cameraId;
// } catch (Exception e) {
// Log.e("tag", "CameraCharacteristics Exception" + e.toString());
// }
//
//
// return true;
// }
//
// /**
// * Opens the camera specified by {@link #mCameraId}.
// */
// @SuppressLint("MissingPermission")
// private void openCamera() {
// CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
// setUpCameraOutputs(cameraManager);
// configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
// try {
// if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
// throw new RuntimeException("Time out waiting to lock camera opening.");
// }
// cameraManager.openCamera(specificCameraId, mDeviceStateCallback, mBackgroundHandler);
//// if (specificCameraId.equals("0")) {
//// cameraManager.openCamera("0", mDeviceStateCallback, mBackgroundHandler);
//// } else {
//// cameraManager.openCamera("1", mDeviceStateCallback, mBackgroundHandler);
//// }
// } catch (CameraAccessException | InterruptedException e) {
// if (camera2Listener != null) {
// camera2Listener.onCameraError(e);
// }
// }
// }
//
// /**
// * Closes the current {@link CameraDevice}.
// */
// private void closeCamera() {
// try {
// mCameraOpenCloseLock.acquire();
// if (null != mCaptureSession) {
// mCaptureSession.close();
// mCaptureSession = null;
// }
// if (null != mCameraDevice) {
// mCameraDevice.close();
// mCameraDevice = null;
// }
// if (camera2Listener != null) {
// camera2Listener.onCameraClosed();
// }
// if (null != mImageReader) {
// mImageReader.close();
// mImageReader = null;
// }
// } catch (InterruptedException e) {
// if (camera2Listener != null) {
// camera2Listener.onCameraError(e);
// }
// } finally {
// mCameraOpenCloseLock.release();
// }
// }
//
// /**
// * Starts a background thread and its {@link Handler}.
// */
// private void startBackgroundThread() {
// mBackgroundThread = new HandlerThread("CameraBackground");
// mBackgroundThread.start();
// mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
// }
//
// /**
// * Stops the background thread and its {@link Handler}.
// */
// private void stopBackgroundThread() {
// mBackgroundThread.quitSafely();
// try {
// mBackgroundThread.join();
// mBackgroundThread = null;
// mBackgroundHandler = null;
// } catch (InterruptedException e) {
// e.printStackTrace();
// }
// }
//
// /**
// * Creates a new {@link CameraCaptureSession} for camera preview.
// */
// private void createCameraPreviewSession() {
// try {
// SurfaceTexture texture = mTextureView.getSurfaceTexture();
// assert texture != null;
//
// // We configure the size of default buffer to be the size of camera preview we want.
// texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
//
// // This is the output Surface we need to start preview.
// Surface surface = new Surface(texture);
//
// // We set up a CaptureRequest.Builder with the output Surface.
// mPreviewRequestBuilder
// = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
//
// mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
// CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
//
// // Set the preview frame rate; choose an FPS range appropriate to the actual situation
//
// if (fpsRanges != null && fpsRanges.length > 0) {
// Range<Integer> maxFps = fpsRanges[0];
// for (Range<Integer> aFpsRange : fpsRanges) {
// if (maxFps.getLower() * maxFps.getUpper() < aFpsRange.getLower() * aFpsRange.getUpper()) {
// maxFps = aFpsRange;
// }
// }
// mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE, maxFps);
// }
//
// mPreviewRequestBuilder.get(CaptureRequest.CONTROL_AE_TARGET_FPS_RANGE);
//
// mPreviewRequestBuilder.addTarget(surface);
// mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
//
// // Here, we create a CameraCaptureSession for camera preview.
// mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
// mCaptureStateCallback, mBackgroundHandler
// );
// } catch (CameraAccessException e) {
// e.printStackTrace();
// }
// }
//
// /**
// * Configures the necessary {@link Matrix} transformation to `mTextureView`.
// * This method should be called after the camera preview size is determined in
// * setUpCameraOutputs and also the size of `mTextureView` is fixed.
// *
// * @param viewWidth The width of `mTextureView`
// * @param viewHeight The height of `mTextureView`
// */
// private void configureTransform(int viewWidth, int viewHeight) {
// if (null == mTextureView || null == mPreviewSize) {
// return;
// }
// Matrix matrix = new Matrix();
// RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
// RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
// float centerX = viewRect.centerX();
// float centerY = viewRect.centerY();
// if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
// bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
// matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
// float scale = Math.max(
// (float) viewHeight / mPreviewSize.getHeight(),
// (float) viewWidth / mPreviewSize.getWidth());
// matrix.postScale(scale, scale, centerX, centerY);
// matrix.postRotate((90 * (rotation - 2)) % 360, centerX, centerY);
// } else if (Surface.ROTATION_180 == rotation) {
// matrix.postRotate(180, centerX, centerY);
// }
//
// Log.e(TAG, "configureTransform: " + mCameraId + "---------->" + getCameraOri(rotation, mCameraId) + " " + mCameraId + " " + (90 * (rotation - 2)) % 360);
//
//
// int orientation = Objects.requireNonNull(MineApplication.Companion.getContext()).getResources().getConfiguration().orientation;
//
// if (orientation == Configuration.ORIENTATION_LANDSCAPE) {
// mTextureView.setAspectRatio(
// mPreviewSize.getWidth(), mPreviewSize.getHeight());
// } else {
// mTextureView.setAspectRatio(
// mPreviewSize.getHeight(), mPreviewSize.getWidth());
// }
//
// // // horizontal mirror
// mTextureView.setScaleX(-1);
//
//// // Fix reversed horizontal mirroring (Huawei pad)
//// if (TextUtils.equals(RGBCAMERA, mCameraId)) {
//// // horizontal mirror
//// mTextureView.setScaleX(-1);
//// } else if (TextUtils.equals(IRCAMERA, mCameraId)) {
//// // vertical mirror
//// mTextureView.setScaleY(-1);
//// mTextureView.setScaleX(-1);
//// }
//
//
// Fix reversed horizontal mirroring (Firefly pad)
//// if (TextUtils.equals(RGBCAMERA, mCameraId)) {
//// // horizontal mirror
//// mTextureView.setScaleY(-1);
//// mTextureView.setScaleX(-1);
//// } else if (TextUtils.equals(IRCAMERA, mCameraId)) {
//// // vertical mirror
////// mTextureView.setScaleX(-1);
////// mTextureView.setScaleY(-1);
//// }
//
//// // Fix reversed horizontal mirroring (Kaixuan)
//// if (TextUtils.equals(RGBCAMERA, mCameraId)) {
//// // horizontal mirror
//// mTextureView.setScaleY(-1);
//// mTextureView.setScaleX(-1);
//// } else if (TextUtils.equals(IRCAMERA, mCameraId)) {
//// // vertical mirror
////// mTextureView.setScaleX(-1);
////// mTextureView.setScaleY(-1);
//// }
//
//// // Fix reversed horizontal mirroring (Kuangshi)
//// if (TextUtils.equals(RGBCAMERA, mCameraId)) {
//// // horizontal mirror
//// mTextureView.setScaleY(-1);
//// mTextureView.setScaleX(-1);
//// } else if (TextUtils.equals(IRCAMERA, mCameraId)) {
//// // vertical mirror
////// mTextureView.setScaleX(-1);
////// mTextureView.setScaleY(-1);
//// }
//
// }
//
// public static final class Builder {
//
// /**
// * The view the preview is displayed on; currently only TextureView is supported
// */
// private AutoFitTextureView previewDisplayView;
//
// /**
// * Whether to mirror the preview; only supported for TextureView
// */
// private boolean isMirror;
// /**
// * The specific camera ID to open
// */
// private String specificCameraId;
// /**
// * Event callback
// */
// private Camera2Listener camera2Listener;
// /**
// * Screen width and height, used when choosing the best camera aspect ratio
// */
// private Point previewViewSize;
// /**
// * Pass the value of getWindowManager().getDefaultDisplay().getRotation()
// */
// private int rotation;
// /**
// * Requested preview size; used for the preview if the system supports it
// */
// private Point previewSize;
//
// /**
// * Context, used to obtain the CameraManager
// */
// private Context context;
//
// public Builder() {
// }
//
//
// public Builder previewOn(AutoFitTextureView val) {
// previewDisplayView = val;
// return this;
// }
//
//
// public Builder isMirror(boolean val) {
// isMirror = val;
// return this;
// }
//
// public Builder previewSize(Point val) {
// previewSize = val;
// return this;
// }
//
// public Builder previewViewSize(Point val) {
// previewViewSize = val;
// return this;
// }
//
// public Builder rotation(int val) {
// rotation = val;
// return this;
// }
//
//
// public Builder specificCameraId(String val) {
// specificCameraId = val;
// return this;
// }
//
// public Builder cameraListener(Camera2Listener val) {
// camera2Listener = val;
// return this;
// }
//
// public Builder context(Context val) {
// context = val;
// return this;
// }
//
// public Camera2HelperRgb build() {
// if (previewViewSize == null) {
// Log.e(TAG, "previewViewSize is null, now use default previewSize");
// }
// if (camera2Listener == null) {
// Log.e(TAG, "camera2Listener is null, callback will not be called");
// }
// if (previewDisplayView == null) {
// throw new RuntimeException("you must preview on a textureView or a surfaceView");
// }
// return new Camera2HelperRgb(this);
// }
// }
//}
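Although the whole helper above is committed commented-out, its FPS selection in createCameraPreviewSession() is worth noting: it picks the AE target range whose lower*upper product is largest, biasing toward fast ranges. The same logic extracted as a standalone sketch (method name is illustrative):

import android.util.Range;

static Range<Integer> pickMaxFpsRange(Range<Integer>[] fpsRanges) {
    // Prefer the range maximizing lower*upper, as in createCameraPreviewSession() above.
    Range<Integer> maxFps = fpsRanges[0];
    for (Range<Integer> r : fpsRanges) {
        if (maxFps.getLower() * maxFps.getUpper() < r.getLower() * r.getUpper()) {
            maxFps = r;
        }
    }
    return maxFps;
}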

+ 49
- 0
app/src/main/java/com/aispeech/nativedemo/camera/Camera2Listener.java View file

@@ -0,0 +1,49 @@
package com.aispeech.nativedemo.camera;


import android.graphics.Bitmap;
import android.hardware.camera2.CameraDevice;
import android.media.Image;
import android.util.Size;

import java.util.List;

public interface Camera2Listener {
/**
* Called when the camera has been opened.
*
* @param cameraDevice camera instance
* @param cameraId camera ID
* @param previewSize preview size
* @param displayOrientation preview rotation in degrees
* @param isMirror whether the preview is mirrored
*/
void onCameraOpened(CameraDevice cameraDevice, String cameraId, Size previewSize, int displayOrientation, boolean isMirror);

/**
* Preview frame callback. The Y/U/V parameters below document the
* commented-out raw-plane variant; the active variant delivers a Bitmap.
*
* @param y preview data, Y plane
* @param u preview data, U plane
* @param v preview data, V plane
* @param previewSize preview size
* @param yRowStride Y row stride
* @param uRowStride U row stride
* @param vRowStride V row stride
*/
// void onPreview(byte[] y, byte[] u, byte[] v, Size previewSize, int yRowStride, int uRowStride, int vRowStride);
void onPreview(Bitmap bitmap);
// void onPreView(List<Bitmap> bitmapList,Bitmap bitmap);

/**
* Called when the camera has been closed.
*/
void onCameraClosed();

/**
* Called when an error occurs.
*
* @param e camera-related exception
*/
void onCameraError(Exception e);

}
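A minimal implementing sketch (illustrative only; the log tags and the face-detection hand-off are assumptions, not part of this commit):

Camera2Listener listener = new Camera2Listener() {
    @Override
    public void onCameraOpened(CameraDevice cameraDevice, String cameraId, Size previewSize, int displayOrientation, boolean isMirror) {
        Log.d("Camera2Listener", "opened " + cameraId + " @ " + previewSize + ", rotation=" + displayOrientation + ", mirror=" + isMirror);
    }

    @Override
    public void onPreview(Bitmap bitmap) {
        // e.g. hand the frame to face detection; the consumer is responsible for recycling it.
    }

    @Override
    public void onCameraClosed() {
        Log.d("Camera2Listener", "closed");
    }

    @Override
    public void onCameraError(Exception e) {
        Log.e("Camera2Listener", "camera error", e);
    }
};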

+ 92
- 36
app/src/main/java/com/aispeech/nativedemo/camera/CameraController.java View file

@@ -36,6 +36,8 @@ import com.aispeech.nativedemo.utils.BitmapUtil;

import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;

public class CameraController implements ICamera{
private static final String TAG = "CameraController";
@@ -106,43 +108,48 @@ public class CameraController implements ICamera{
}
};

private ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
private final ImageReader.OnImageAvailableListener mOnImageAvailableListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
//Get the latest frame as an Image
Image image = reader.acquireLatestImage();
if (image != null) {
Log.v(TAG, "onImageAvailable");
//With ImageFormat.JPEG, image.getPlanes() returns a single plane, index 0.
ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
//BitmapUtil.dumpFile("mnt/sdcard/1.jpg", bytes);
byteBuffer.get(bytes);
//ImageFormat.JPEG decodes straight into a Bitmap.
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
//Camera frames arrive landscape by default, so rotate them.
Bitmap newBitmap = BitmapUtil.rotateBitmap(temp, 180);
//Hand the result off for display or storage.
imageReaderView.toView(newBitmap);
temp.recycle();
//Must close the Image, or no further Image callbacks arrive.
image.close();
mCameraHandler.post(new ImageSaver(reader.acquireLatestImage(), imageReaderView));

/*if (image == null) {
return;
}
int width = image.getWidth(), height = image.getHeight();
byte[] i420bytes = CameraUtil.getDataFromImage(image, COLOR_FormatI420);
//BitmapUtil.dumpFile("mnt/sdcard/1.yuv", i420bytes);
byte[] i420RorateBytes = BitmapUtil.rotateYUV420Degree90(i420bytes, width, height);
byte[] nv21bytes = BitmapUtil.I420Tonv21(i420RorateBytes, height, width);
Bitmap bitmap = BitmapUtil.getBitmapImageFromYUV(nv21bytes, height, width);
//Bitmap newBitmap = BitmapUtil.rotateBitmap(bitmap, 90);
imageReaderView.toView(bitmap);
image.close();*/


}
//Get the latest frame as an Image
// new Thread(new Runnable() {
// @Override
// public void run() {
// Image image = reader.acquireLatestImage();
// if (image != null) {
// Log.v(TAG, "onImageAvailable");
// //With ImageFormat.JPEG, image.getPlanes() returns a single plane, index 0.
// ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
// byte[] bytes = new byte[byteBuffer.remaining()];
// //BitmapUtil.dumpFile("mnt/sdcard/1.jpg", bytes);
// byteBuffer.get(bytes);
// //ImageFormat.JPEG decodes straight into a Bitmap.
// Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
// //Camera frames arrive landscape by default, so rotate them.
// Bitmap newBitmap = BitmapUtil.rotateBitmap(temp, 180);
// //Hand the result off for display or storage.
// imageReaderView.toView(newBitmap);
// temp.recycle();
// //Must close the Image, or no further Image callbacks arrive.
// image.close();
//
// /*if (image == null) {
// return;
// }
// int width = image.getWidth(), height = image.getHeight();
// byte[] i420bytes = CameraUtil.getDataFromImage(image, COLOR_FormatI420);
// //BitmapUtil.dumpFile("mnt/sdcard/1.yuv", i420bytes);
// byte[] i420RorateBytes = BitmapUtil.rotateYUV420Degree90(i420bytes, width, height);
// byte[] nv21bytes = BitmapUtil.I420Tonv21(i420RorateBytes, height, width);
// Bitmap bitmap = BitmapUtil.getBitmapImageFromYUV(nv21bytes, height, width);
// //Bitmap newBitmap = BitmapUtil.rotateBitmap(bitmap, 90);
// imageReaderView.toView(bitmap);
// image.close();*/
// }
// }
// }).start();
}
};

@@ -189,9 +196,13 @@ public class CameraController implements ICamera{
// new Size(1080, 600);
// getOptimalSize(map.getOutputSizes(SurfaceTexture.class), width, height);
//1080 1920 800 1422
// previewSize = Collections.max(
// Arrays.asList(map.getOutputSizes(SurfaceTexture.class)),
// Comparator.comparingInt(Size::getWidth));
// Log.e(TAG, "select size: width-" + previewSize.getWidth() + ",height-" + previewSize.getHeight());

mImageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.JPEG, 2);
mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mCameraHandler);
// mImageReader = ImageReader.newInstance(previewSize.getWidth(), previewSize.getHeight(), ImageFormat.JPEG, 1);
// mImageReader.setOnImageAvailableListener(mOnImageAvailableListener, mCameraHandler);

Log.e(TAG, "previewSize's size: width-" + previewSize.getWidth() + ",height-" + previewSize.getHeight());
Display display = ((Activity)context).getWindowManager().getDefaultDisplay();
@@ -232,4 +243,49 @@ public class CameraController implements ICamera{
public void toView(Bitmap image);
}

private static class ImageSaver implements Runnable {
private final Image image;
private final ImageReaderView listener;

private ImageSaver(Image image, ImageReaderView listener) {
this.image = image;
this.listener = listener;
}

@Override
public void run() {
if (image != null) {
Log.v(TAG, "onImageAvailable");
//With ImageFormat.JPEG, image.getPlanes() returns a single plane, index 0.
ByteBuffer byteBuffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[byteBuffer.remaining()];
//BitmapUtil.dumpFile("mnt/sdcard/1.jpg", bytes);
byteBuffer.get(bytes);
//ImageFormat.JPEG decodes straight into a Bitmap.
Bitmap temp = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
//Camera frames arrive landscape by default, so rotate them.
Bitmap newBitmap = BitmapUtil.rotateBitmap(temp, 180);
//Hand the result off for display or storage.
listener.toView(newBitmap);
temp.recycle();
//Must close the Image, or no further Image callbacks arrive.
image.close();

/*if (image == null) {
return;
}
int width = image.getWidth(), height = image.getHeight();
byte[] i420bytes = CameraUtil.getDataFromImage(image, COLOR_FormatI420);
//BitmapUtil.dumpFile("mnt/sdcard/1.yuv", i420bytes);
byte[] i420RorateBytes = BitmapUtil.rotateYUV420Degree90(i420bytes, width, height);
byte[] nv21bytes = BitmapUtil.I420Tonv21(i420RorateBytes, height, width);
Bitmap bitmap = BitmapUtil.getBitmapImageFromYUV(nv21bytes, height, width);
//Bitmap newBitmap = BitmapUtil.rotateBitmap(bitmap, 90);
listener.toView(bitmap);
image.close();*/
}
}

}

}
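The refactor above moves JPEG decoding out of onImageAvailable() and onto mCameraHandler via ImageSaver, keeping the ImageReader callback thread free. A sketch of the handler setup this relies on (assumed; the diff does not show where mCameraHandler is created):

// Background thread backing mCameraHandler, so decoding never blocks the callback.
HandlerThread cameraThread = new HandlerThread("CameraBackground");
cameraThread.start();
Handler mCameraHandler = new Handler(cameraThread.getLooper());
// In onImageAvailable(): decode off the callback thread.
// mCameraHandler.post(new ImageSaver(reader.acquireLatestImage(), imageReaderView));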

+ 658
- 0
app/src/main/java/com/aispeech/nativedemo/camera/ImageUtil.kt View file

@@ -0,0 +1,658 @@
package com.aispeech.nativedemo.camera

import android.content.Context
import android.content.Intent
import android.graphics.*
import android.media.Image
import android.net.Uri
import android.os.Build
import android.os.Environment
import android.text.TextUtils
import android.util.Log
import androidx.annotation.RequiresApi
import java.io.*
import kotlin.experimental.and


object ImageUtil {
private const val YUV420P = 0
private const val YUV420SP = 1
private const val NV21 = 2
private const val TAG = "ImageUtil"

/***
* Comments in this method use 640*480 as the example.
* CropRect is not taken into account.
*/
@RequiresApi(api = Build.VERSION_CODES.KITKAT)
fun getBytesFromImageAsType(image: Image?, type: Int): ByteArray? {
try {
//Get the source data; for YUV-format data, planes.length = 3
//The actual data in plane[i] may satisfy byte[].length <= capacity (total buffer size)
val planes = image!!.planes

//Effective data width; generally the image width <= rowStride, which is why byte[].length <= capacity
// so we only take the width portion
val width = image.width
val height = image.height


//Holds the final YUV data; needs 1.5x the image size because Y:U:V is 4:1:1
val yuvBytes =
ByteArray(width * height * ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888) / 8)
//Current write position in the destination array
var dstIndex = 0

//Temporary storage for the U and V data
val uBytes = ByteArray(width * height / 4)
val vBytes = ByteArray(width * height / 4)
var uIndex = 0
var vIndex = 0
var pixelsStride: Int
var rowStride: Int
for (i in planes.indices) {
pixelsStride = planes[i].pixelStride
rowStride = planes[i].rowStride
val buffer = planes[i].buffer

//If pixelsStride==2, the Y buffer length is typically 640*480 and the UV length 640*480/2-1
//Source index; Y data is contiguous in the bytes, U is V shifted left by one, and for both only even positions hold valid data
val bytes = ByteArray(buffer.capacity())
buffer[bytes]
var srcIndex = 0
if (i == 0) {
//Copy out the whole valid Y region directly (it could also be staged in a temporary array and copied in the next step)
for (j in 0 until height) {
System.arraycopy(bytes, srcIndex, yuvBytes, dstIndex, width)
srcIndex += rowStride
dstIndex += width
}
} else if (i == 1) {
//Extract the data according to pixelsStride
for (j in 0 until height / 2) {
for (k in 0 until width / 2) {
uBytes[uIndex++] = bytes[srcIndex]
srcIndex += pixelsStride
}
if (pixelsStride == 2) {
srcIndex += rowStride - width
} else if (pixelsStride == 1) {
srcIndex += rowStride - width / 2
}
}
} else if (i == 2) {
//Extract the data according to pixelsStride
for (j in 0 until height / 2) {
for (k in 0 until width / 2) {
vBytes[vIndex++] = bytes[srcIndex]
srcIndex += pixelsStride
}
if (pixelsStride == 2) {
srcIndex += rowStride - width
} else if (pixelsStride == 1) {
srcIndex += rowStride - width / 2
}
}
}
}
when (type) {
YUV420P -> {
System.arraycopy(uBytes, 0, yuvBytes, dstIndex, uBytes.size)
System.arraycopy(vBytes, 0, yuvBytes, dstIndex + uBytes.size, vBytes.size)
}
YUV420SP -> {
var i = 0
while (i < vBytes.size) {
yuvBytes[dstIndex++] = uBytes[i]
yuvBytes[dstIndex++] = vBytes[i]
i++
}
}
NV21 -> {
var i = 0
while (i < vBytes.size) {
yuvBytes[dstIndex++] = vBytes[i]
yuvBytes[dstIndex++] = uBytes[i]
i++
}
}
}
return yuvBytes
} catch (e: Exception) {
image?.close()
Log.i(TAG, e.toString())
}
return null
}

/**
* Convert camera2 Image data to a Bitmap
* @param image Image
* @return bitmap
*/
fun imageToBitmap(image: Image, type: String): Bitmap? {
val mImageWidth = image.width
val mImageHeight = image.height
val bytesFromImageAsType = getBytesFromImageAsType(image, NV21)
val mImageArrays = decodeYUV420SP(
bytesFromImageAsType!!, mImageWidth, mImageHeight
)
val mBitmap = Bitmap.createBitmap(
mImageArrays, 0, mImageWidth,
mImageWidth, mImageHeight,
Bitmap.Config.ARGB_8888
)
// val mRotateBitmap = rotateBitmap(mBitmap, 90f)
// if (TextUtils.equals("IR", type)) {
// return mRotateBitmap
// } else if (TextUtils.equals("RGB", type)) {
// return convert(mRotateBitmap)
// }

return convert(mBitmap)
}

/***
* Convert YUV420 to RGB
*/
private fun decodeYUV420SP(yuv420sp: ByteArray, width: Int, height: Int): IntArray {
val frameSize = width * height
val rgb = IntArray(frameSize)
var j = 0
var yp = 0
while (j < height) {
var uvp = frameSize + (j shr 1) * width
var u = 0
var v = 0
var i = 0
while (i < width) {
var y = (0xff and yuv420sp[yp].toInt()) - 16
if (y < 0) {
y = 0
}
if (i and 1 == 0) {
v = (0xff and yuv420sp[uvp++].toInt()) - 128
u = (0xff and yuv420sp[uvp++].toInt()) - 128
}
val y1192 = 1192 * y
var r = y1192 + 1634 * v
var g = y1192 - 833 * v - 400 * u
var b = y1192 + 2066 * u
if (r < 0) {
r = 0
} else if (r > 262143) {
r = 262143
}
if (g < 0) {
g = 0
} else if (g > 262143) {
g = 262143
}
if (b < 0) {
b = 0
} else if (b > 262143) {
b = 262143
}
rgb[yp] = (-0x1000000 or (r shl 6 and 0xff0000)
or (g shr 2 and 0xff00) or (b shr 10 and 0xff))
i++
yp++
}
j++
}
return rgb
}
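For reference, the integer constants in decodeYUV420SP are the BT.601 limited-range YUV-to-RGB coefficients scaled by 1024 (fixed-point, hence the final shifts and masks). In real-valued form, the loop computes approximately:

\begin{aligned}
R &= 1.164\,(Y - 16) + 1.596\,(V - 128)\\
G &= 1.164\,(Y - 16) - 0.813\,(V - 128) - 0.391\,(U - 128)\\
B &= 1.164\,(Y - 16) + 2.018\,(U - 128)
\end{aligned}

with 1192 ≈ 1.164·1024, 1634 ≈ 1.596·1024, 833 ≈ 0.813·1024, 400 ≈ 0.391·1024, and 2066 ≈ 2.018·1024.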

/**
* Rotate a bitmap
*
* @param bitmap source bitmap
* @param rotate angle in degrees
*/
fun rotateBitmap(bitmap: Bitmap?, rotate: Float): Bitmap? {
if (bitmap == null) {
return null
}
val width = bitmap.width
val height = bitmap.height
val matrix = Matrix()
//rotate the bitmap
matrix.setRotate(rotate)
// rotates around the origin
return Bitmap.createBitmap(bitmap, 0, 0, width, height, matrix, false)
}

/**
* Mirror a bitmap
*
* @param bitmap source bitmap
* @param width target width
* @param height target height
* @return the mirrored bitmap
*/
fun convert(bitmap: Bitmap, width: Int, height: Int): Bitmap {
val w = bitmap.width
val h = bitmap.height
// Create a new bitmap with the target width and height
val newb = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888)
val cv = Canvas(newb)
val m = Matrix()
//mirror: flip horizontally
m.postScale(-1f, 1f)
val new2 = Bitmap.createBitmap(bitmap, 0, 0, w, h, m, true)
cv.drawBitmap(new2, Rect(0, 0, new2.width, new2.height), Rect(0, 0, width, height), null)
return newb
}

/**
* Mirror a bitmap horizontally
* @param a source bitmap
*/
fun convert(a: Bitmap?): Bitmap? {
val w = a?.width
val h = a?.height
val m = Matrix()
// m.postScale(1F, (-1).toFloat()) //mirror: flip vertically
m.postScale((-1).toFloat(), 1F) //mirror: flip horizontally
return a?.let {
w?.let { it1 ->
h?.let { it2 ->
Bitmap.createBitmap(
it, 0, 0, it1,
it2, m, true
)
}
}
}
}

/**
* Mirror a bitmap horizontally
* @param a source bitmap
*/
fun irConvert(a: Bitmap?): Bitmap? {
val w = a?.width
val h = a?.height
val m = Matrix()
// m.postScale(1F, (-1).toFloat()) //mirror: flip vertically
m.postScale((-1).toFloat(), 1F) //mirror: flip horizontally
return a?.let {
w?.let { it1 ->
h?.let { it2 ->
Bitmap.createBitmap(
it, 0, 0, it1,
it2, m, true
)
}
}
}
}

/**
* Save an RGB bitmap to a local folder
*
* @param context context
* @param image the bitmap to save
* @param bitmapName image name
* @param count image index
*/
fun saveBitmapRGB(context: Context, image: Bitmap, count: Int, bitmapName: String) {
val mString = "_"
val sdCardDir: String = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
//On Android 10 and above, use the app's sandboxed external files path
context
.getExternalFilesDir(Environment.DIRECTORY_PICTURES)
.toString() + "/Bitmap/"
} else {
Environment.getExternalStorageDirectory()
.absolutePath + "/Bitmap/"
}
if (Environment.getExternalStorageState() == Environment.MEDIA_MOUNTED) {
//SD card is mounted
Log.e("file", "SD card is mounted")
}
val dirFile = File(sdCardDir)
if (!dirFile.exists()) {
dirFile.mkdirs()
}
val file = File(sdCardDir, "$count$mString$bitmapName.png")
Log.e("file", file.absoluteFile.toString())
var out: FileOutputStream? = null
try {
out = FileOutputStream(file)
image.compress(Bitmap.CompressFormat.JPEG, 100, out) // note: JPEG-encoded data despite the .png filename
} catch (e: FileNotFoundException) {
e.printStackTrace()
}
try {
if (out != null) {
out.flush()
out.close()
}
} catch (e: IOException) {
e.printStackTrace()
}
context.sendBroadcast(
Intent(
Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
Uri.parse("file://\$sdCardDir")
)
)
}

/**
* Save an IR bitmap to a local folder
*
* @param context context
* @param image the bitmap to save
* @param bitmapName image name
* @param count image index
*/
fun saveBitmapIR(context: Context, image: Bitmap, count: Int, bitmapName: String) {
val mString = "_"
val sdCardDir: String = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
//On Android 10 and above, use the app's sandboxed external files path
context
.getExternalFilesDir(Environment.DIRECTORY_PICTURES)
.toString() + "/Bitmap/"
} else {
Environment.getExternalStorageDirectory()
.absolutePath + "/Bitmap/"
}
if (Environment.getExternalStorageState() == Environment.MEDIA_MOUNTED) {
//SD card is mounted
Log.e("file", "SD card is mounted")
}
val dirFile = File(sdCardDir)
if (!dirFile.exists()) {
dirFile.mkdirs()
}
val file = File(sdCardDir, "$count$mString$bitmapName.png")
Log.e("file", file.absoluteFile.toString())
var out: FileOutputStream? = null
try {
out = FileOutputStream(file)
image.compress(Bitmap.CompressFormat.JPEG, 100, out) // note: JPEG-encoded data despite the .png filename
} catch (e: FileNotFoundException) {
e.printStackTrace()
}
try {
if (out != null) {
out.flush()
out.close()
}
} catch (e: IOException) {
e.printStackTrace()
}
context.sendBroadcast(
Intent(
Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
Uri.parse("file://\$sdCardDir")
)
)
}

/**
* Save an IR bitmap to a local folder
*
* @param context context
* @param image the bitmap to save
* @param bitmapName image name
* @param count image index
*/
fun saveBitmapIR(context: Context, image: Bitmap, count: String, bitmapName: String) {
val mString = "_"
val sdCardDir: String = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
//On Android 10 and above, use the app's sandboxed external files path
context
.getExternalFilesDir(Environment.DIRECTORY_PICTURES)
.toString() + "/Bitmap/"
} else {
Environment.getExternalStorageDirectory()
.absolutePath + "/Bitmap/"
}
if (Environment.getExternalStorageState() == Environment.MEDIA_MOUNTED) {
//SD card is mounted
Log.e("file", "SD card is mounted")
}
val dirFile = File(sdCardDir)
if (!dirFile.exists()) {
dirFile.mkdirs()
}
val file = File(sdCardDir, "$count$mString$bitmapName.png")
Log.e("file", file.absoluteFile.toString())
var out: FileOutputStream? = null
try {
out = FileOutputStream(file)
image.compress(Bitmap.CompressFormat.JPEG, 100, out) // note: JPEG-encoded data despite the .png filename
} catch (e: FileNotFoundException) {
e.printStackTrace()
}
try {
if (out != null) {
out.flush()
out.close()
}
} catch (e: IOException) {
e.printStackTrace()
}
context.sendBroadcast(
Intent(
Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
Uri.parse("file://\$sdCardDir")
)
)
}

/**
* Save an IR bitmap to a local folder
*
* @param context context
* @param image the bitmap to save
* @param bitmapName image name
* @param count image index
*/
fun saveBitmapIR(context: Context, image: Bitmap, count: Float, bitmapName: String) {
val mString = "_"
val sdCardDir: String = if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
//On Android 10 and above, use the app's sandboxed external files path
context
.getExternalFilesDir(Environment.DIRECTORY_PICTURES)
.toString() + "/Bitmap/"
} else {
Environment.getExternalStorageDirectory()
.absolutePath + "/Bitmap/"
}
if (Environment.getExternalStorageState() == Environment.MEDIA_MOUNTED) {
//SD card is mounted
Log.e("file", "SD card is mounted")
}
val dirFile = File(sdCardDir)
if (!dirFile.exists()) {
dirFile.mkdirs()
}
val file = File(sdCardDir, "$count$mString$bitmapName.png")
Log.e("file", file.absoluteFile.toString())
var out: FileOutputStream? = null
try {
out = FileOutputStream(file)
image.compress(Bitmap.CompressFormat.JPEG, 100, out) // note: JPEG-encoded data despite the .png filename
} catch (e: FileNotFoundException) {
e.printStackTrace()
}
try {
if (out != null) {
out.flush()
out.close()
}
} catch (e: IOException) {
e.printStackTrace()
}
context.sendBroadcast(
Intent(
Intent.ACTION_MEDIA_SCANNER_SCAN_FILE,
Uri.parse("file://\$sdCardDir")
)
)
}

/**
* Convert YUV data to a Bitmap
* @param image Image data returned by the camera
* @return bitmap
*/
fun getBitmapFromImage(image: Image): Bitmap? {
val w: Int = image.width
val h: Int = image.height
val i420Size = w * h * 3 / 2
val picel1 = ImageFormat.getBitsPerPixel(ImageFormat.NV21)
val picel2 = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888)
val planes: Array<Image.Plane> = image.planes
val remaining0: Int = planes[0].buffer.remaining()
val remaining1: Int = planes[1].buffer.remaining()
val remaining2: Int = planes[2].buffer.remaining()
//Get the pixelStride; it may or may not equal the width
val pixelStride: Int = planes[2].pixelStride
val rowOffest: Int = planes[2].rowStride
val nv21 = ByteArray(i420Size)
val yRawSrcBytes = ByteArray(remaining0)
val uRawSrcBytes = ByteArray(remaining1)
val vRawSrcBytes = ByteArray(remaining2)
planes[0].buffer.get(yRawSrcBytes)
planes[1].buffer.get(uRawSrcBytes)
planes[2].buffer.get(vRawSrcBytes)
if (pixelStride == w) {
//Equal values mean the YUV rows are tightly packed and can be copied directly
System.arraycopy(yRawSrcBytes, 0, nv21, 0, rowOffest * h)
System.arraycopy(vRawSrcBytes, 0, nv21, rowOffest * h, rowOffest * h / 2 - 1)
} else {
val ySrcBytes = ByteArray(w * h)
val uSrcBytes = ByteArray(w * h / 2 - 1)
val vSrcBytes = ByteArray(w * h / 2 - 1)
for (row in 0 until h) {
//For every rowOffest bytes in the source, copy w bytes into the destination array
System.arraycopy(yRawSrcBytes, rowOffest * row, ySrcBytes, w * row, w)

//Y is copied every row, UV every other row
if (row % 2 == 0) {
//The last row needs one byte fewer
if (row == h - 2) {
System.arraycopy(
vRawSrcBytes,
rowOffest * row / 2,
vSrcBytes,
w * row / 2,
w - 1
)
} else {
System.arraycopy(
vRawSrcBytes,
rowOffest * row / 2,
vSrcBytes,
w * row / 2,
w
)
}
}
}
System.arraycopy(ySrcBytes, 0, nv21, 0, w * h)
System.arraycopy(vSrcBytes, 0, nv21, w * h, w * h / 2 - 1)
}
var bm: Bitmap? = getBitmapImageFromYUV(nv21, w, h)
bm = rotateBitmap(bm, 90f)
bm = bm?.let { convert(it) }
return bm
}

/**
* Convert YUV data to a Bitmap
* @param data raw stream data
* @param width width
* @param height height
*/
private fun getBitmapImageFromYUV(
data: ByteArray?,
width: Int,
height: Int
): Bitmap {
val yuvimage =
YuvImage(data, ImageFormat.NV21, width, height, null)
val baos = ByteArrayOutputStream()
yuvimage.compressToJpeg(Rect(0, 0, width, height), 80, baos)
val jdata = baos.toByteArray()
val bitmapFatoryOptions =
BitmapFactory.Options()
bitmapFatoryOptions.inPreferredConfig = Bitmap.Config.ARGB_8888
val bitmap = BitmapFactory.decodeByteArray(
jdata,
0,
jdata.size,
bitmapFatoryOptions
)
//Bitmap memory reuse: since Android 3.0, BitmapFactory.Options.inBitmap can designate a reusable bitmap, so later same-size decodes reuse this memory instead of allocating again (setting it here only affects subsequent decodes that use these options)
bitmapFatoryOptions.inBitmap = bitmap
return bitmap
}


fun inputStream2ByteArray(filePath: String?): ByteArray? {
val file = File(filePath)
if (!file.exists() && file.isDirectory) {
return null
}
var mIn: InputStream? = null
var data: ByteArray? = null
try {
mIn = FileInputStream(filePath)
val out = ByteArrayOutputStream()
val buffer = ByteArray(1024 * 4)
var n = 0
while (mIn.read(buffer).also { n = it } != -1) {
out.write(buffer, 0, n)
}
data = out.toByteArray()
} catch (e: Throwable) {
e.printStackTrace()
} finally {
mIn?.close()
}
return data
}


fun readArray2File(destPath: String?): Array<Float>? {
if (TextUtils.isEmpty(destPath)) {
return null
}
val file = File(destPath) //file holding the array data
val datas: MutableList<Float> = ArrayList()
try {
val input = DataInputStream(FileInputStream(file)) //file input stream
while (input.available() > 0) {
// read character
val c = input.readFloat()
datas.add(c)
// print
// System.out.print(c + " ");
}
} catch (e: java.lang.Exception) {
println("写文件出错:$e")
}
return datas.toTypedArray()
}

fun ByteArrayToFloatArray(data: ByteArray): FloatArray? {
val result = FloatArray(data.size / 4)
var i = 0
while (i < data.size) {
//Assemble a little-endian 32-bit int; mask each byte as Int to avoid sign extension
val temp = (data[i].toInt() and 0xff) or
((data[i + 1].toInt() and 0xff) shl 8) or
((data[i + 2].toInt() and 0xff) shl 16) or
((data[i + 3].toInt() and 0xff) shl 24)
result[i / 4] = java.lang.Float.intBitsToFloat(temp)
i += 4
}
return result
}
}
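Illustrative Java-side usage of the ImageUtil object above, matching how the commented-out Camera2HelperRgb calls it (ImageUtil.INSTANCE is the Kotlin singleton; the downstream hand-off is an assumption):

ImageReader.OnImageAvailableListener listener = reader -> {
    Image image = reader.acquireLatestImage();
    if (image != null) {
        Bitmap frame = ImageUtil.INSTANCE.imageToBitmap(image, "RGB");
        // Always close the Image, or no further frames are delivered.
        image.close();
        if (frame != null) {
            // hand the frame to the preview / recognition pipeline
        }
    }
};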

+ 965
- 0
app/src/main/java/com/aispeech/nativedemo/camera/MainActivity.kt View file

@@ -0,0 +1,965 @@
//package com.lenovo.faceheadtrackingdemo
//
////import com.lenovo.xuanlib.TestBedNcnn
//import android.annotation.SuppressLint
//import android.app.AlertDialog
//import android.content.Context
//import android.content.Intent
//import android.graphics.*
//import android.graphics.drawable.BitmapDrawable
//import android.hardware.camera2.CameraCharacteristics
//import android.hardware.camera2.CameraDevice
//import android.hardware.camera2.CameraManager
//import android.media.Image
//import android.os.Build
//import android.os.Bundle
//import android.util.Log
//import android.util.Size
//import android.view.*
//import android.widget.*
//import androidx.appcompat.app.AppCompatActivity
//import androidx.camera.core.*
//import androidx.camera.lifecycle.ProcessCameraProvider
//import androidx.camera.view.PreviewView
//import androidx.core.content.ContextCompat
//import androidx.recyclerview.widget.GridLayoutManager
//import androidx.recyclerview.widget.LinearLayoutManager
//import androidx.recyclerview.widget.RecyclerView
//import com.google.common.util.concurrent.ListenableFuture
//import com.hjq.permissions.Permission
//import com.hjq.permissions.XXPermissions
//import com.lenovo.Trackinglib.DetTracking
//import com.lenovo.faceheadtrackingdemo.camera2.AutoFitTextureView
//import com.lenovo.faceheadtrackingdemo.camera2.Camera2HelperRgb
//import com.lenovo.faceheadtrackingdemo.camera2.Camera2Listener
//import com.lenovo.faceheadtrackingdemo.camerax.*
//import com.lenovo.faceheadtrackingdemo.mqtt.MyUtils
//import com.luck.picture.lib.PictureSelector
//import com.luck.picture.lib.config.PictureConfig
//import com.luck.picture.lib.config.PictureMimeType
//import com.luck.picture.lib.entity.LocalMedia
//import com.luck.picture.lib.listener.OnResultCallbackListener
//import kotlinx.coroutines.*
//import java.io.ByteArrayOutputStream
//import java.util.*
//
//
//class MainActivity : AppCompatActivity(), View.OnClickListener {
// private val mChoosevideoButton: Button by lazy { findViewById(R.id.choosevideo_button) }
// private val mPreviewView: PreviewView by lazy { findViewById(R.id.preview_view) }
// private val txErrorCode: TextView by lazy { findViewById(R.id.error_code) }
// private val txDirectionCode: TextView by lazy { findViewById(R.id.direction_code) }
// private val txInLineCode: TextView by lazy { findViewById(R.id.in_line_code) }
// private val txInLineCodeCart: TextView by lazy { findViewById(R.id.in_line_code_cart) }
// private val saveBmp: Button by lazy { findViewById(R.id.save_bmp_btn) }
//
//
// private val mRecyclerView: RecyclerView by lazy { findViewById(R.id.recycler_view) }
//
// private val mAutofirtexttureview: AutoFitTextureView by lazy {
// findViewById<AutoFitTextureView>(
// R.id.autofirtexttureview
// )
// }
//
// /**
// * Initialize camera configuration
// */
// private val mCameraXSize: Size = Size(1920, 1080)
// private val mAspectRatio: Int = AspectRatio.RATIO_16_9
// private val mCameraSelectorInt = CameraSelector.LENS_FACING_FRONT
// private var mFaceBitmap: Bitmap? = null
// private var mPreView: Preview? = null
// private lateinit var mImageAnalysis: ImageAnalysis
// private lateinit var mImageCapture: ImageCapture
// private lateinit var mCameraSelector: CameraSelector
// private var mRotation: Int = 0
//
// /**
// * Request code for the file manager
// */
// private val FILEREQUESTCODE = 1000
//
// // private lateinit var mTestBedNcnn: TestBedNcnn
// private lateinit var mTestBedNcnn: DetTracking
//
// /**
// * Network loading indicator
// */
// private lateinit var mLoadingDialog: LoadingDialog
//
// private var mColor: Int? = null
//
// private var mTrackingObjArray: Array<DetTracking.Obj> = arrayOf<DetTracking.Obj>()
//
// private var mMergeArray: Array<DetTracking.Obj> = arrayOf<DetTracking.Obj>()
// private var mDetectResult: Array<DetTracking.Obj> = arrayOf<DetTracking.Obj>()
//
// private var mTrackingResultSize = 2.5
// private var mCount = 0
// private var mCount1 = 0
//
//
// companion object {
// private const val TAG = "MainActivity"
// }
//
// override fun onCreate(savedInstanceState: Bundle?) {
// super.onCreate(savedInstanceState)
// setContentView(R.layout.activity_main)
// initSDK()
// initPermission()
// }
//
// private fun initAdapter(bitmapList: List<Bitmap?>) {
// val mAdapter = BitmapRecyclerAdapter(this, bitmapList)
// mRecyclerView.layoutManager = GridLayoutManager(this, 5)
//// mRecyclerView.isNestedScrollingEnabled = false//disable scrolling
// mRecyclerView.adapter = mAdapter
// mAdapter.setOnItemClickListener(object : BitmapRecyclerAdapter.OnItemClickListener {
// override fun onClick(position: Int) {
// detect(bitmapList[position]!!)
// }
//
// })
// }
//
// /**
// * Show the loading dialog
// */
// fun showLoading() {
// mLoadingDialog = LoadingDialog.show(
// this, "加载中...", false
// ) { }
// }
//
// /**
// * Dismiss the loading dialog
// */
// fun closeLoading() {
//
// if (mLoadingDialog.isShowing) {
// mLoadingDialog.dismiss()
// }
// }
//
// private fun initSDK() {
// Log.e(TAG, "initSDK: 初始化sdk")
//// mTestBedNcnn = TestBedNcnn()
// mChoosevideoButton.setOnClickListener(this)
// saveBmp.setText(needSave.toString() + "")
// saveBmp.setOnClickListener(View.OnClickListener {
// needSave = !needSave
// saveBmp.setText(needSave.toString() + "")
// })
// mTestBedNcnn = DetTracking()
// mTestBedNcnn.Init(assets)
// }
//
// private fun showDialog(bitmap: Bitmap, faceInfoList: Array<DetTracking.Obj>) {
// val view = LayoutInflater.from(this).inflate(R.layout.faceinfo_dialog, null, false)
// val mFaceInfoRecycler = view.findViewById<RecyclerView>(R.id.face_recycler)
// val mFaceInfoImage = view.findViewById<ImageView>(R.id.face_image)
// val mAdapter = FaceInfoRecyclerAdapter(this, faceInfoList)
// mFaceInfoRecycler.layoutManager = LinearLayoutManager(this)
//// mRecyclerView.isNestedScrollingEnabled = false//disable scrolling
// mFaceInfoRecycler.adapter = mAdapter
//
//// mFaceInfoImage.setImageBitmap(bitmap)
// //Decode the bitmap into a byte stream and copy it so boxes can be drawn on it
// val mBitmapByteArrayOutputStream = ByteArrayOutputStream()
// bitmap.compress(Bitmap.CompressFormat.PNG, 100, mBitmapByteArrayOutputStream)
// val mBitmapArray = mBitmapByteArrayOutputStream.toByteArray()
// val copybitmap = BitmapFactory.decodeByteArray(mBitmapArray, 0, mBitmapArray.size)
// .copy(Bitmap.Config.ARGB_8888, true);
//
//
// val mCanvas = Canvas(copybitmap)
// val mPaint = Paint()
// val mTextPaint = Paint()
// mPaint.color = Color.GREEN
// //stroke only (outline)
// mPaint.style = Paint.Style.STROKE
// mPaint.strokeWidth = 5.0f
//
// mTextPaint.color = Color.RED
// //use the system default typeface
// mTextPaint.typeface = Typeface.DEFAULT
// mTextPaint.strokeWidth = 12f
// mTextPaint.textSize = 50f
//// // maxiao: comment out when bbox is not included
//// for (value in faceInfoList) {
//// val mLeft = value.xmin
//// val mTop = value.ymin
//// val mRight = value.xmax
//// val mBottom = value.ymax
//// val mErrCode = value.errorCode
////
//// val mText = "id:${value.ID} errcode:$mErrCode"
////// mCanvas.drawBitmap(bitmap, Matrix(), mPaint)
//// mCanvas.drawText(mText, mLeft, mTop, mTextPaint)
////
//// mCanvas.drawRect(mLeft, mTop, mRight, mBottom, mPaint)
//// }
// // maxiao: comment out when bbox is not included
// mFaceInfoImage.setImageBitmap(copybitmap)
//
//
// val mDialog = AlertDialog.Builder(this).setView(view).create()
// mDialog.setCanceledOnTouchOutside(true)
// mDialog.window?.setBackgroundDrawable(BitmapDrawable())
// mDialog.show()
// //Set the dialog window size here (3/4 of the screen width in this case); note this must be done after show() is called, otherwise it has no effect
// mDialog.window?.setLayout(
// (ScreenUtils.getScreenWidth(this) / 4 * 3),
// LinearLayout.LayoutParams.WRAP_CONTENT
// )
// }
//
// /**
// * Delete an item
// * @index: index of the item to delete
// * */
// private fun deleteTrackingObjArray(index: Int, mTrackingObjArray: Array<DetTracking.Obj>) {
// val mNewTrackingObjArray: Array<DetTracking.Obj> = arrayOf<DetTracking.Obj>()
// System.arraycopy(
// mTrackingObjArray,
// index + 1,
// mTrackingObjArray,
// index,
// mTrackingObjArray.size - index - 1
// )
// for (i in mNewTrackingObjArray.indices) {
// if (i < index) {
// mNewTrackingObjArray[i] = mTrackingObjArray[i]
// } else {
// mNewTrackingObjArray[i] = mTrackingObjArray[i + 1]
// }
// }
// this@MainActivity.mTrackingObjArray = mNewTrackingObjArray
// }
//
// /**
// * Delete an item
// * @index: index of the item to delete
// * */
//// private fun deleteDetectResult(index: Int, mDetectResult: Array<DetTracking.Obj>) {
//// val mNewDetectResultArray: Array<DetTracking.Obj> = arrayOf<DetTracking.Obj>()
//// System.arraycopy(
//// mDetectResult,
//// index + 1,
//// mDetectResult,
//// index,
//// mDetectResult.size - index - 1
//// )
//// for (i in mNewDetectResultArray.indices) {
//// if (i < index) {
//// mNewDetectResultArray[i] = mDetectResult[i]
//// } else {
//// mNewDetectResultArray[i] = mDetectResult[i + 1]
//// }
//// }
//// this@MainActivity.mDetectResult = mNewDetectResultArray
//// }
//
// /*
// * Remove an element from the array
// * @index: array index
// * @array
// */
//// private fun removeElement(index: Int, array: Array<T?>) {
//// val numMove = array.size - index - 1
//// System.arraycopy(array, index + 1, array, index, numMove)
//// array[array.size - 1] = null
//// for (i in array.indices) {
//// if (null != array[i]) {
//// print(array[i].toString().toString() + " ")
//// }
//// }
//// }
//
// /**
// * Merge arrays.
// */
// private fun <T> concatAll(first: Array<T>, vararg rest: Array<T>): Array<T>? {
// var totalLength = first.size
// for (array in rest) {
// totalLength += array.size
// }
// val result = Arrays.copyOf(first, totalLength)
// var offset = first.size
// for (array in rest) {
// System.arraycopy(array, 0, result, offset, array.size)
// offset += array.size
// }
// return result
// }
//
// private fun mergeArray(
// mDetectArray: Array<DetTracking.Obj>,
// mTrackingArray: Array<DetTracking.Obj>,
// mDetectFlag: IntArray,
// mTrackingFlag: IntArray
// ): Array<DetTracking.Obj> {
// var mResultArray = arrayOf<DetTracking.Obj>()
// for ((index, value) in mDetectArray.withIndex()) {
// Log.e(TAG, "mergeArray: mDetectArray.size:${mDetectArray.size} index:$index")
// if (mDetectFlag[index] == 1) {
// mResultArray = mResultArray.plus(mDetectArray[index])
// }
// }
// for ((index, value) in mTrackingArray.withIndex()) {
// Log.e(TAG, "mergeArray: mTracking.size:${mTrackingArray.size} index:$index")
// if (mTrackingFlag[index] == 1) {
// mResultArray = mResultArray.plus(mTrackingArray[index])
// }
// }
//
// for (value in mDetectFlag) {
// Log.e(TAG, "mergeArray: mDetectFlag.size:${mDetectFlag.size} value:$value")
// }
//
// for (value in mTrackingFlag) {
// Log.e(TAG, "mergeArray: mTrackingFlag.size:${mTrackingFlag.size} value:$value")
// }
//
// return mResultArray
// }
//
// /*@SuppressLint("SetTextI18n")
// private fun detect(bitmap: Bitmap) {
//// if (mRectList.size != 0) {
//// mRectList.clear()
//// }
//
// val mRectList: MutableList<RectFId> = kotlin.collections.ArrayList<RectFId>()
//
// Log.e(TAG, "detect: 开始检测:${bitmap.width} ${bitmap.height}")
//
// // the return value has no bbox (maxiao)
// mDetectResult = mDetectResult.plus(mTestBedNcnn.Detect(bitmap, 0, 100.0F))
//// mDetectResult[0] = mTestBedNcnn.Detect(bitmap, 0, 100.0F) //
//
//
// Log.e(
// TAG,
// "detect: 检测结果: errCode:${mDetectResult.last().errorCode} direction:${mDetectResult.last().directionCode} inLineCode:${mDetectResult.last().inLineCode}"
// )
// }*/
//
// private val VERTICAL_DISTANCE_500 = 0
// private val VERTICAL_DISTANCE_800 = 1
// private val VERTICAL_DISTANCE_1100 = 2
//
// var LINE_L = 540
// var LINE_R = 740
//
// @SuppressLint("SetTextI18n")
// private fun detect(bitmap: Bitmap) {
// Log.d(
// TAG,
// "-----------------------------------------start detectHuman-----------------------------------------"
// )
// val start = System.currentTimeMillis()
// if (mTestBedNcnn == null) {
// Log.e(TAG, "Uninitialized face detector or invalid context.")
// return
// }
// val bitmap: Bitmap = bitmap
// val trailingDistance = 30f
// val obj: DetTracking.Obj =
// mTestBedNcnn.Detect(bitmap, VERTICAL_DISTANCE_800, trailingDistance)
// val humanPos = obj.humanPos
// val humanLength = humanPos.size / 4
// Log.d(
// TAG,
// "detected info errorCode: " + obj.errorCode + ", directionCode: " + obj.directionCode + ", inLineCode: " + obj.inLineCode + ", inLineCodeCart: " + obj.inLineCodeCart + ", human numbers: " + humanLength
// )
// val oder = ShortArray(3)
// oder[0] = obj.inLineCode.toByte().toShort()
// oder[1] = obj.directionCode.toByte().toShort()
// if (obj.inLineCode != 0) {
// var a1: Byte = 0
// var a2: Byte = 0
// var a3: Byte = 0
// var a4: Byte = 0
// for (i in 0 until humanLength) {
// Log.d(
// TAG,
// "detected info human info: num->" + i + ", xMin->" + humanPos[i * 4] + ", yMin->" + humanPos[i * 4 + 1] + ", xMax->" + humanPos[i * 4 + 2] + ", yMax->" + humanPos[i * 4 + 3]
// )
// Log.d(
// TAG,
// "detected location: num->" + i + ", 在屏幕" + (if (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) < 640) "左" else "右") + "/" + humanPos[i * 4] + "-" + humanPos[i * 4 + 2] + "=" + (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4])) / 2
// )
// if (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 < LINE_L) {
// a1 = (a1 + 1).toByte()
// } else if (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 >= LINE_L && humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 < 640) {
// a2 = (a2 + 1).toByte()
// } else if (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 >= 640 && humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 < LINE_R) {
// a3 = (a3 + 1).toByte()
// } else if (humanPos[i * 4] + (humanPos[i * 4 + 2] - humanPos[i * 4]) / 2 >= LINE_R) {
// a4 = (a4 + 1).toByte()
// }
// }
// oder[2] =
// (a1.toInt() shl 6 or (a2.toInt() shl 4) or (a3.toInt() shl 2) or a4.toInt()).toByte()
// .toShort()
// Log.d(TAG, "detected location: " + MyUtils.byte2Binary(byteArrayOf(oder[2].toByte())))
// }
// val cartPos = obj.cartPos
// val cartLength = cartPos.size / 4
// if (obj.inLineCodeCart > 0) {
// var a1: Byte = 0
// var a2: Byte = 0
// var a3: Byte = 0
// var a4: Byte = 0
// for (i in 0 until cartLength) {
// //Log.d(TAG, "detected info cart info: num->" + i + ", xMin->" + cartPos[(i * 4)] + ", yMin->" + cartPos[(i * 4) + 1] + ", xMax->" + cartPos[(i * 4) + 2] + ", yMax->" + cartPos[(i * 4) + 3]);
// //Log.d(TAG, "detected cart location: num->" + i + ", 在屏幕" + ((cartPos[(i * 4)] + (cartPos[(i * 4) + 2] - cartPos[(i * 4)])) < 640 ? "左" : "右") + "/" + (cartPos[(i * 4)] + (cartPos[(i * 4) + 2] - cartPos[(i * 4)])));
// if (cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) < 320) {
// a1 = (a1 + 1).toByte()
// } else if (cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) >= 320 && cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) < 640) {
// a2 = (a2 + 1).toByte()
// } else if (cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) >= 640 && cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) < 940) {
// a3 = (a3 + 1).toByte()
// } else if (cartPos[i * 4] + (cartPos[i * 4 + 2] - cartPos[i * 4]) >= 940) {
// a4 = (a4 + 1).toByte()
// }
// }
// //oder[2] = (byte) (a1 << 6 | a2 << 4 | a3 << 2 | a4);
// //Log.d(TAG, "detected location: " + MyUtils.byte2Binary(new byte[]{(byte) oder[2]}));
// }
// if (needSave) {
// BitmapUtil.saveBitmap(
// obj.inLineCode.toString() + "" + obj.inLineCodeCart + "00" + System.currentTimeMillis(),
// bitmap
// )
// }
// /*val hexCmd: String = SerialPortUtil.sendHexCmd(0x61.toByte(), oder)
// Log.d(TAG, "detected cmd hex: $hexCmd")*/
// runOnUiThread(java.lang.Runnable {
// txErrorCode.setText(obj.errorCode.toString() + "")
// txDirectionCode.setText(obj.directionCode.toString() + "")
// txInLineCode.setText(obj.inLineCode.toString() + "")
// txInLineCodeCart.setText(obj.inLineCodeCart.toString() + "")
// })
// val end = System.currentTimeMillis()
// Log.d(
// TAG,
// "-----------------------------------------end detectHuman-----------------------------------------"
// )
// Log.d(TAG, "detectHuman cost time: " + (end - start) + "ms")
// }
//
// private var needSave = false
//
// /**
// * Request runtime permissions.
// */
// private fun initPermission() {
// if (XXPermissions.isGranted(this, Permission.CAMERA)) {
//// initCamera()
// initCamera2()
// } else {
// XXPermissions.with(this)
// .permission(Permission.CAMERA)
// .permission(Permission.Group.STORAGE)
// .request { _, all ->
// if (all) {
// Toast.makeText(
// this@MainActivity,
// "权限获取成功",
// Toast.LENGTH_SHORT
// )
// .show()
//// initCamera()
// initCamera2()
// } else {
// Toast.makeText(
// this@MainActivity,
// "请检查权限授予情况",
// Toast.LENGTH_SHORT
// )
// .show()
// }
// }
// }
// }
//
// @SuppressLint("WrongConstant")
// override fun onActivityResult(requestCode: Int, resultCode: Int, data: Intent?) {
// super.onActivityResult(requestCode, resultCode, data)
// if (requestCode == XXPermissions.REQUEST_CODE) {
// if (XXPermissions.isGranted(this, Permission.CAMERA) && XXPermissions.isGranted(
// this,
// Permission.Group.STORAGE
// )
// ) {
// Toast.makeText(this, "用户已经在权限设置页授予了权限", Toast.LENGTH_SHORT).show()
//// initCamera()
// initCamera2()
// } else {
// Toast.makeText(this, "用户没有在权限设置页授予权限", Toast.LENGTH_SHORT).show()
// }
// } else if (requestCode == FILEREQUESTCODE) {
// if (data != null) {
// val mChooseFilePath = data.getStringExtra("path")
// Log.e(TAG, "onActivityResult: 选择到文件路径:$mChooseFilePath")
// if (mChooseFilePath != null) {
// val mBitmapList = VideoToBitmapUtil.getBitmapsFromVideo(mChooseFilePath)
// Log.e(TAG, "onActivityResult:解析出来的图片数量:${mBitmapList.size}")
// }
// }
// }
// }
//
// private fun initCamera2() {
// CoroutineScope(Dispatchers.IO).launch {
// val camera2Helper = Camera2HelperRgb.Builder()
// .cameraListener(mRGBListener)
// .specificCameraId("141") //RGB
// .context(applicationContext)
// .previewOn(mAutofirtexttureview)
// .previewViewSize(
// Point(
// mAutofirtexttureview.layoutParams.width,
// mAutofirtexttureview.layoutParams.height
// )
// )
// .build()
// camera2Helper?.start()
// }
// }
//
// /**
// * Callbacks for the RGB camera.
// */
// private val mRGBListener = object : Camera2Listener {
// override fun onCameraOpened(
// cameraDevice: CameraDevice?,
// cameraId: String?,
// previewSize: Size?,
// displayOrientation: Int,
// isMirror: Boolean
// ) {
// Log.e(TAG, "onCameraOpened: RGB相机打开")
// }
//
// override fun onPreview(bitmap: Bitmap?) {
// Log.d(
// TAG,
// "onPreview: bitmap.width():${bitmap?.width} bitmap.height():${bitmap?.height}"
// )
// if (bitmap != null) {
// detect(bitmap)
// }
//// CoroutineScope(Dispatchers.Main).launch {
//// mImageView.setImageBitmap(bitmap)
//// }
//// runOnUiThread {
//// mRgbImageview.setImageBitmap(mbitmap)
//// }
// }
//
// override fun onCameraClosed() {
// Log.e(TAG, "onCameraClosed: RGB相机关闭")
// }
//
// override fun onCameraError(e: Exception?) {
// Log.e(TAG, "onCameraError: RGB相机出错:${e?.message}")
// }
//
// }
//
// /**
// * Initialize the camera.
// */
// private fun initCamera() {
// CameraXUtil.instance!!.initCameraX(this, object : CameraXInterface {
// override fun initCameraSuccess(
// cameraProviderFuture: ListenableFuture<ProcessCameraProvider>,
// cameraProvider: ProcessCameraProvider?,
// iamgeAnalysis: ImageAnalysis.Builder?,
// imageCapture: ImageCapture.Builder?,
// preView: Preview.Builder?,
// cameraSelector: CameraSelector.Builder?,
// videoCapture: VideoCapture.Builder?
// ) {
// searhPreview()
// initUseCases(iamgeAnalysis, imageCapture, preView, cameraSelector, videoCapture)
// // bind to the current lifecycle and get the camera object to apply its configuration
// CameraXUtil.instance!!.bindToLifecycle(
// mCameraSelector,
// mPreView,
// mImageCapture,
// mImageAnalysis
// )
// // render the camera preview
// cameraProviderFuture.addListener(kotlinx.coroutines.Runnable {
// mPreView?.setSurfaceProvider(
// mPreviewView.surfaceProvider
// )
// }, ContextCompat.getMainExecutor(this@MainActivity))
// }
//
// override fun initCameraError(error: String?) {
// //TODO("Not yet implemented")
// finish()
// Toast.makeText(
// this@MainActivity,
// "初始化相机失败,请稍后重试$error",
// Toast.LENGTH_SHORT
// )
// .show()
// }
// })
// }
//
// /**
// * Query camera information.
// */
// private fun searhPreview() {
// val cameraManager = this.getSystemService(CAMERA_SERVICE) as CameraManager
// Log.e(TAG, "initgetid: 当前设备Camera数量:${cameraManager.cameraIdList.size}")
// for (cameraId in cameraManager.cameraIdList) {
// Log.e(TAG, "initgetid: CameraId$cameraId")
// }
//
// val characteristics: CameraCharacteristics = cameraManager.getCameraCharacteristics("1")
// val map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
// for (d in cameraManager.cameraIdList) {
// Log.e(TAG, "cameraId == $d")
// }
// // the FPS ranges supported by this camera
// val fpsRanges =
// characteristics.get(CameraCharacteristics.CONTROL_AE_AVAILABLE_TARGET_FPS_RANGES)
// Log.e(
// TAG,
// "SYNC_MAX_LATENCY_PER_FRAME_CONTROL: " + Arrays.toString(fpsRanges)
// )
//
// val list = listOf(
// *map!!.getOutputSizes(
// SurfaceTexture::class.java
// )
// )
// }
//
// /**
// * Initialize the camera use-case configuration.
// */
// private fun initUseCases(
// imageAnalysis: ImageAnalysis.Builder?,
// imageCapture: ImageCapture.Builder?,
// preView: Preview.Builder?,
// cameraSelector: CameraSelector.Builder?,
// videoCapture: VideoCapture.Builder?
// ) {
// mRotation =
// (getSystemService(Context.WINDOW_SERVICE) as WindowManager).defaultDisplay.rotation
// initCameraSelector(cameraSelector)
// initPreview(preView)
// initImageCapture(imageCapture)
// initImageAnalysis(imageAnalysis)
// }
//
// /**
// * Initialize the image-analysis configuration.
// */
// @SuppressLint("UnsafeExperimentalUsageError", "UnsafeOptInUsageError")
// private fun initImageAnalysis(imageAnalysis: ImageAnalysis.Builder?) {
// // create the image-analysis use case
// mImageAnalysis = imageAnalysis!!
// .setTargetResolution(mCameraXSize)
//// .setTargetAspectRatio(mAspectRatio)
// .setTargetRotation(mRotation)
// .setBackpressureStrategy(ImageAnalysis.STRATEGY_KEEP_ONLY_LATEST)
// .build()
//
// mImageAnalysis.setAnalyzer(
// ContextCompat.getMainExecutor(this),
// { image ->
// val mImage = image.image
// mFaceBitmap = mImage?.let { getBitmapFromImage(it) }
//// mImageView.setImageBitmap(mFaceBitmap)
// Log.e(
// TAG,
// "initImageAnalysis: bitmap的分辨率:${mFaceBitmap?.width} * ${mFaceBitmap?.height}"
// )
// mFaceBitmap?.let { detect(it) }
// image.close()
// })
// }
//
// /**
// * Build the image-capture use case.
// */
// private fun initImageCapture(imageCapture: ImageCapture.Builder?) {
// // create the image-capture use case
// mImageCapture = imageCapture!!
// .setTargetResolution(mCameraXSize)
//// .setTargetAspectRatio(mAspectRatio)
// .setFlashMode(ImageCapture.FLASH_MODE_AUTO)
// .setCaptureMode(ImageCapture.CAPTURE_MODE_MINIMIZE_LATENCY)
// .build()
//
// // orientation listener
// val orientationEventListener: OrientationEventListener =
// object : OrientationEventListener(this) {
// override fun onOrientationChanged(orientation: Int) {
// // watch the orientation value to determine the target rotation
// val rotation: Int = when (orientation) {
// in 45..135 -> {
// Surface.ROTATION_270
// }
// in 135..225 -> {
// Surface.ROTATION_180
// }
// in 225..315 -> {
// Surface.ROTATION_90
// }
// else -> {
// Surface.ROTATION_0
// }
// }
// mImageCapture.targetRotation = rotation
// }
// }
// orientationEventListener.enable()
// }
//
// /**
// * Build the preview use case.
// */
// private fun initPreview(preView: Preview.Builder?) {
// // create the preview use case
// mPreView = preView!!
// .setTargetAspectRatio(mAspectRatio)
// .setTargetRotation(mRotation)
// .build()
//
// }
//
// /**
// * Select the camera.
// */
// private fun initCameraSelector(cameraSelector: CameraSelector.Builder?) {
// // create the camera selector
// mCameraSelector = cameraSelector!!
// .requireLensFacing(mCameraSelectorInt)
// .build()
// }
//
//
// /**
// * Convert YUV data to a bitmap.
// * @param image the Image returned by the camera
// */
// private fun getBitmapFromImage(image: Image): Bitmap? {
//// val mStartTime = System.currentTimeMillis()
//// val w: Int = image.width
//// val h: Int = image.height
//// val i420Size = w * h * 3 / 2
//// val picel1 = ImageFormat.getBitsPerPixel(ImageFormat.NV21)
//// val picel2 = ImageFormat.getBitsPerPixel(ImageFormat.YUV_420_888)
//// val planes: Array<Image.Plane> = image.planes
//// val remaining0: Int = planes[0].buffer.remaining()
//// val remaining1: Int = planes[1].buffer.remaining()
//// val remaining2: Int = planes[2].buffer.remaining()
//// // get pixelStride, which may or may not equal the width
//// val pixelStride: Int = planes[2].pixelStride
//// val rowOffest: Int = planes[2].rowStride
//// val nv21 = ByteArray(i420Size)
//// val yRawSrcBytes = ByteArray(remaining0)
//// val uRawSrcBytes = ByteArray(remaining1)
//// val vRawSrcBytes = ByteArray(remaining2)
//// planes[0].buffer.get(yRawSrcBytes)
//// planes[1].buffer.get(uRawSrcBytes)
//// planes[2].buffer.get(vRawSrcBytes)
//// if (pixelStride == w) {
//// // if equal, the YUV rows are tightly packed and can be copied directly
//// System.arraycopy(yRawSrcBytes, 0, nv21, 0, rowOffest * h)
//// System.arraycopy(vRawSrcBytes, 0, nv21, rowOffest * h, rowOffest * h / 2 - 1)
//// } else {
//// val ySrcBytes = ByteArray(w * h)
//// val uSrcBytes = ByteArray(w * h / 2 - 1)
//// val vSrcBytes = ByteArray(w * h / 2 - 1)
//// for (row in 0 until h) {
//// // copy w bytes from each rowOffest-byte stride of the source into the target
//// System.arraycopy(yRawSrcBytes, rowOffest * row, ySrcBytes, w * row, w)
////
//// // y is copied every row, uv every other row
//// if (row % 2 == 0) {
//// // the last row needs one byte less
//// if (row == h - 2) {
//// System.arraycopy(
//// vRawSrcBytes,
//// rowOffest * row / 2,
//// vSrcBytes,
//// w * row / 2,
//// w - 1
//// )
//// } else {
//// System.arraycopy(
//// vRawSrcBytes,
//// rowOffest * row / 2,
//// vSrcBytes,
//// w * row / 2,
//// w
//// )
//// }
//// }
//// }
//// System.arraycopy(ySrcBytes, 0, nv21, 0, w * h)
//// System.arraycopy(vSrcBytes, 0, nv21, w * h, w * h / 2 - 1)
//// }
//// Log.e(TAG, "getBitmapFromImage: 生成YUV数据;${System.currentTimeMillis() - mStartTime}")
////// var bm: Bitmap? = BitmapUtil.getBitmapImageFromYUV(nv21, w, h)
//// val mStartTime5 = System.currentTimeMillis()
//// // convert the image to a byte array
//// val mNV21Bytes = BitmapUtil.getYUVBuffer(image)
//// Log.e(
//// TAG,
//// "getBitmapFromImage: yuv流转化byte数组所需时间:${System.currentTimeMillis() - mStartTime5}"
//// )
//// val mStartTime4 = System.currentTimeMillis()
//// // build a bitmap from the byte array
//// var bm = NV21ToBitmap(this).nv21ToBitmap(mNV21Bytes, w, h)
//// Log.e(TAG, "getBitmapFromImage: 生成bitmap所需时间:${System.currentTimeMillis() - mStartTime4}")
//// val mStartTIme2 = System.currentTimeMillis()
//// bm = bm?.let { BitmapUtil.convert(it) }
//// Log.e(TAG, "getBitmapFromImage: 镜像:${System.currentTimeMillis() - mStartTIme2}")
////// val mStartTIme3 = System.currentTimeMillis()
////// bm = BitmapUtil.rotateBitmap(bm, 0f)
////// Log.e(TAG, "getBitmapFromImage: bitmap旋转耗时:${System.currentTimeMillis() - mStartTIme3}")
////// Log.e(TAG, "getBitmapFromImage: 处理bitmap总时间:${System.currentTimeMillis() - mStartTime}")
//// return bm
//
// // use the JNI interface
// return NV21ToBitmap.getBitmapByImageJPEG(image, mTestBedNcnn, this);
//
//// // use the OpenCV Java interface
//// // Image to byte[]
//// val beforeGetRRTime = System.currentTimeMillis()
//// val nv21data = getYUVBuffer(image)
//// Log.e(
//// OpencvBitmapUtils.TAG,
//// " image to byte[]: " + (System.currentTimeMillis() - beforeGetRRTime)
//// )
////
//// // byte[] to bitmap
//// val beforeGetRRTime2 = System.currentTimeMillis()
//// var bm = NV21ToBitmap(this).nv21ToBitmap(nv21data, image.width, image.height);
//// Log.e(
//// OpencvBitmapUtils.TAG,
//// " byte[] to bitmap: " + (System.currentTimeMillis() - beforeGetRRTime2)
//// )
////
//// return OpencvBitmapUtils.getBitmapByImage(bm);
// }
//
// override fun onClick(v: View?) {
// when (v?.id) {
// R.id.choosevideo_button -> {
//// LFilePicker().withActivity(this)
//// .withChooseMode(true)
//// .withRequestCode(FILEREQUESTCODE)
//// .start()
//
// PictureSelector.create(this)
// .openGallery(PictureMimeType.ofVideo())
// .imageEngine(GlideEngine.createGlideEngine())
// .selectionMode(PictureConfig.SINGLE)
// .isAndroidQTransform(true)
// .forResult(object : OnResultCallbackListener<LocalMedia?> {
// override fun onResult(result: List<LocalMedia?>) {
// GlobalScope.launch {
// withContext(Dispatchers.IO) {
// // result callback
// for ((index, value) in result.withIndex()) {
// if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
// Log.e(
// TAG,
// "onResult: path:${value?.androidQToPath} index:$index"
// )
// // decode the first video in the local list into a list<Bitmap>
// if (value != null) {
// withContext(Dispatchers.Main) {
// showLoading()
// }
// val mVideoBitmapList =
// VideoToBitmapUtil.getBitmapsFromVideo(value.androidQToPath)
// Log.e(
// TAG,
// "onActivityResult: androidQ 图片长度:${mVideoBitmapList.size}"
// )
// withContext(Dispatchers.Main) {
// closeLoading()
// initAdapter(mVideoBitmapList)
// }
// for ((index, bitmap) in mVideoBitmapList.withIndex()) {
// if (bitmap != null) {
// BitmapUtils.saveBitmapDepthImage(
// this@MainActivity,
// bitmap,
// "$index"
// )
// }
// if (bitmap != null) {
// Log.e(
// TAG,
// "onResult: 当前检测的bitmap格式:${bitmap.config}",
// )
//// detect(bitmap)
// }
// }
// }
// } else {
// Log.e(
// TAG,
// "onResult: path:${value?.path} index:$index"
// )
// // decode the first video in the local list into a list<Bitmap>
// if (value != null) {
// withContext(Dispatchers.Main) {
// showLoading()
// }
// val mVideoBitmapList =
// VideoToBitmapUtil.getBitmapsFromVideo(value.path)
// Log.e(
// TAG,
// "onActivityResult: android 图片长度:${mVideoBitmapList.size}"
// )
// withContext(Dispatchers.Main) {
// closeLoading()
// initAdapter(mVideoBitmapList)
// }
// for (bitmap in mVideoBitmapList) {
// if (bitmap != null) {
//// detect(bitmap)
// Log.e(
// TAG,
// "onResult: 当前检测的bitmap格式:${bitmap.config}",
// )
// }
// }
//
// }
// }
// }
// }
// }
// }
//
// override fun onCancel() {
// // cancelled
// Toast.makeText(this@MainActivity, "取消选择", Toast.LENGTH_SHORT)
// .show()
// }
// })
//
// }
// }
// }
//}
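The commented-out detect() above packs the four per-zone person counts into a single status byte, two bits per zone, for the (disabled) serial-port command path. A minimal Java sketch of that packing, assuming each count has already been clamped to 0-3 so neighbouring fields cannot overlap:

// Hypothetical helper mirroring the packing in detect(): counts a1..a4, two bits
// each, a1 in the high bits; the & 0x3 masks enforce the 0-3 range assumption.
static byte packZoneCounts(int a1, int a2, int a3, int a4) {
    return (byte) ((a1 & 0x3) << 6 | (a2 & 0x3) << 4 | (a3 & 0x3) << 2 | (a4 & 0x3));
}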

+ 8
- 1
app/src/main/java/com/aispeech/nativedemo/config/Config.java

@@ -12,6 +12,12 @@ public class Config {
public static final long ONE_DAY = 24 * ONE_HOUR;
public static final long ONE_MONTH = 30 * ONE_DAY;

public static final String DB_NAME = "faces.db3";
public static final String DB_PATH = "/db/";

public static final String CONFIG_NAME = "FaceConfig";
public static final String CONFIG_PATH = "/config/";

public static class FilePath{
public static final String DIRECTORY = "/Android/data/com.aispeech.nativedemo/files";//file root path
public static final String PROGRAM_ROOT_PATH = Environment.getExternalStorageDirectory().getAbsolutePath() + DIRECTORY;
@@ -22,7 +28,8 @@ public class Config {

public static class ErrorEvent{
public static final int ERROR_LEVEL_1 = 1; //error level: urgent (classes 1, 2, 3)
public static final int ERROR_LEVEL_2 = 2; //error level: normal (classes 1, 2, 3)
public static final int ERROR_LEVEL_2 = 2; //error level: normal (classes 1, 2, 3)
public static final int ERROR_LEVEL_3 = 3; //error level: minor (classes 1, 2, 3)

public static final int ERROR_TYPE_2 = 2; //problem category (reboot, camera failure, etc.) (classes 1, 2, 3)
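The new DB_NAME/DB_PATH and CONFIG_NAME/CONFIG_PATH constants pair with the FilePath roots above. A sketch of how they could be combined into on-disk locations, assuming PROGRAM_ROOT_PATH is the intended base (the call sites are not part of this diff):

// Hypothetical path assembly from the new constants.
String dbFile = Config.FilePath.PROGRAM_ROOT_PATH + Config.DB_PATH + Config.DB_NAME;   // .../files/db/faces.db3
String cfgFile = Config.FilePath.PROGRAM_ROOT_PATH + Config.CONFIG_PATH + Config.CONFIG_NAME;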


+ 15
- 5
app/src/main/java/com/aispeech/nativedemo/dds/DDSManager.java

@@ -8,10 +8,14 @@ import com.aispeech.dui.dds.DDS;
import com.aispeech.dui.dds.DDSErrorListener;
import com.aispeech.dui.dds.agent.wakeup.word.WakeupWord;
import com.aispeech.dui.dds.exceptions.DDSNotInitCompleteException;
import com.aispeech.nativedemo.MainActivity;
import com.aispeech.nativedemo.R;
import com.aispeech.nativedemo.asr.observer.DuiMessageObserver;
import com.aispeech.nativedemo.config.Config;
import com.aispeech.nativedemo.log.Logger;
import com.aispeech.nativedemo.network.ws.WebSocketManager;

import org.json.JSONException;
import org.json.JSONObject;

import java.util.ArrayList;
@@ -70,11 +74,15 @@ public class DDSManager {
public void speakText(String text){
synchronized (lock){
try {
if (!DuiMessageObserver.mIsSleep) {
DDS.getInstance().getAgent().stopDialog();
}
DDS.getInstance().getAgent().avatarClick(text);
} catch (DDSNotInitCompleteException e) {
// if (!DuiMessageObserver.mIsSleep) {
// DDS.getInstance().getAgent().stopDialog();
// }
// DDS.getInstance().getAgent().avatarClick(text);
JSONObject jo = new JSONObject();
jo.put("type", "djTtsText");
jo.put("data", text);
WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@@ -134,7 +142,9 @@ public class DDSManager {
words.add(getWakeupWord("bie shuo le", "别说了", "0.18", "好的"));
words.add(getWakeupWord("ting yi xia", "停一下", "0.18", "好的"));
DDS.getInstance().getAgent().getWakeupEngine().updateShortcutWakeupWords(words);
Logger.e("设置唤醒词成功");
} catch (DDSNotInitCompleteException e) {
Logger.e("设置唤醒词失败");
e.printStackTrace();
}
}
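With this change speakText() no longer interrupts the dialog and speaks through DDS; the text is forwarded over the WebSocket as a djTtsText message, presumably for the digital-human client to voice. A caller-side sketch, assuming DDSManager keeps a conventional singleton accessor (not shown in this hunk):

// Hypothetical call site; getInstance() is an assumption.
DDSManager.getInstance().speakText("欢迎光临");
// -> sends {"type":"djTtsText","data":"欢迎光临"} over the WebSocket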


+ 7
- 12
app/src/main/java/com/aispeech/nativedemo/face/FaceManager.java

@@ -121,13 +121,13 @@ public class FaceManager {
long detectEndTime = SystemClock.uptimeMillis();
Log.d(TAG, "----- detectMultiAtt time ----- " + (detectEndTime - detectStartTime));
mSb = new StringBuilder();
// for (MultiAtt att : results_att) {
// Log.d(TAG, "LZH_hd: " + String.valueOf(att.hd_x1) + " " + String.valueOf(att.hd_y1) + " " + String.valueOf(att.hd_x2) + " " + String.valueOf(att.hd_y2) + " " + String.valueOf(att.track_id));
// Log.d(TAG, "LZH_fa: " + String.valueOf(att.fa_x1) + " " + String.valueOf(att.fa_y1) + " " + String.valueOf(att.fa_x2) + " " + String.valueOf(att.fa_y2) + " " + String.valueOf(att.fa_quality));
// Log.d(TAG, "LZH_bd: " + String.valueOf(att.bd_x1) + " " + String.valueOf(att.bd_y1) + " " + String.valueOf(att.bd_x2) + " " + String.valueOf(att.bd_y2) + " " + String.valueOf(att.bd_gender) + " " + String.valueOf(att.bd_age));
// Log.d(TAG, "LZH_hd_fa_bd: " + String.valueOf(att.hd_fa) + " " + String.valueOf(att.hd_bd));
// Log.d(TAG, "track_id: " + String.valueOf(att.track_id));
// }
for (MultiAtt att : results_att) {
Log.d(TAG, "LZH_hd: " + String.valueOf(att.hd_x1) + " " + String.valueOf(att.hd_y1) + " " + String.valueOf(att.hd_x2) + " " + String.valueOf(att.hd_y2) + " " + String.valueOf(att.track_id));
Log.d(TAG, "LZH_fa: " + String.valueOf(att.fa_x1) + " " + String.valueOf(att.fa_y1) + " " + String.valueOf(att.fa_x2) + " " + String.valueOf(att.fa_y2) + " " + String.valueOf(att.fa_quality));
Log.d(TAG, "LZH_bd: " + String.valueOf(att.bd_x1) + " " + String.valueOf(att.bd_y1) + " " + String.valueOf(att.bd_x2) + " " + String.valueOf(att.bd_y2) + " " + String.valueOf(att.bd_gender) + " " + String.valueOf(att.bd_age));
Log.d(TAG, "LZH_hd_fa_bd: " + String.valueOf(att.hd_fa) + " " + String.valueOf(att.hd_bd));
Log.d(TAG, "track_id: " + String.valueOf(att.track_id));
}
int idx = 0;
List<MultiAtt> results = new ArrayList<>();
MultiAtt trackAtt = null;
@@ -184,10 +184,6 @@ public class FaceManager {
}

String lastUserID = "";
int mLastTrackId = -1;
int mLastTrackTimes = 0;
long mLastDetectTime = 0;
private int mMaxTag = 100;

private Map<String, PersonInfo> mPersons = new HashMap<>();
private Map<Integer, PersonInfo> mUndeterminedPersons = new HashMap<>();
@@ -561,7 +557,6 @@ public class FaceManager {
mCallback.updateRect(mSelectPerson.result, mSelectPerson.result.fa_w + "");
sendMsg(mSelectPerson);
mTrackId = mSelectPerson.trackId;
mMaxTag = 100;

/**
* Send the log


+ 6
- 3
app/src/main/java/com/aispeech/nativedemo/network/ws/DigiWebSocketServer.java

@@ -158,10 +158,13 @@ public class DigiWebSocketServer extends WebSocketServer {
int code = obj.optInt("code", 0);
if (code != 200) {
onFail(obj.getString("msg"));
MessageUtils.sendInitialize();
} else{
MessageUtils.sendSNCode();
ConfigManager.getInstance().scheduleConfigTask();
if(obj.optString("msg").equals("true")){
MessageUtils.sendSNCode();
ConfigManager.getInstance().scheduleConfigTask();
} else{
MessageUtils.sendInitialize();
}
}
} catch (Exception e) {
e.printStackTrace();
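The reworked branch keys on both code and msg, so the server reply now carries a registration flag. The reply shapes implied by the diff (a sketch; the exact server contract is not shown here):

// {"code":200,"msg":"true"}    -> registered: send the SN code and start the config schedule
// {"code":200,"msg":"false"}   -> not yet registered: send the initialize request
// {"code":<other>,"msg":"..."} -> failure: onFail(msg), then the initialize request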


+ 2
- 0
app/src/main/java/com/aispeech/nativedemo/network/ws/MessageUtils.java

@@ -12,6 +12,7 @@ import com.aispeech.nativedemo.db.ModelDbHelper;
import com.aispeech.nativedemo.db.SkillDbHelper;
import com.aispeech.nativedemo.entity.Model;
import com.aispeech.nativedemo.entity.Skill;
import com.aispeech.nativedemo.shape.ShapeManager;

import org.json.JSONException;
import org.json.JSONObject;
@@ -94,6 +95,7 @@ public class MessageUtils {
person.put("type", "chatMessage");
person.put("data", txt);
sendMessage(person.toString());
// ShapeManager.getInstance().start(txt);
} catch (JSONException e) {
e.printStackTrace();
}


+ 131
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/HdmiService.java

@@ -0,0 +1,131 @@
package com.aispeech.nativedemo.rockchip;

import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.os.SystemClock;
import android.util.Log;
import android.util.Size;

import com.aispeech.nativedemo.rockchip.util.JniCameraCall;

public class HdmiService extends Service {

private static String TAG = "HdmiService";

private boolean loop = true;
private boolean isHdmiIn = false;
private final boolean debug = false;
private OnHdmiStatusListener mOnHdmiStatusListener;
private Size curDriverDimension = null;

Runnable mScanHdmiIn = new Runnable() {

@Override
public void run() {
isHdmiIn = false;
while (loop) {
int[] format = JniCameraCall.getFormat();
if (format != null && format.length > 0) {
curDriverDimension = new Size(format[0],format[1]);
if (debug)
Log.i(TAG, "format != null format[2] = " + format[2]);
if (format[2] != 0 && !isHdmiIn) {
Log.i(TAG, "hdmi is plug");
isHdmiIn = true;
//wait for the activity to bind the service
while (mOnHdmiStatusListener == null) {
try {
Thread.sleep(200);
} catch(InterruptedException e) {
e.printStackTrace();
}
if (!loop) break;
}
if (mOnHdmiStatusListener != null) {
mOnHdmiStatusListener.onHdmiStatusChange(isHdmiIn, curDriverDimension);
}
} else if (format[2] == 0 && isHdmiIn) {
Log.i(TAG, "hdmi is unplug");
isHdmiIn = false;
//wait for the activity to bind the service
while (mOnHdmiStatusListener == null) {
try {
Thread.sleep(200);
} catch(InterruptedException e) {
e.printStackTrace();
}
if (!loop) break;
}
if (mOnHdmiStatusListener != null) {
mOnHdmiStatusListener.onHdmiStatusChange(isHdmiIn, curDriverDimension);
}
} else {
// Log.i(TAG, "hdmi is no change");
}
curDriverDimension = null;
}
SystemClock.sleep(500);
}
}
};

/**
* OnHdmiStatusListener
*/
public interface OnHdmiStatusListener {
void onHdmiStatusChange(boolean isHdmiIn, Size driverDimension);
}

/**
* HdmiBinder
*/
public class HdmiBinder extends Binder {
/**
* Get the current Service instance.
*
* @return
*/
public HdmiService getService() {
return HdmiService.this;
}
}

@Override
public void onCreate() {
super.onCreate();
Log.i(TAG, "HdmiService onCreate()");
new Thread(mScanHdmiIn).start();
}

@Override
public void onDestroy() {
super.onDestroy();
Log.i(TAG, "HdmiService onDestroy()");
loop = false;
this.mOnHdmiStatusListener = null;
}

/**
* 返回一个Binder对象
*/
@Override
public IBinder onBind(Intent intent) {
return new HdmiBinder();
}

@Override
public boolean onUnbind(Intent intent) {
Log.i(TAG, "onUnbind");
loop = false;
return super.onUnbind(intent);
}

public void setOnHdmiStatusListener(OnHdmiStatusListener hdmiStatusListener) {
this.mOnHdmiStatusListener = hdmiStatusListener;
if (hdmiStatusListener == null) {
stopSelf();
}
}
}
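HdmiService polls JniCameraCall.getFormat() about every 500 ms and reports plug-state changes through OnHdmiStatusListener. A minimal client sketch, bound the same way the commented-out ServiceConnection in RockchipCamera2 does it:

// Sketch: binding to HdmiService and reacting to HDMI plug events.
Intent hdmiIntent = new Intent(context, HdmiService.class);
context.bindService(hdmiIntent, new ServiceConnection() {
    @Override public void onServiceConnected(ComponentName name, IBinder binder) {
        HdmiService service = ((HdmiService.HdmiBinder) binder).getService();
        service.setOnHdmiStatusListener((isHdmiIn, driverDimension) ->
                Log.i("HdmiClient", "plugged=" + isHdmiIn + " size=" + driverDimension));
    }
    @Override public void onServiceDisconnected(ComponentName name) { /* no-op */ }
}, Context.BIND_AUTO_CREATE);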

+ 751
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/RockchipCamera2.java

@@ -0,0 +1,751 @@
package com.aispeech.nativedemo.rockchip;

import android.Manifest;
import android.app.Activity;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.RemoteException;
import android.text.TextUtils;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.widget.Button;
import android.widget.Toast;

import androidx.annotation.NonNull;
import androidx.constraintlayout.widget.ConstraintLayout;
import androidx.core.app.ActivityCompat;

import com.aispeech.nativedemo.R;
import com.aispeech.nativedemo.network.ws.WebSocketManager;
import com.aispeech.nativedemo.rockchip.util.DataUtils;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

import rockchip.hardware.hdmi.V1_0.IHdmi;
import rockchip.hardware.hdmi.V1_0.IHdmiCallback;

public class RockchipCamera2 extends Activity {

private static final String TAG = "RockchipCamera2";
private TextureView textureView;
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();

static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}

protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension = new Size(3840, 2160);
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
private HdmiService mHdmiService;
private ConstraintLayout rootView;
private boolean mPaused = false;
private String mAssignCameraId;

private Button stopWeb;
private Button startWeb;
private Button stopApp;
private Button reboot;
private Button close;
private Button open;
private Button stopClient;
private Button startClient;
private WebView mWebView;

class HdmiCallback extends IHdmiCallback.Stub {
public HdmiCallback() {
}

public void onConnect(String cameraId) throws RemoteException {
Log.e(TAG, "onConnect" + cameraId);
openCamera();
}

public void onFormatChange(String cameraId, int width, int height) throws RemoteException {
Log.e(TAG, "onFormatChange" + cameraId);
closeCamera();
imageDimension = new Size(width, height);
openCamera();
}

public void onDisconnect(String cameraId) throws RemoteException {
Log.e(TAG, "onDisconnect" + cameraId);
closeCamera();
}
}

HdmiCallback mHdmiCallback;

@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_rockchip_camera2);
mAssignCameraId = getIntent().getStringExtra(DataUtils.EXTRA_ASSIGN_CAMERA_ID);
rootView = (ConstraintLayout) findViewById(R.id.root_view);
//textureView = findViewById(R.id.texture);


initView();
WebSocketManager.getInstance(getApplicationContext());

initHdmi();
startHdmi();
}

private void fullScreen() {
getWindow().getDecorView().getRootView().setSystemUiVisibility(
View.SYSTEM_UI_FLAG_LAYOUT_STABLE
| View.SYSTEM_UI_FLAG_LAYOUT_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_LAYOUT_FULLSCREEN
| View.SYSTEM_UI_FLAG_HIDE_NAVIGATION
| View.SYSTEM_UI_FLAG_FULLSCREEN
| View.SYSTEM_UI_FLAG_IMMERSIVE_STICKY
| View.SYSTEM_UI_FLAG_LOW_PROFILE);
}

private void createTextureView() {
Log.d(TAG, "recreatTextureview");
runOnUiThread(new Runnable() {
@Override
public void run() {
Log.i(TAG, "textureView remove");
/*if (textureView != null) {
rootView.removeView(textureView);
textureView = null;
}*/
//textureView = new TextureView(RockchipCamera2.this);
textureView = (TextureView) findViewById(R.id.texture3);
//textureView.setRotationY(180);
//textureView.setRotation(270);
//ConstraintLayout.LayoutParams layoutParams = new ConstraintLayout.LayoutParams(
//ViewGroup.LayoutParams.WRAP_CONTENT, ViewGroup.LayoutParams.WRAP_CONTENT);
//ViewGroup.LayoutParams.MATCH_PARENT, ViewGroup.LayoutParams.MATCH_PARENT);
//layoutParams.addRule(RelativeLayout.ALIGN_PARENT_TOP);
// textureView.setLayoutParams(layoutParams);
//rootView.addView(textureView, 0);
textureView.setSurfaceTextureListener(textureListener);
}
});
}

TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
// open your camera here
Log.d(TAG, "onSurfaceTextureAvailable");
openCamera();
// Intent hdmiService = new Intent(RockchipCamera2.this, HdmiService.class);
// hdmiService.setPackage(getPackageName());
// bindService(hdmiService, conn, Context.BIND_AUTO_CREATE);
}

@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
Log.d(TAG, "onSurfaceTextureSizeChanged");
// Transform your captured image size according to the surface width and height
}

@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
Log.d(TAG, "onSurfaceTextureDestroyed");
return true;
}

@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Log.d(TAG,"onSurfaceTextureUpdated");
/*
* int width = 0; int height = 0; int[] format = JniCameraCall.getFormat(); if
* (format != null && format.length > 0) { width = format[0]; height =
* format[1]; } Log.d(TAG,"width = "+width+",height = "+height);
*/

}
};

// ServiceConnection conn = new ServiceConnection() {
// @Override
// public void onServiceDisconnected(ComponentName name) {
// Log.i(TAG, "onServiceDisconnected");
// }

// @Override
// public void onServiceConnected(ComponentName name, IBinder service) {
// Log.i(TAG, "onServiceConnected");
// // returns an HdmiService instance
// mHdmiService = ((HdmiService.HdmiBinder) service).getService();

// // register a callback to receive HDMI state changes
// mHdmiService.setOnHdmiStatusListener(new HdmiService.OnHdmiStatusListener() {

// @Override
// public void onHdmiStatusChange(boolean isHdmiIn, Size driverDimension) {
// if (mPaused) return;
// Log.i(TAG, "onHdmiStatusChange isHdmiIn = " + isHdmiIn + ",mPaused:" + mPaused);
// imageDimension = driverDimension;
// if (isHdmiIn) {
// openCamera();
// } else {
// closeCamera();
// }
// }
// });

// }
// };

private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
// This is called when the camera is open
Log.d(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}

@Override
public void onDisconnected(CameraDevice camera) {
Log.d(TAG, "onDisconnected");
cameraDevice.close();
}

@Override
public void onError(CameraDevice camera, int error) {
Log.i(TAG, "onError");
cameraDevice.close();
cameraDevice = null;
}
};

final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(RockchipCamera2.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};

protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}

protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}

protected void takePicture() {
if (null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP)
.getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
width = imageDimension.getWidth();
height = imageDimension.getHeight();
Log.d(TAG, "pic size W=" + width + ",H=" + height);
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, 90);
final File file = new File(Environment.getExternalStorageDirectory() + "/pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}

private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request,
TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(RockchipCamera2.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

protected void createCameraPreview() {
try {
Log.d(TAG, "createCameraPreview");
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
Log.d(TAG, "imageDimension.getWidth()=" + imageDimension.getWidth() + ",imageDimension.getHeight()="
+ imageDimension.getHeight());
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
// The camera is already closed
if (null == cameraDevice) {
return;
}
Log.d(TAG, "onConfigured");
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}

@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed");
Toast.makeText(RockchipCamera2.this, "Configuration failed", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

private void openCamera() {
String getHdmiDeviceId = "";
try {
IHdmi service = IHdmi.getService(true);
getHdmiDeviceId = service.getHdmiDeviceId();
service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.i(TAG, "openCamera start getHdmiDeviceId=" + getHdmiDeviceId);
try {
if (manager.getCameraIdList().length == 0) {
Log.i(TAG, "openCamera length == 0");
return;
}
boolean haveHDMI = false;
String hdmiCameraId = "";
String alternativeId = "";//fallback cameraId
for (String cameraId : manager.getCameraIdList()) {
Log.i(TAG, "cameraId:" + cameraId);
if (TextUtils.isEmpty(mAssignCameraId)) {
if (cameraId.equals(getHdmiDeviceId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "haveHDMI cameraId:" + cameraId);
}
} else if (!cameraId.equals(getHdmiDeviceId)) {
alternativeId = cameraId;
if (cameraId.equals(mAssignCameraId)) {
haveHDMI = true;
hdmiCameraId = cameraId;
Log.i(TAG, "have switch HDMI cameraId:" + cameraId);
break;
}
}
}
/*if (TextUtils.isEmpty(hdmiCameraId)
&& !TextUtils.isEmpty(mAssignCameraId) && !TextUtils.isEmpty(alternativeId)) {
haveHDMI = true;
hdmiCameraId = alternativeId;
Log.i(TAG, "have alternative cameraId:"+mAssignCameraId);
}*/
if (!haveHDMI) {
return;
}
CameraCharacteristics characteristics = manager.getCameraCharacteristics(hdmiCameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
//imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
for (Size size : map.getOutputSizes(SurfaceTexture.class)) {
Log.d(TAG, "supported stream size: " + size.toString());
imageDimension = size;
}
Log.d(TAG, "current hdmi input size:" + imageDimension.toString());
if (ActivityCompat.checkSelfPermission(this,
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
return;
}


manager.openCamera(hdmiCameraId, stateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (Exception e) {
e.printStackTrace();
}
Log.i(TAG, "openCamera end");
}


protected void updatePreview() {
if (null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
return;
}
Log.d(TAG, "updatePreview");
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}

private void closeCamera() {
Log.d(TAG, "closeCamera");
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}

@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
@NonNull int[] grantResults) {
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
// close the app
Toast.makeText(RockchipCamera2.this, "Sorry!!!, you can't use this app without granting permission",
Toast.LENGTH_LONG).show();
finish();
}
}
}

@Override
protected void onResume() {
mPaused = false;
super.onResume();
Log.d(TAG, "onResume");

}

@Override
protected void onPause() {
Log.d(TAG, "onPause");
mPaused = true;
super.onPause();
try {
IHdmi service = IHdmi.getService(true);
service.unregisterListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
closeCamera();
// JniCameraCall.closeDevice();
stopBackgroundThread();
if (textureView != null) {
rootView.removeView(textureView);
textureView = null;
}
}

@Override
protected void onDestroy() {
super.onDestroy();
Log.i(TAG, "onDestroy");
}

private void initView() {
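// NOTE: despite their names, the buttons below are currently wired as WebSocket
// test triggers (djTtsText and animation commands) rather than web/app controls.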
stopWeb = findViewById(R.id.stop_web_socket3);
stopWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
// mWSManager.destroyWebSocketServer();
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "缔智元是一家缔造数字人员工的科技企业,致力于综合图像识别、自然语言交互、知识图谱、超写实3D渲染、物联网等前沿技术,助力企业的数字化与智能化变革");
WebSocketManager.getInstance(RockchipCamera2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
startWeb = findViewById(R.id.start_web_socket3);
startWeb.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

}
});
stopApp = findViewById(R.id.kill_app3);
stopApp.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
killApp();
}
});
reboot = findViewById(R.id.reboot3);
reboot.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showBothHandFlat");
WebSocketManager.getInstance(RockchipCamera2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
close = findViewById(R.id.close_camera3);
close.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "waveHand");
WebSocketManager.getInstance(RockchipCamera2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
open = findViewById(R.id.open_camera3);
open.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showRightHandFlat");
WebSocketManager.getInstance(RockchipCamera2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
stopClient = findViewById(R.id.stop_client3);
stopClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {

}
});
startClient = findViewById(R.id.start_client3);
startClient.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "今天天气真好");
WebSocketManager.getInstance(RockchipCamera2.this).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
mWebView = findViewById(R.id.webview3);
mWebView.setBackgroundColor(0);
if(mWebView.getBackground() != null){
mWebView.getBackground().setAlpha(0);
}
// obtain the WebSettings for this WebView
WebSettings webSettings = mWebView.getSettings();

// JavaScript must be enabled if the loaded page interacts with JavaScript
webSettings.setJavaScriptEnabled(true);

// fit the page to the screen (the next two settings work together)
webSettings.setUseWideViewPort(true); // scale content to fit the WebView
webSettings.setLoadWithOverviewMode(true); // zoom out to the screen size

// zoom behaviour
webSettings.setSupportZoom(true); // enable zoom (default true); prerequisite for the next setting
webSettings.setBuiltInZoomControls(true); // built-in zoom controls; false makes the WebView non-zoomable
webSettings.setDisplayZoomControls(false); // hide the native zoom controls

// other details
webSettings.setCacheMode(WebSettings.LOAD_NO_CACHE); // disable the WebView cache
webSettings.setAllowFileAccess(true); // allow file access
webSettings.setJavaScriptCanOpenWindowsAutomatically(true); // allow JS to open new windows
webSettings.setLoadsImagesAutomatically(true); // load images automatically
webSettings.setDefaultTextEncodingName("utf-8"); // default text encoding
webSettings.setDomStorageEnabled(true);

mWebView.loadUrl("http://39.107.77.235:48085"); // WebView.loadUrl() needs an explicit scheme
}

private void killApp() {
android.os.Process.killProcess(android.os.Process.myPid());
}

private void initHdmi(){
mHdmiCallback = new HdmiCallback();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
//JniCameraCall.openDevice();
// try {
// IHdmi service = IHdmi.getService(IHdmi.kInterfaceName,true);

// service.registerListener((IHdmiCallback)mHdmiCallback);
// } catch (RemoteException e) {
// e.printStackTrace();
// }
Log.d(TAG, "remove take pic button");
/*
Button takePictureButton = (Button) findViewById(R.id.btn_takepicture);
assert takePictureButton != null;
takePictureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
*/
createTextureView();
assert textureView != null;
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(RockchipCamera2.this,
Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED
&& ActivityCompat.checkSelfPermission(RockchipCamera2.this,
Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(RockchipCamera2.this,
new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE},
REQUEST_CAMERA_PERMISSION);
return;
}

fullScreen();
}

private void startHdmi(){
if (textureView == null) {
// JniCameraCall.openDevice();
try {
IHdmi service = IHdmi.getService(true);

service.registerListener((IHdmiCallback) mHdmiCallback);
} catch (RemoteException e) {
e.printStackTrace();
}
createTextureView();
}
startBackgroundThread();
}
}
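RockchipCamera2 reads DataUtils.EXTRA_ASSIGN_CAMERA_ID from its launch intent to prefer a specific camera over the auto-detected HDMI device. A launch sketch (the "1" id is a placeholder):

// Sketch: starting the HDMI-in preview with an explicitly assigned camera id.
Intent preview = new Intent(context, RockchipCamera2.class);
preview.putExtra(DataUtils.EXTRA_ASSIGN_CAMERA_ID, "1"); // placeholder camera id
context.startActivity(preview);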

+ 27
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/util/DataUtils.java

@@ -0,0 +1,27 @@
package com.aispeech.nativedemo.rockchip.util;

public class DataUtils {
public static final String INPUT_ID = "com.example.partnersupportsampletvinput/.SampleTvInputService/HW0";
public static final long LIMIT_DOUBLE_CLICK_TIME = 1000;
public static final long START_TV_REVIEW_DELAY = 500;
public static final long MAIN_REQUEST_SCREENSHOT_DELAYED = 1000;
public static final long MAIN_ENABLE_SETTINGS_DEALY = 1000;

public static final String EXTRA_ASSIGN_CAMERA_ID = "extra_assign_cameraid";

public static final int VIDEO_RECORD_BIT_RATE = 6000000;
public static final int VIDEO_RECORD_FRAME_RATE = 30;
public static final int AUDIO_RECORD_TOTAL_NUM_TRACKS = 1;
public static final int AUDIO_RECORD_BIT_RATE = 16;
public static final int AUDIO_RECORD_SAMPLE_RATE = 44100;

public static final String HDMIIN_AUDIO_PACKAGE_NAME = "com.rockchip.rkhdmiinaudio";
public static final String HDMIIN_AUDIO_CLS_NAME = "com.rockchip.rkhdmiinaudio.HdmiInAudioService";

public static final String STORAGE_PATH_NAME = "hdmiin";

public static final String PERSIST_HDMIRX_EDID = "persist.sys.hdmirx.edid";
public static final String HDMIRX_EDID_1 = "1";
public static final String HDMIRX_EDID_2 = "2";

}

+ 23
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/util/JniCameraCall.java

@@ -0,0 +1,23 @@
package com.aispeech.nativedemo.rockchip.util;

import android.util.Log;

public class JniCameraCall {
static {
Log.d("JNI" ,"JNI CAMERA CALL init");
System.loadLibrary("hdmiinput_jni");
}
/*
public static native int[] get(double x, double y);
public static native int[] getOther(double x, double y);
public static native boolean isSupportHDR();
public static native void setHDREnable(int enable);
public static native int[] getEetf(float maxDst, float minDst);
public static native int[] getOetf(float maxDst, float minDst);
public static native int[] getMaxMin(float maxDst, float minDst);
*/
public static native void openDevice();
public static native void closeDevice();
public static native int[] getFormat();
}
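Judging by how HdmiService consumes it, getFormat() returns at least {width, height, connected flag}. A one-shot probe sketch under that assumption:

// Sketch: probing HDMI-in state via the JNI bridge.
// Index meanings inferred from HdmiService: [0]=width, [1]=height, [2]=connected.
int[] format = JniCameraCall.getFormat();
if (format != null && format.length >= 3 && format[2] != 0) {
    Log.d("HdmiProbe", "HDMI source present: " + format[0] + "x" + format[1]);
}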


+ 95
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/util/SystemPropertiesProxy.java

@@ -0,0 +1,95 @@
/*
* Copyright (C) 2015 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package com.aispeech.nativedemo.rockchip.util;

import android.util.Log;

import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

/**
* Proxy class that gives an access to a hidden API {@link android.os.SystemProperties#getBoolean}.
*/
public class SystemPropertiesProxy {
private static final String TAG = "SystemPropertiesProxy";

private SystemPropertiesProxy() {}

public static boolean getBoolean(String key, boolean def) throws IllegalArgumentException {
try {
Class SystemPropertiesClass = Class.forName("android.os.SystemProperties");
Method getBooleanMethod =
SystemPropertiesClass.getDeclaredMethod(
"getBoolean", String.class, boolean.class);
getBooleanMethod.setAccessible(true);
return (boolean) getBooleanMethod.invoke(SystemPropertiesClass, key, def);
} catch (InvocationTargetException
| IllegalAccessException
| NoSuchMethodException
| ClassNotFoundException e) {
Log.e(TAG, "Failed to invoke SystemProperties.getBoolean()", e);
}
return def;
}

public static int getInt(String key, int def) throws IllegalArgumentException {
try {
Class SystemPropertiesClass = Class.forName("android.os.SystemProperties");
Method getIntMethod =
SystemPropertiesClass.getDeclaredMethod("getInt", String.class, int.class);
getIntMethod.setAccessible(true);
return (int) getIntMethod.invoke(SystemPropertiesClass, key, def);
} catch (InvocationTargetException
| IllegalAccessException
| NoSuchMethodException
| ClassNotFoundException e) {
Log.e(TAG, "Failed to invoke SystemProperties.getInt()", e);
}
return def;
}

public static String getString(String key, String def) throws IllegalArgumentException {
try {
Class SystemPropertiesClass = Class.forName("android.os.SystemProperties");
Method getIntMethod =
SystemPropertiesClass.getDeclaredMethod("get", String.class, String.class);
getIntMethod.setAccessible(true);
return (String) getIntMethod.invoke(SystemPropertiesClass, key, def);
} catch (InvocationTargetException
| IllegalAccessException
| NoSuchMethodException
| ClassNotFoundException e) {
Log.e(TAG, "Failed to invoke SystemProperties.get()", e);
}
return def;
}

public static void set(String key, String value) throws IllegalArgumentException {
try {
Class<?> SystemPropertiesClass = Class.forName("android.os.SystemProperties");
Method setMethod =
SystemPropertiesClass.getDeclaredMethod("set", String.class, String.class);
setMethod.setAccessible(true);
setMethod.invoke(SystemPropertiesClass, key, value);
} catch (InvocationTargetException
| IllegalAccessException
| NoSuchMethodException
| ClassNotFoundException e) {
Log.e(TAG, "Failed to invoke SystemProperties.set()", e);
}
}
}
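
Typical use looks like the following; the property keys here are invented for illustration, and set() additionally requires system privileges (SELinux permitting):

// Hypothetical keys, for illustration only.
boolean debug = SystemPropertiesProxy.getBoolean("persist.sys.hdmiin.debug", false);
int timeoutMs = SystemPropertiesProxy.getInt("persist.sys.hdmiin.timeout", 3000);
String mode = SystemPropertiesProxy.getString("persist.sys.hdmiin.mode", "auto");
SystemPropertiesProxy.set("persist.sys.hdmiin.mode", "1080p60");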

+ 447
- 0
app/src/main/java/com/aispeech/nativedemo/rockchip/widget/RoundMenu.java

@@ -0,0 +1,447 @@
package com.aispeech.nativedemo.rockchip.widget;

import android.animation.Animator;
import android.animation.AnimatorListenerAdapter;
import android.animation.ArgbEvaluator;
import android.animation.ValueAnimator;
import android.content.Context;
import android.content.res.TypedArray;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Outline;
import android.graphics.Paint;
import android.graphics.Path;
import android.graphics.PathMeasure;
import android.graphics.Point;
import android.graphics.Rect;
import android.graphics.RectF;
import android.graphics.drawable.Drawable;
import android.util.AttributeSet;
import android.util.TypedValue;
import android.view.MotionEvent;
import android.view.View;
import android.view.ViewGroup;
import android.view.ViewOutlineProvider;
import android.view.animation.OvershootInterpolator;

import com.aispeech.nativedemo.R;
import com.aispeech.nativedemo.rockchip.util.DataUtils;

public class RoundMenu extends ViewGroup {
public static final int STATE_COLLAPSE = 0;//collapsed state
public static final int STATE_EXPAND = 1;//expanded state

private int collapsedRadius;//radius when collapsed
private int expandedRadius;//radius when expanded
private int mRoundColor;//color when collapsed / outer-ring color when expanded
private int mCenterColor;//color of the center circle when expanded
private Drawable mCenterDrawable;//center icon
private int mItemWidth;//width and height of each menu item
private float expandProgress = 0;//current expand progress (0-1)
private int state; //current state (expanded / collapsed)
private int mDuration; //duration of the expand/collapse animation
private int mItemAnimIntervalTime;//animation delay between child views
private Point center;
private Paint mRoundPaint;
private Paint mCenterPaint;
private OvalOutline outlineProvider;
private ValueAnimator mExpandAnimator;
private ValueAnimator mColorAnimator;

private onStateListener mStateListener;
private boolean mCanListenerAnim;
private boolean mNeedExtend;
private long mLastClickTime;

private int mTempTransX = 10;

public RoundMenu(Context context) {
this(context, null);
}

public RoundMenu(Context context, AttributeSet attrs) {
this(context, attrs, 0);
}

public RoundMenu(Context context, AttributeSet attrs, int defStyleAttr) {
this(context, attrs, defStyleAttr, 0);
}

public RoundMenu(Context context, AttributeSet attrs, int defStyleAttr, int defStyleRes) {
super(context, attrs, defStyleAttr, defStyleRes);
init(context, attrs);
}

private void init(Context context, AttributeSet attrs) {
TypedArray ta = context.obtainStyledAttributes(attrs, R.styleable.RoundelMenu);
collapsedRadius = ta.getDimensionPixelSize(R.styleable.RoundelMenu_round_menu_collapsedRadius, dp2px(22));
expandedRadius = ta.getDimensionPixelSize(R.styleable.RoundelMenu_round_menu_expandedRadius, dp2px(84));
mRoundColor = ta.getColor(R.styleable.RoundelMenu_round_menu_roundColor, Color.GRAY);
mCenterColor = ta.getColor(R.styleable.RoundelMenu_round_menu_centerColor, Color.parseColor("#ffff8800"));
mDuration = ta.getInteger(R.styleable.RoundelMenu_round_menu_duration, 400);
mItemAnimIntervalTime = ta.getInteger(R.styleable.RoundelMenu_round_menu_item_anim_delay, 50);
mItemWidth = ta.getDimensionPixelSize(R.styleable.RoundelMenu_round_menu_item_width, dp2px(22));
ta.recycle();

if (collapsedRadius > expandedRadius) {
throw new IllegalArgumentException("expandedRadius must be bigger than collapsedRadius");
}

mRoundPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mRoundPaint.setColor(mRoundColor);
mRoundPaint.setStyle(Paint.Style.FILL);
mCenterPaint = new Paint(Paint.ANTI_ALIAS_FLAG);
mCenterPaint.setColor(mRoundColor);
mCenterPaint.setStyle(Paint.Style.FILL);
setWillNotDraw(false);

outlineProvider = new OvalOutline();
setElevation(dp2px(5));
center = new Point();
mCenterDrawable = getResources().getDrawable(R.drawable.ic_close);
state = STATE_COLLAPSE;

initAnim();
}

private void initAnim() {
mExpandAnimator = ValueAnimator.ofFloat(0, 0);
mExpandAnimator.setInterpolator(new OvershootInterpolator());
mExpandAnimator.setDuration(mDuration);
mExpandAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
expandProgress = (float) animation.getAnimatedValue();
mRoundPaint.setAlpha(Math.min(255, (int) (expandProgress * 255)));

invalidateOutline();
invalidate();
}
});

mColorAnimator = ValueAnimator.ofObject(new ArgbEvaluator(), mRoundColor, mCenterColor);
mColorAnimator.setDuration(mDuration);
mColorAnimator.addUpdateListener(new ValueAnimator.AnimatorUpdateListener() {
@Override
public void onAnimationUpdate(ValueAnimator animation) {
mCenterPaint.setColor((Integer) animation.getAnimatedValue());
}

});
mColorAnimator.addListener(new AnimatorListenerAdapter() {
@Override
public void onAnimationCancel(Animator animation) {
super.onAnimationCancel(animation);
if (mCanListenerAnim && null != mStateListener) {
mStateListener.collapseEnd();
}
}

@Override
public void onAnimationEnd(Animator animation) {
super.onAnimationEnd(animation);
if (mCanListenerAnim && null != mStateListener) {
mStateListener.collapseEnd();
}
}
});
}


public float getExpandProgress() {
return expandProgress;
}

public void collapse(boolean animate) {
state = STATE_COLLAPSE;
for (int i = 0; i < getChildCount(); i++) {
getChildAt(i).setVisibility(View.GONE);
}
invalidate();
if (animate) {
startCollapseAnimation();
}
}


public void expand(boolean animate) {
state = STATE_EXPAND;
for (int i = 0; i < getChildCount(); i++) {
getChildAt(i).setVisibility(View.VISIBLE);
}
invalidate();
if (animate) {
startExpandAnimation();
} else {
for (int i = 0; i < getChildCount(); i++) {
getChildAt(i).setAlpha(1);
}
}
}

@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec) {
super.onMeasure(widthMeasureSpec, heightMeasureSpec);
int width = MeasureSpec.getSize(widthMeasureSpec);
int height = MeasureSpec.getSize(heightMeasureSpec);
setMeasuredDimension(width, height);
measureChildren(widthMeasureSpec, heightMeasureSpec);
}

@Override
protected void onLayout(boolean changed, int l, int t, int r, int b) {
if (getChildCount() == 0) {
return;
}
calculateMenuItemPosition();
for (int i = 0; i < getChildCount(); i++) {
View item = getChildAt(i);
item.layout(l + (int) item.getX(),
t + (int) item.getY(),
l + (int) item.getX() + item.getMeasuredWidth(),
t + (int) item.getY() + item.getMeasuredHeight());
}
}

@Override
protected void onFinishInflate() {
super.onFinishInflate();
for (int i = 0; i < getChildCount(); i++) {
View item = getChildAt(i);
item.setVisibility(View.GONE);
item.setAlpha(0);
item.setScaleX(1);
item.setScaleY(1);
}
}

@Override
protected void onAttachedToWindow() {
super.onAttachedToWindow();
mNeedExtend = true;
}

@Override
protected void onDetachedFromWindow() {
super.onDetachedFromWindow();
mNeedExtend = false;
}

@Override
public boolean onTouchEvent(MotionEvent event) {
Point touchPoint = new Point();
touchPoint.set((int) event.getX(), (int) event.getY());
int action = event.getActionMasked();
switch (action) {
case MotionEvent.ACTION_DOWN: {
long clickTime = System.currentTimeMillis();
if (clickTime - mLastClickTime < DataUtils.LIMIT_DOUBLE_CLICK_TIME) {
return super.onTouchEvent(event);
}
mLastClickTime = clickTime;
//distance between the touch point and the center
double distance = getPointsDistance(touchPoint, center);
if (state == STATE_EXPAND) {
//expanded: the touch is outside the menu ring or inside the center circle
if (distance > (collapsedRadius + (expandedRadius - collapsedRadius) * expandProgress)
|| distance < collapsedRadius) {
collapse(true);//collapse the menu
return true;
}
//expanded: the touch is on the item ring, so don't consume the event
return false;
} else {
//collapsed: the touch is inside the center circle
if (distance < collapsedRadius) {
expand(true);//expand the menu
return true;
}
//collapsed: the touch is outside the center circle, so don't consume the event
return false;
}
}
}
return super.onTouchEvent(event);
}


@Override
protected void onSizeChanged(int w, int h, int oldw, int oldh) {
super.onSizeChanged(w, h, oldw, oldh);
setOutlineProvider(outlineProvider);
int x, y;
x = w / 2;
y = h / 2;
center.set(x, y);
//give the center icon 10dp of padding
mCenterDrawable.setBounds(center.x + mTempTransX - (collapsedRadius - dp2px(10)),
center.y - (collapsedRadius - dp2px(10)),
center.x + mTempTransX + (collapsedRadius - dp2px(10)),
center.y + (collapsedRadius - dp2px(10))
);
}

@Override
protected void onDraw(Canvas canvas) {
super.onDraw(canvas);
//draw the expanding outer circle
if (expandProgress > 0f) {
canvas.drawCircle(center.x + mTempTransX, center.y, collapsedRadius + (expandedRadius - collapsedRadius) * expandProgress, mRoundPaint);
}
//draw the center circle
canvas.drawCircle(center.x + mTempTransX, center.y, collapsedRadius + (collapsedRadius * .2f * expandProgress), mCenterPaint);
int count = canvas.saveLayer(0, 0, getWidth(), getHeight(), null, Canvas.ALL_SAVE_FLAG);
//draw the center icon, rotated with the expand progress
canvas.rotate(90 * expandProgress, center.x + mTempTransX, center.y);
mCenterDrawable.draw(canvas);
canvas.restoreToCount(count);

if (mNeedExtend) {
mNeedExtend = false;
expand(true);
}
}

/**
* Expand animation
*/
void startExpandAnimation() {
mExpandAnimator.setFloatValues(getExpandProgress(), 1f);
mExpandAnimator.start();

mColorAnimator.setObjectValues(mColorAnimator.getAnimatedValue() == null ? mRoundColor : mColorAnimator.getAnimatedValue(), mCenterColor);
mCanListenerAnim = false;
mColorAnimator.start();

int delay = mItemAnimIntervalTime;
for (int i = 0; i < getChildCount(); i++) {
getChildAt(i).animate()
.setStartDelay(delay)
.setDuration(mDuration)
.alphaBy(0f)
.scaleXBy(0f)
.scaleYBy(0f)
.scaleX(1f)
.scaleY(1f)
.alpha(1f)
.start();
delay += mItemAnimIntervalTime;
}
}

/**
* Collapse animation
*/
void startCollapseAnimation() {
mExpandAnimator.setFloatValues(getExpandProgress(), 0f);
mExpandAnimator.start();

mColorAnimator.setObjectValues(mColorAnimator.getAnimatedValue() == null ? mCenterColor : mColorAnimator.getAnimatedValue(), mRoundColor);
mCanListenerAnim = true;
mColorAnimator.start();

int delay = mItemAnimIntervalTime;
for (int i = getChildCount() - 1; i >= 0; i--) {
getChildAt(i).animate()
.setStartDelay(delay)
.setDuration(mDuration)
.alpha(0)
.scaleX(0)
.scaleY(0)
.start();
delay += mItemAnimIntervalTime;
}
}


/**
* Compute the position of each menu item along the ring
*/
private void calculateMenuItemPosition() {
float itemRadius = (expandedRadius + collapsedRadius) / 2f;
RectF area = new RectF(
center.x - itemRadius,
center.y - itemRadius,
center.x + itemRadius,
center.y + itemRadius);
Path path = new Path();
path.addArc(area, 90, 360);
PathMeasure measure = new PathMeasure(path, false);
float len = measure.getLength();
int divisor = getChildCount();
float divider = len / divisor;

for (int i = 0; i < getChildCount(); i++) {
float[] itemPoints = new float[2];
measure.getPosTan(i * divider + divider * 0.5f, itemPoints, null);
View item = getChildAt(i);
item.setX((int) itemPoints[0] - mItemWidth / 2);
item.setY((int) itemPoints[1] - mItemWidth / 2);
}
}

public int getState() {
return state;
}

public void setExpandedRadius(int expandedRadius) {
this.expandedRadius = expandedRadius;
requestLayout();
}


public void setCollapsedRadius(int collapsedRadius) {
this.collapsedRadius = collapsedRadius;
requestLayout();
}

public void setRoundColor(int color) {
this.mRoundColor = color;
mRoundPaint.setColor(mRoundColor);
invalidate();
}

public void setCenterColor(int color) {
this.mCenterColor = color;
mCenterPaint.setColor(color);
invalidate();
}

public class OvalOutline extends ViewOutlineProvider {

public OvalOutline() {
super();
}

@Override
public void getOutline(View view, Outline outline) {
int radius = (int) (collapsedRadius + (expandedRadius - collapsedRadius) * expandProgress);
Rect area = new Rect(
center.x - radius,
center.y - radius,
center.x + radius,
center.y + radius);
outline.setRoundRect(area, radius);
}
}


public static double getPointsDistance(Point a, Point b) {
int dx = b.x - a.x;
int dy = b.y - a.y;
return Math.sqrt(dx * dx + dy * dy);
}

public int dp2px(float dpVal) {
return (int) TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, dpVal,
getContext().getResources().getDisplayMetrics());
}

public void setOnStateListener(onStateListener listener) {
mStateListener = listener;
}

public interface onStateListener {
public void collapseEnd();
}
}
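
A short wiring sketch (the id and log tag are hypothetical): every child added to a RoundMenu becomes one item on the expanded ring, and onStateListener.collapseEnd() fires when the collapse animation ends or is cancelled.

// Illustration only; R.id.round_menu is a made-up id.
RoundMenu menu = findViewById(R.id.round_menu);
menu.setOnStateListener(() -> Log.d("RoundMenu", "collapse finished"));
if (menu.getState() == RoundMenu.STATE_COLLAPSE) {
    menu.expand(true);   // animated: ring grows, children fade/scale in one by one
} else {
    menu.collapse(true); // animated: children fade out in reverse order, ring shrinks
}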

+ 2
- 1
app/src/main/java/com/aispeech/nativedemo/shape/ShapeManager.java

@@ -33,7 +33,8 @@ import okhttp3.RequestBody;
public class ShapeManager {
private static final String TAG = "ShapeManager";
private static ShapeManager mInstance;
private String url = "http://192.168.33.43:8999/RPC2";
private String url = "http://192.168.10.103:8999/RPC2";
// private String url = "http://192.168.10.103:8999";

private ShapeManager() {



+ 36
- 0
app/src/main/java/com/aispeech/nativedemo/utils/ResourceUtils.java

@@ -1,9 +1,15 @@
package com.aispeech.nativedemo.utils;

import android.content.Context;
import android.content.res.AssetManager;
import android.content.res.TypedArray;
import android.graphics.drawable.Drawable;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

public class ResourceUtils {
private static Context mContext;
@@ -43,5 +49,35 @@ public class ResourceUtils {
public static String[] getStringArrayRes(int resId) {
return mContext != null ? mContext.getResources().getStringArray(resId) : null;
}

/**
* Copy a file out of the APK assets.
* @param context context used to resolve the AssetManager
* @param assetName name of the asset to copy
* @param filePath destination directory for the copy
*/
public static void copyFromAssets(Context context, String assetName, String filePath) {
AssetManager assetManager = context.getAssets();
try {
String[] list = assetManager.list("");
if (list == null) {
return;
}
for (String item : list) {
if (item.equals(assetName)) {
// try-with-resources closes both streams even if the copy fails midway
try (FileOutputStream fileOutputStream = new FileOutputStream(new File(filePath, item));
BufferedInputStream bufferedInputStream = new BufferedInputStream(assetManager.open(item))) {
int len;
byte[] buf = new byte[1024];
while ((len = bufferedInputStream.read(buf)) > 0) {
fileOutputStream.write(buf, 0, len);
}
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
}


}
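
Example call (the asset name is hypothetical; the destination directory must exist and be writable):

// Illustration only: copy a bundled asset into app-private storage.
File dir = context.getFilesDir();
ResourceUtils.copyFromAssets(context, "example.db3", dir.getAbsolutePath());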

+ 44
- 8
app/src/main/java/com/aispeech/nativedemo/widget/AlertWindowView.java

@@ -49,6 +49,9 @@ import com.aispeech.nativedemo.utils.Utils;
import com.lenovo.lefacesdk.LefaceEngine;
import com.lenovo.lefacesdk.MultiAtt;

import org.json.JSONException;
import org.json.JSONObject;

import java.io.IOException;
import java.util.List;

@@ -167,9 +170,25 @@ public class AlertWindowView extends ConstraintLayout implements FaceManager.Upd
@Override
public void onClick(View v) {
// rebootDev();
selectType = 1;
reboot.setText("ChatGpt已选择");
open.setText("选择GLM-6B");
// selectType = 1;
// reboot.setText("ChatGpt已选择");
// open.setText("选择GLM-6B");
JSONObject jo = new JSONObject();
try {
jo.put("type", "djTtsText");
jo.put("data", "缔智元是一家缔造数字人员工的科技企业,致力于综合图像识别、自然语言交互、知识图谱、超写实3D渲染、物联网等前沿技术,助力企业的数字化与智能化变革");
WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
// JSONObject jo = new JSONObject();
// try {
// jo.put("type", "djBackground");
// jo.put("data", "https://www.baidu.com/img/PCtm_d9c8750bed0b3c7d089fa7d55720d6cf.png");
// WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
// } catch (JSONException e) {
// throw new RuntimeException(e);
// }
}
});
close = findViewById(R.id.close_camera);
@@ -184,9 +203,17 @@ public class AlertWindowView extends ConstraintLayout implements FaceManager.Upd
// str += skill.toString();
// }
// mWSManager.sendMsg(str);
// try {
// DDS.getInstance().getAgent().stopDialog();
// } catch (DDSNotInitCompleteException e) {
// throw new RuntimeException(e);
// }
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "waveHand");
WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
@@ -229,6 +256,14 @@ public class AlertWindowView extends ConstraintLayout implements FaceManager.Upd
// });
// }
// }).start();
JSONObject jo = new JSONObject();
try {
jo.put("type", "animation");
jo.put("data", "showRightHandFlat");
WebSocketManager.getInstance(MainActivity.instance).sendMsg(jo.toString());
} catch (JSONException e) {
throw new RuntimeException(e);
}
}
});
stopClient = findViewById(R.id.stop_client);
@@ -253,8 +288,10 @@ public class AlertWindowView extends ConstraintLayout implements FaceManager.Upd
}
});
mWebView = findViewById(R.id.webview);
// mWebView.setBackgroundColor(0);
// mWebView.getBackground().setAlpha(0);
mWebView.setBackgroundColor(0);
if(mWebView.getBackground() != null){
mWebView.getBackground().setAlpha(0);
}
//get the WebView's WebSettings instance
WebSettings webSettings = mWebView.getSettings();

@@ -435,7 +472,6 @@ public class AlertWindowView extends ConstraintLayout implements FaceManager.Upd
try {
Log.e(TAG, "-----------------detect face----------------");
textureBitmap = mTextureView.getBitmap();

List<MultiAtt> results = mFaceManager.detectMultiAtt(textureBitmap);
if (results != null && results.size() > 0) {
mFaceManager.filterPersonForFeatExtract(results);
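
All three hunks above send the same {type, data} JSON shape through WebSocketManager; a small helper like the following (hypothetical, not in the commit) would remove the repetition:

// Sketch only: build one {type, data} message for WebSocketManager.sendMsg().
private static String buildMessage(String type, String data) throws JSONException {
    JSONObject jo = new JSONObject();
    jo.put("type", type); // e.g. "animation", "djTtsText", "djBackground"
    jo.put("data", data); // e.g. "waveHand", or the text to speak
    return jo.toString();
}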


+ 10
- 0
app/src/main/jni/Android.mk

@@ -0,0 +1,10 @@
LOCAL_PATH := $(call my-dir)

include $(CLEAR_VARS)
TARGET_PLATFORM := android-3
LOCAL_MODULE := hdmiinput_jni
LOCAL_SRC_FILES := $(LOCAL_PATH)/native.cpp
LOCAL_LDLIBS := -lm -llog
LOCAL_MODULE_TAGS := optional
LOCAL_ALLOW_UNDEFINED_SYMBOLS := true
include $(BUILD_SHARED_LIBRARY)

+ 2
- 0
app/src/main/jni/Application.mk

@@ -0,0 +1,2 @@
APP_STL := c++_static
APP_ABI := armeabi-v7a

+ 199
- 0
app/src/main/jni/native.cpp

@@ -0,0 +1,199 @@
/*
* Copyright (C) 2008 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

#define APK_VERSION "V1.3"
#define LOG_TAG "HdmiInput-navtive"
//#define LOG_NDEBUG 0

#include <android/log.h>
#include <jni.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
//#include <utils/Log.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <linux/videodev2.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <linux/v4l2-subdev.h>

//#define LOGE(msg,...) ALOGE("%s(%d): " msg ,__FUNCTION__,__LINE__,##__VA_ARGS__)
//#define LOGD(msg,...) ALOGD("%s(%d): " msg ,__FUNCTION__,__LINE__,##__VA_ARGS__)
//#define LOGV(msg,...) ALOGV("%s(%d): " msg ,__FUNCTION__,__LINE__,##__VA_ARGS__)

#define LOG(LEVEL,...) __android_log_print(LEVEL, LOG_TAG, __VA_ARGS__)
#define LOGI(...) LOG(ANDROID_LOG_INFO,__VA_ARGS__)
#define LOGV(...) LOG(ANDROID_LOG_VERBOSE,__VA_ARGS__)
#define LOGE(...) LOG(ANDROID_LOG_ERROR, __VA_ARGS__)
#define LOGD(...) LOG(ANDROID_LOG_DEBUG, __VA_ARGS__)

static int camFd = -1;

// Fills format[] with {width, height, signal-present} read from the HDMI-in V4L2 subdevice.
static void getDeviceFormat(int *format)
{
struct v4l2_control control;
memset(&control, 0, sizeof(struct v4l2_control));
control.id = V4L2_CID_DV_RX_POWER_PRESENT;
int err = ioctl(camFd, VIDIOC_G_CTRL, &control);
if (err < 0) {
LOGV("Set POWER_PRESENT failed ,%d(%s)", errno, strerror(errno));
}

unsigned int noSignalAndSync = 0;
ioctl(camFd, VIDIOC_G_INPUT, &noSignalAndSync);
LOGV("noSignalAndSync ? %s",noSignalAndSync?"YES":"NO");

struct v4l2_dv_timings dv_timings;
memset(&dv_timings, 0 ,sizeof(struct v4l2_dv_timings));
err = ioctl(camFd, VIDIOC_SUBDEV_QUERY_DV_TIMINGS, &dv_timings);
if (err < 0) {
LOGV("Set VIDIOC_SUBDEV_QUERY_DV_TIMINGS failed ,%d(%s)", errno, strerror(errno));
}

format[0] = dv_timings.bt.width;
format[1] = dv_timings.bt.height;
format[2] = control.value && !noSignalAndSync;
}

static jintArray getFormat(JNIEnv *env, jobject thiz)
{
(void)thiz;
jintArray array = env->NewIntArray(3);
jint *result = new jint[3];
getDeviceFormat(result);
env->SetIntArrayRegion(array, 0, 3, result);
delete[] result;
return array;
}

static void openDevice(JNIEnv *env, jobject thiz)
{
(void)env;
(void)thiz;

char video_name[64];
memset(video_name, 0, sizeof(video_name));
strcat(video_name, "/dev/v4l-subdev2");

camFd = open(video_name, O_RDWR);
if (camFd < 0) {
LOGE("open %s failed,erro=%s",video_name,strerror(errno));
} else {
LOGD("open %s success,fd=%d",video_name,camFd);
}
}

static void closeDevice(JNIEnv *env, jobject thiz)
{
(void)env;
(void)thiz;
LOGD("close device");
if (camFd >= 0) {
close(camFd);
camFd = -1;
}
}

static const char *classPathName = "com/aispeech/nativedemo/rockchip/util/JniCameraCall";

static JNINativeMethod methods[] = {
{"getFormat", "()[I", (void *)getFormat},
{"openDevice", "()V", (void *)openDevice},
{"closeDevice", "()V", (void *)closeDevice},
};

/*
* Register several native methods for one class.
*/
static int registerNativeMethods(JNIEnv *env, const char *className,
JNINativeMethod *gMethods, int numMethods)
{
jclass clazz;

clazz = env->FindClass(className);
if (clazz == NULL)
{
LOGE("Native registration unable to find class '%s'", className);
return JNI_FALSE;
}
if (env->RegisterNatives(clazz, gMethods, numMethods) < 0)
{
LOGE("RegisterNatives failed for '%s'", className);
return JNI_FALSE;
}

return JNI_TRUE;
}

/*
* Register native methods for all classes we know about.
*
* returns JNI_TRUE on success.
*/
static int registerNatives(JNIEnv *env)
{
if (!registerNativeMethods(env, classPathName,
methods, sizeof(methods) / sizeof(methods[0])))
{
return JNI_FALSE;
}

return JNI_TRUE;
}

// ----------------------------------------------------------------------------

/*
* This is called by the VM when the shared library is first loaded.
*/

typedef union {
JNIEnv *env;
void *venv;
} UnionJNIEnvToVoid;

jint JNI_OnLoad(JavaVM *vm, void *reserved)
{
(void)reserved;
UnionJNIEnvToVoid uenv;
uenv.venv = NULL;
jint result = -1;
JNIEnv *env = NULL;

LOGI("JNI_OnLoad");
LOGI("Apk Version: %s", APK_VERSION);

if (vm->GetEnv(&uenv.venv, JNI_VERSION_1_4) != JNI_OK)
{
LOGE("ERROR: GetEnv failed");
goto bail;
}
env = uenv.env;

if (registerNatives(env) != JNI_TRUE)
{
LOGE("ERROR: registerNatives failed");
goto bail;
}

result = JNI_VERSION_1_4;

bail:
return result;
}


BIN
app/src/main/res/drawable/ic_close.png

Width: 48  |  Height: 48  |  Size: 1.9 KiB

+ 19
- 11
app/src/main/res/layout/activity_main.xml

@@ -4,22 +4,30 @@
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity">
tools:context=".MainActivity"
android:id="@+id/rootView">

<TextureView
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:rotation="90"
android:rotationY="180"
android:id="@+id/texture_hdmi"
android:layout_width="2000dp"
android:layout_height="1125dp" />

<WebView
android:id="@+id/webview"
android:layout_width="2160px"
android:layout_height="3800px" />

<com.aispeech.nativedemo.widget.CameraTextureView
android:id="@+id/texture"
android:layout_width="2048px"
android:layout_height="1536px" />

<WebView
android:id="@+id/webview"
android:layout_width="match_parent"
android:layout_height="match_parent"
app:layout_constraintTop_toBottomOf="@id/texture"
app:layout_constraintLeft_toLeftOf="@id/texture"
app:layout_constraintRight_toRightOf="@id/texture"
app:layout_constraintBottom_toBottomOf="@id/texture" />
android:layout_height="1536px"
app:layout_constraintTop_toBottomOf="@id/webview"/>

<ImageView
android:id="@+id/image"


+ 39
- 30
app/src/main/res/layout/activity_main_2.xml

@@ -6,80 +6,89 @@
android:layout_height="match_parent"
tools:context=".MainActivity2">

<TextureView
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:rotation="90"
android:rotationY="180"
android:id="@+id/texture_hdmi2"
android:layout_width="2000dp"
android:layout_height="1125dp" />

<WebView
android:id="@+id/webview2"
android:layout_width="1080px"
android:layout_height="1850px" />

<Button
android:id="@+id/close_camera"
android:id="@+id/close_camera2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="close"
android:text="动作:挥手"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
android:visibility="gone"/>
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/open_camera"
android:id="@+id/open_camera2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="open"
android:text="动作:右手平摊"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
android:visibility="gone"/>
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/reboot"
android:id="@+id/reboot2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="reboot"
android:text="动作:双手平摊"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent"
android:visibility="gone"/>
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/stop_web_socket"
android:id="@+id/stop_web_socket2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="stop server"
android:text="长文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
android:visibility="gone"/>
app:layout_constraintLeft_toLeftOf="parent" />

<Button
android:id="@+id/start_web_socket"
android:id="@+id/start_web_socket2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="start server"
android:text="短文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:visibility="gone"/>

<Button
android:id="@+id/kill_app"
android:id="@+id/kill_app2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="kill app"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:visibility="gone"/>
app:layout_constraintRight_toRightOf="parent" />

<Button
android:id="@+id/stop_client"
android:id="@+id/stop_client2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="stop client"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="@id/stop_web_socket"
app:layout_constraintRight_toRightOf="@id/kill_app"
android:visibility="gone"/>
app:layout_constraintLeft_toLeftOf="@id/stop_web_socket2"
app:layout_constraintRight_toRightOf="@id/kill_app2" />

<Button
android:id="@+id/start_client"
android:id="@+id/start_client2"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="start client"
android:text="短文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="@id/kill_app"
app:layout_constraintRight_toRightOf="@id/start_web_socket"
android:visibility="gone"/>
app:layout_constraintLeft_toLeftOf="@id/kill_app2"
app:layout_constraintRight_toRightOf="@id/start_web_socket2" />

</androidx.constraintlayout.widget.ConstraintLayout>

+ 27
- 0
app/src/main/res/layout/activity_main_chip.xml

@@ -0,0 +1,27 @@
<?xml version="1.0" encoding="utf-8"?>
<!--
~ Copyright (C) 2015 The Android Open Source Project
~
~ Licensed under the Apache License, Version 2.0 (the "License");
~ you may not use this file except in compliance with the License.
~ You may obtain a copy of the License at
~
~ http://www.apache.org/licenses/LICENSE-2.0
~
~ Unless required by applicable law or agreed to in writing, software
~ distributed under the License is distributed on an "AS IS" BASIS,
~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
~ See the License for the specific language governing permissions and
~ limitations under the License.
-->

<!-- Foreground is the transparent color. Without the foreground, UIs above SurfaceView
sometimes show some artifacts. -->
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:id="@+id/root_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:foreground="@android:color/transparent"
android:keepScreenOn="true">

</RelativeLayout>

+ 107
- 0
app/src/main/res/layout/activity_rockchip_camera2.xml

@@ -0,0 +1,107 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
xmlns:app="http://schemas.android.com/apk/res-auto"
android:id="@+id/root_view"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
tools:context=".rockchip.RockchipCamera2">

<TextureView
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintBottom_toBottomOf="parent"
android:rotation="90"
android:rotationY="180"
android:id="@+id/texture3"
android:layout_width="2000dp"
android:layout_height="1125dp" />
<!-- 1460 820-->

<!--<Button
android:id="@+id/btn_takepicture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="16dp"
android:layout_marginTop="16dp"
android:text="@string/take_picture" />-->

<WebView
android:id="@+id/webview3"
android:layout_width="1080px"
android:layout_height="1850px" />

<Button
android:id="@+id/close_camera3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="动作:挥手"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/open_camera3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="动作:右手平摊"
app:layout_constraintRight_toRightOf="parent"
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/reboot3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="动作:双手平摊"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintTop_toTopOf="parent" />

<Button
android:id="@+id/stop_web_socket3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="长文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent" />

<Button
android:id="@+id/start_web_socket3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="短文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintRight_toRightOf="parent"
android:visibility="gone"/>

<Button
android:id="@+id/kill_app3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="kill app"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="parent"
app:layout_constraintRight_toRightOf="parent" />

<Button
android:id="@+id/stop_client3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="stop client"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="@id/stop_web_socket3"
app:layout_constraintRight_toRightOf="@id/kill_app3" />

<Button
android:id="@+id/start_client3"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="短文本"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintLeft_toLeftOf="@id/kill_app3"
app:layout_constraintRight_toRightOf="@id/start_web_socket3" />

</androidx.constraintlayout.widget.ConstraintLayout>

+ 12
- 0
app/src/main/res/values/attrs.xml

@@ -0,0 +1,12 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="RoundelMenu">
<attr name="round_menu_roundColor" format="color|reference" />
<attr name="round_menu_centerColor" format="color|reference" />
<attr name="round_menu_expandedRadius" format="dimension" />
<attr name="round_menu_collapsedRadius" format="dimension" />
<attr name="round_menu_duration" format="integer" />
<attr name="round_menu_item_anim_delay" format="integer" />
<attr name="round_menu_item_width" format="dimension" />
</declare-styleable>
</resources>

+ 16
- 0
app/src/main/res/values/themes.xml

@@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.RkCamera2" parent="Theme.MaterialComponents.DayNight.DarkActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

+ 3
- 21
build.gradle

@@ -1,24 +1,6 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
ext.kotlin_version = "1.3.72"
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.1.1"
classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:$kotlin_version"

// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}

allprojects {
repositories {
google()
jcenter()
}
plugins {
id 'com.android.application' version '7.2.1' apply false
id 'com.android.library' version '7.2.1' apply false
}

task clean(type: Delete) {


+ 4
- 3
gradle.properties

@@ -15,7 +15,8 @@ org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# Android operating system, and which are packaged with your app"s APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true
# Kotlin code style for this project: "official" or "obsolete":
kotlin.code.style=official
# Enables namespacing of each library's R class so that its R class includes only the
# resources declared in the library itself and none from the library's dependencies,
# thereby reducing the size of the R class for that library
android.nonTransitiveRClass=true

+ 2
- 3
gradle/wrapper/gradle-wrapper.properties

@@ -1,6 +1,5 @@
#Mon Feb 27 13:17:07 CST 2023
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-7.3.3-bin.zip
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists

+ 15
- 0
settings.gradle

@@ -1,3 +1,18 @@
pluginManagement {
repositories {
gradlePluginPortal()
google()
mavenCentral()
}
}
dependencyResolutionManagement {
repositoriesMode.set(RepositoriesMode.FAIL_ON_PROJECT_REPOS)
repositories {
google()
mavenCentral()
}
}

include ':test'
include ':app'
rootProject.name = "nativedemo"

+ 7
- 7
test/build.gradle

@@ -23,17 +23,17 @@ android {

}

repositories {
flatDir {
dirs 'libs'//this way we can find the .aar file in libs folder
}
}
//repositories {
// flatDir {
// dirs 'libs'//this way we can find the .aar file in libs folder
// }
//}

dependencies {
implementation fileTree(dir: 'libs', include: ['*.jar', '*.aar'])
implementation fileTree(dir: 'libs', excludes: ['framework.jar'], include: ['*.jar', '*.aar'])

implementation 'androidx.appcompat:appcompat:1.3.1'
implementation 'com.google.android.exoplayer:exoplayer-core:2.6.0'
// implementation 'com.google.android.exoplayer:exoplayer-core:2.6.0'
testImplementation 'junit:junit:4.13.2'
androidTestImplementation 'androidx.test.ext:junit:1.1.3'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.4.0'


BIN
test/src/main/assets/vad_modify.bin

