To configure the Android SDK, do the following:
Initialize the settings, select the cloud environment, and initialize the SDK objects.
// Specify the cloud environment login credentials
// NOTE: Replace USER_IDENTIFIER and PASSWORD with the username and password that you received from RN.
private static final String USER_IDENTIFIER = "obtain_from_RN";
private static final String PASSWORD = "obtain_from_RN";
private static final String DIRECTORY = "main";
// Set current mode/preset
private final ModeSettings.Identifier modeIdentifier = ModeSettings.Identifier.recognition;
@Override
public void onCreate(@Nullable Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//....
// Create the Settings instance
settings = new Settings(getActivity().getApplication());
//
// Configuration
// settings.setUserIdentifier(USER_IDENTIFIER);
// settings.setUserPassword(PASSWORD);
// settings.setUserDirectory(DIRECTORY);
// settings.setCloudEnvironment(CloudEnvironment.fromId(CloudEnvironment.ID_PROD));
// To set a custom environment configuration
// URL coviServerUrl = new URL("https://...");
// URL eventServerUrl = new URL("https://...");
// URL objectServerUrl = new URL("https://...");
// URL rncvServerUrl = new URL("https://...");
// URL virgaServerUrl = new URL("https://...");
// settings.setCloudEnvironment(new CloudEnvironment(CloudEnvironment.ID_CUST, coviServerUrl, eventServerUrl, objectServerUrl, rncvServerUrl, virgaServerUrl));
}
@Nullable
@Override
public View onCreateView(@NonNull LayoutInflater inflater, @Nullable ViewGroup container, @Nullable Bundle savedInstanceState) {
//...
/**
* Initialize Object tracker
*/
objectTracker = new ObjectTracker(settings.objectTrackerConfiguration(modeIdentifier), getActivity(), settings);
objectTracker.setDelegate(this);
/**
* Initialize and set the face detector service.
*/
FaceDetectorConfiguration faceDetectorConfiguration = settings.faceDetectorConfiguration(modeIdentifier);
faceDetectorConfiguration.license = "YOUR_SAFR_LICENSE"; // If not set, attempts to use the RGB liveness detection feature will result in a RuntimeException
//faceDetectorConfiguration.detectorModel = FDRuntimeOptions.FD_STANDARD; // Change detector model if needed
objectTracker.setFaceDetectorService(new EmbeddedFaceDetectorService(getContext(), faceDetectorConfiguration, FD_INSTANCES_COUNT));
/**
* Initialize RGB liveness action recognizer
*/
livenessActionRecognizer = new LivenessActionRecognizer(settings.livenessActionRecognizerConfiguration());
livenessActionRecognizer.setDelegate(this);
/**
* Initialize event reporting pipeline
*/
final ObjectEventDataStore dataStore = new ObjectEventDataStore(); // Create the event data store
objectEventLog = new ObjectEventLog(dataStore, ObjectEventLog.Context.live); // Initialize the objectEventLog with the data store, using the live context
final CloudEventStore cloudEventStore = new CloudEventStore(settings.cloudEventStoreConfiguration()); // Initialize the cloud event store
peopleIndexer = new PeopleIndexer(settings.peopleIndexerConfig(modeIdentifier), cloudEventStore, objectEventLog, dataStore); // Initialize the peopleIndexer, which orchestrates event processing
/**
* To play and process RTSP streams, use the aglView
*/
//initAglView();
/**
* Or display the camera preview; see the sample code for the implementation
*/
initCameraPreview();
return view;
}
// Start/stop the peopleIndexer from the onResume/onPause callbacks
@Override
public void onResume() {
super.onResume();
peopleIndexer.startIndexing();
}
@Override
public void onPause() {
super.onPause();
peopleIndexer.stopIndexing();
}
Create the camera source and pass video frames to the object tracker.
/**
* Creates the camera.
*/
private void createCameraSource() {
//...
argusCameraSource = new CameraXSource(getActivity(), cameraXSourcePreview);
// Set ArgusCameraSourceCallback callback
argusCameraSource.setSourceCallback(this);
// Open the camera with the specified facing and camera profile
argusCameraSource.openCamera(facing, cp);
//...
}
The SDK provides the CameraXSource and VisionCameraSource camera sources, but the client app can provide its own camera implementation, as sketched below.
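If the client app supplies its own camera implementation, it needs to feed the SDK through the same ArgusCameraSourceCallback methods that the built-in sources use. The following is only a minimal sketch based on the callback methods shown in this section; the CustomCameraSource class and its notify helpers are hypothetical names, and the actual capture pipeline is app-specific and omitted.
/**
 * Hypothetical custom camera source. It forwards readiness, frames, and preview geometry
 * to the SDK through the ArgusCameraSourceCallback methods shown in the snippets below.
 */
public class CustomCameraSource {
    private final ArgusCameraSourceCallback callback;

    public CustomCameraSource(ArgusCameraSourceCallback callback) {
        this.callback = callback;
    }

    // Call once the underlying camera is configured and producing frames.
    void notifyCameraReady(CameraProfile cameraProfile) {
        callback.cameraSourceOnCameraReady(cameraProfile);
    }

    // Call for every captured frame; the app's callback forwards it to the ObjectTracker.
    void notifyFrame(VideoFrame frame) {
        callback.cameraSourceOnFrameUpdate(frame);
    }

    // Call whenever the preview geometry changes so overlays can map camera to view coordinates.
    void notifyGeometryChanged(float xScale, float yScale, int offsetLeft, int offsetTop, int cameraFacing) {
        callback.cameraSourceOnGeometryChanged(xScale, yScale, offsetLeft, offsetTop, cameraFacing);
    }
}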
Hook the camera up to the ObjectTracker in the callback methods.
@Override
public void cameraSourceOnFrameUpdate(@org.jetbrains.annotations.Nullable VideoFrame frame) {
// Pass camera frames to the objectTracker. Never pass the incoming frame directly; copy it into a reusable frame first, because the objectTracker processes frames on its own threads.
if (frame == null) {
return;
}
videoFrame.reset();
videoFrame.shallowCopy(frame);
if (objectTracker != null) {
objectTracker.trackObjects(videoFrame);
}
}
@Override
public void cameraSourceOnGeometryChanged(float xScale, float yScale, int offsetLeft, int offsetTop, int cameraFacing) {
// Pass the camera geometry info to the view that draws the face info; this is essentially a mapping from camera to view coordinates
if (overlay != null) {
overlay.setCameraInfo(xScale, yScale, offsetLeft, offsetTop, cameraFacing);
}
}
@Override
public void cameraSourceOnCameraReady(@org.jetbrains.annotations.Nullable CameraProfile cameraProfile) {
// Tell the tracker to actually begin tracking once the camera is ready
if (objectTracker != null) {
objectTracker.beginTracking();
}
}
The startCameraSource() method should be invoked from the Activity's onResume() callback.
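A minimal sketch of that wiring, extending the onResume() callback shown earlier: the body of startCameraSource() is an assumption (only its name appears in this guide), and camera permission handling is omitted.
// Hypothetical helper: create the camera source once, then rely on the source callbacks.
private void startCameraSource() {
    if (argusCameraSource == null) {
        createCameraSource();
    }
}

@Override
public void onResume() {
    super.onResume();
    peopleIndexer.startIndexing(); // as shown earlier
    startCameraSource();
}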
Process the results.
@Override
public void objectTrackerDidTrack(@NonNull ObjectTracker objectTracker, @NonNull TrackingResult trackingResult) {
// Process the tracking result
// Collect result
result.clear();
result.addAll(trackingResult.getAppeared());
result.addAll(trackingResult.getDisappeared());
result.addAll(trackingResult.getLingering());
result.addAll(trackingResult.getUpdated());
// Pass it to the overlay for display purposes
overlay.setTrackedObjects(result);
// Pass the tracking result to the objectEventLog, which logs and reports events to the cloud
if (objectEventLog != null) {
objectEventLog.update(trackingResult);
}
// Pass the tracking result to the liveness action recognizer
if (livenessActionRecognizer != null) {
livenessActionRecognizer.update(trackingResult);
}
}