
Building your first Android AR Application :
In this section, we will build a simple Android AR application that renders 3D objects, viewable in 360 degrees and placed into the real world.
Create Project
Let’s dive right in and begin by creating a new project with an empty activity.

Updating Manifest
Add the following lines in your AndroidManifest.xml file:
Add the OpenGL version in manifest.

Add Dependency
For the 3D view, add the following dependency to your build.gradle file:

Adding the SurfaceView
The SurfaceView automatically handles your sessions and the runtime checks necessary for the application to work.
If ARCore has not been installed on the user's device, the user is prompted to install ARCore. Likewise, if camera permission has not been granted, the user is asked for camera permission. Hence, ArFragment is the best way to start building your very first Android ARCore application.
It also displays a 3D SurfaceView.

Add this file in assets folder,

Now we have to create a CustomViewer class for managing 3D views.
class CustomViewer {
    companion object {
        init {
            // Load Filament's native libraries before any engine objects are created.
            Utils.init()
        }
    }

    private lateinit var choreographer: Choreographer
    private lateinit var modelViewer: ModelViewer

    /**
     * Grabs the Choreographer that drives per-frame rendering.
     * Must be called before [onResume].
     */
    fun loadEntity() {
        choreographer = Choreographer.getInstance()
    }

    /**
     * Binds the viewer to [mSurfaceView]: forwards touch events to the
     * ModelViewer for camera manipulation and installs a plain white skybox
     * as the default background.
     */
    fun setSurfaceView(mSurfaceView: SurfaceView) {
        modelViewer = ModelViewer(mSurfaceView)
        mSurfaceView.setOnTouchListener(modelViewer)
        modelViewer.scene.skybox = Skybox.Builder().build(modelViewer.engine)
        modelViewer.scene.skybox?.setColor(1.0f, 1.0f, 1.0f, 1.0f) //White color
    }

    /** Loads assets/models/<name>.glb and scales it to fit a unit cube. */
    fun loadGlb(context: Context, name: String) {
        val buffer = readAsset(context, "models/${name}.glb")
        modelViewer.apply {
            loadModelGlb(buffer)
            transformToUnitCube()
        }
    }

    /** Loads assets/models/<dirName>/<name>.glb and scales it to fit a unit cube. */
    fun loadGlb(context: Context, dirName: String, name: String) {
        val buffer = readAsset(context, "models/${dirName}/${name}.glb")
        modelViewer.apply {
            loadModelGlb(buffer)
            transformToUnitCube()
        }
    }

    /**
     * Loads assets/models/<name>.gltf. External resources referenced by the
     * glTF (textures, .bin buffers) are resolved relative to assets/models/.
     */
    fun loadGltf(context: Context, name: String) {
        // Reuse readAsset instead of duplicating stream handling inline;
        // it closes the stream and reads the asset fully.
        val buffer = readAsset(context, "models/${name}.gltf")
        modelViewer.apply {
            loadModelGltf(buffer) { uri -> readAsset(context, "models/$uri") }
            transformToUnitCube()
        }
    }

    /**
     * Loads assets/models/<dirName>/<name>.gltf. External resources referenced
     * by the glTF are resolved relative to assets/models/<dirName>/.
     */
    fun loadGltf(context: Context, dirName: String, name: String) {
        val buffer = readAsset(context, "models/${dirName}/${name}.gltf")
        modelViewer.apply {
            loadModelGltf(buffer) { uri -> readAsset(context, "models/${dirName}/$uri") }
            transformToUnitCube()
        }
    }

    /**
     * Creates the indirect light source from the bundled KTX IBL file
     * ("<ibl>_ibl.ktx") and adds it to the scene.
     */
    fun loadIndirectLight(context: Context, ibl: String) {
        val buffer = readAsset(context, "environments/venetian_crossroads_2k/${ibl}_ibl.ktx")
        KTXLoader.createIndirectLight(modelViewer.engine, buffer).apply {
            intensity = 50_000f
            modelViewer.scene.indirectLight = this
        }
    }

    /**
     * Creates a skybox from the bundled KTX file ("<ibl>_skybox.ktx") and adds
     * it to the scene, replacing the default white skybox.
     * NOTE(review): name keeps the original "Enviroment" spelling for
     * backward compatibility with existing callers.
     */
    fun loadEnviroment(context: Context, ibl: String) {
        val buffer = readAsset(context, "environments/venetian_crossroads_2k/${ibl}_skybox.ktx")
        KTXLoader.createSkybox(modelViewer.engine, buffer).apply {
            modelViewer.scene.skybox = this
        }
    }

    /**
     * Reads an asset fully into a ByteBuffer.
     *
     * Fix over the previous version: `use { }` guarantees the stream is closed
     * even on error, and `readBytes()` reads until EOF — `available()` plus a
     * single `read()` only reports bytes readable without blocking and may
     * silently truncate the asset.
     */
    private fun readAsset(context: Context, assetName: String): ByteBuffer =
        context.assets.open(assetName).use { input ->
            ByteBuffer.wrap(input.readBytes())
        }

    // Per-frame driver: advances the model's first animation (if any) and
    // renders. Re-posts itself, so it runs every vsync until removed.
    private val frameCallback = object : Choreographer.FrameCallback {
        private val startTime = System.nanoTime()
        override fun doFrame(currentTime: Long) {
            // Seconds elapsed since this callback object was created.
            val seconds = (currentTime - startTime).toDouble() / 1_000_000_000
            choreographer.postFrameCallback(this)
            modelViewer.animator?.apply {
                if (animationCount > 0) {
                    applyAnimation(0, seconds.toFloat())
                }
                updateBoneMatrices()
            }
            modelViewer.render(currentTime)
        }
    }

    /** Starts the render loop. Call from the host Activity's onResume(). */
    fun onResume() {
        choreographer.postFrameCallback(frameCallback)
    }

    /** Stops the render loop. Call from the host Activity's onPause(). */
    fun onPause() {
        choreographer.removeFrameCallback(frameCallback)
    }

    /** Stops the render loop. Call from the host Activity's onDestroy(). */
    fun onDestroy() {
        choreographer.removeFrameCallback(frameCallback)
    }
}
Output :

You can click on the View In AR button.

That’s it! We have built a fully functional Android AR app.