visit
pc — , Waterfall at Yosemite National Park (2015)
Code in the wild is always better to learn from than slide samples. Here I’ll be sharing production code to showcase a live Unidirectional Data Flow (UDF) pattern, which makes the app more readable, and easier to develop for and debug.
The previous post, , outlines the strategy for loading a newsfeed and removing adjacent ads using UDF. This post shares both a basic live example, and a more complex structure to return multiple processes in one method using
MediatorLiveData
. By downloading the app, you can run the code to explore, listen on-the-go, and watch the newsfeed of audiocast and video content. For access to features requiring authentication, such as saving and dismissing content, you’ll need to implement the 5 easy-ish steps to set up a Firebase project outlined in the setup instructions.
pc—My Mom’s Kenya safari (1980s)
LiveData
provides a simple, Android lifecycle-aware toolset to handle automatically updating data for events. Loading, Content, Error (LCE) Network Requests
Network requests returned with LCE states wrapped in LiveData objects ensure that the ViewModel business logic layer accounts for the loading, successful content loaded, and error states.
// Loading / Content / Error (LCE) wrapper for network request state.
// Each subtype carries the type parameter so one hierarchy can wrap any payload.
sealed class Lce<T> {
// Request in flight; no payload (a class, not an object, because of the type parameter).
class Loading<T> : Lce<T>()
// Successful result carrying the loaded data.
data class Content<T>(val packet: T) : Lce<T>()
// Failure state; 'packet' has the same shape as the content and carries error details.
data class Error<T>(val packet: T) : Lce<T>()
}
ContentSelected view event
In this first case when video content is selected in the Adapter it triggers a
ContentSelected
view event in the ContentFragment.kt. The event is processed in the ContentViewModel.kt. class ContentAdapter(...): PagedListAdapter<Content, ContentAdapter.ViewHolder>(DIFF_CALLBACK) {
// Click selections exposed as single-shot Events; the mutable backing field stays private.
val contentSelected: LiveData<Event<ContentSelected>> get() = _contentSelected
private val _contentSelected = MutableLiveData<Event<ContentSelected>>()
// Emits a ContentSelected event when the preview image or content-type logo is tapped.
// The adapter position is read back from the view tag (presumably set at bind time — verify).
private fun createOnClickListener(content: Content) = OnClickListener { view ->
when (view.id) {
preview, contentTypeLogo -> _contentSelected.value =
Event(ContentSelected(view.getTag(ADAPTER_POSITION_KEY) as Int, content))
}
}
}
// NOTE(review): article excerpt — initAdapters appears to be missing its closing brace
// before onResume; the full source presumably compiles.
class ContentFragment : Fragment() {
// View events exposed read-only; the adapter writes into the private backing field.
private val viewEvent: LiveData<Event<ContentViewEvent>> get() = _viewEvent
private val _viewEvent = MutableLiveData<Event<ContentViewEvent>>()
private fun initAdapters() {
adapter = ContentAdapter(contentViewModel, _viewEvent).apply {
// Re-wrap the adapter's selection as a fragment-level view event,
// translating the adapter position via getAdapterPosition.
this.contentSelected.observe(viewLifecycleOwner, EventObserver { contentSelected ->
_viewEvent.value = Event(ContentSelected(
getAdapterPosition(contentSelected.position), contentSelected.content))
})
}
// Forward every view event to the ViewModel for processing.
override fun onResume() {
super.onResume()
viewEvent.observe(viewLifecycleOwner, EventObserver { event ->
contentViewModel.processEvent(event)
})
}
Because the
contentType
is a video, the ViewModel updates the contentToPlay
LiveData value of the view state FeedViewState
with the ContentToPlay
object containing a video url. class ContentViewModel(application: Application) : AndroidViewModel(application) {
/**
 * Routes view events from the UI layer. For a selected YouTube video: hides the
 * cell's loading indicator, asks the view to refresh that cell, and publishes a
 * ContentToPlay in the feed view state so the fragment can launch playback.
 */
fun processEvent(event: ContentViewEvent) {
when (event) {
is ContentViewEvent.ContentSelected -> {
// Smart cast already gives us a ContentSelected here — the previous code
// rebuilt an identical ContentSelected from event's own fields for no gain.
when (event.content.contentType) {
YOUTUBE -> {
setContentLoadingStatus(event.content.id, View.GONE)
_viewEffect.value = Event(NotifyItemChanged(event.position))
_feedViewState.value = _feedViewState.value?.copy(contentToPlay =
MutableLiveData<Event<ContentResult.ContentToPlay>>().apply {
// filePath/errorMessage are empty — YouTube playback only needs the url in content.
this.value = Event(ContentResult.ContentToPlay(
event.position, event.content, "", ""))
})
}
}
}
}
}
}
// Feed view state; contentToPlay is itself LiveData so the fragment can observe playback requests.
data class FeedViewState(val contentToPlay: LiveData<Event<ContentResult.ContentToPlay>>, ...)
// NOTE(review): 'var' properties in a data class invite mutation — prefer 'val' + copy().
data class ContentToPlay(var position: Int, var content: Content, var filePath: String?,
val errorMessage: String) : Parcelable {...}
The Fragment observes the
FeedViewState
contentToPlay
value change, and launches the Fragment to play the video.class ContentFragment : Fragment() {
// Observes the feed view state and launches the player dialog when contentToPlay emits.
private fun observeViewState() {
contentViewModel.feedViewState.observe(viewLifecycleOwner, Observer { viewState ->
// NOTE(review): observing inside another observer registers a new inner observer on
// every outer emission — consider switchMap/distinctUntilChanged; verify no duplicates.
viewState.contentToPlay.observe(viewLifecycleOwner, EventObserver { contentToPlay ->
when (feedType) {
MAIN, DISMISSED ->
// Guard: don't show the dialog if one is already on screen.
if (childFragmentManager.findFragmentByTag(CONTENT_DIALOG_FRAGMENT_TAG) == null)
ContentDialogFragment().newInstance(Bundle().apply {
putParcelable(CONTENT_TO_PLAY_KEY, contentToPlay)
}).show(childFragmentManager, CONTENT_DIALOG_FRAGMENT_TAG)
}
})
})
}
}
In addition to the view state being updated in the ViewModel above, a view effect,
NotifyItemChanged
is added and observed in the Fragment to update the RecyclerView cell that has been selected.
// Subscribes to one-shot view effects from the ViewModel and applies them to the UI.
class ContentFragment : Fragment() {
private fun observeViewEffects() {
contentViewModel.viewEffect.observe(viewLifecycleOwner, EventObserver { viewEffect ->
// Refresh the selected RecyclerView cell when the ViewModel requests it.
if (viewEffect is ContentViewEffect.NotifyItemChanged)
adapter.notifyItemChanged(viewEffect.position)
})
}
}
PlayerLoad view event
In the first case the
ContentSelected
event is straightforward because there is a view event and one piece of data, the ContentToPlay
object, returned in the view state. The PlayerLoad
event created from the AudioFragment.kt, that retrieves the required information to play an audiocast, is more complicated because it requires two network requests. class AudioFragment : Fragment() {
// View events exposed read-only; only this fragment mutates the backing field.
private val viewEvent: LiveData<Event<ContentViewEvent>> get() = _viewEvent
private val _viewEvent = MutableLiveData<Event<ContentViewEvent>>()
override fun onCreate(savedInstanceState: Bundle?) {
...
// NOTE(review): triple '!!' — prefer requireArguments()/requireNotNull with a message.
contentToPlay = arguments!!.getParcelable(CONTENT_TO_PLAY_KEY)!!
// Only fire PlayerLoad on first creation; after a config change the state is restored.
if (savedInstanceState == null)
_viewEvent.value = Event(ContentViewEvent.PlayerLoad(
contentToPlay.content.id, contentToPlay.filePath!!,
contentToPlay.content.previewImage))
}
}
class ContentViewModel(application: Application) : AndroidViewModel(application) {
// Player view state exposed read-only; populated when a PlayerLoad event arrives.
val playerViewState: LiveData<PlayerViewState> get() = _playerViewState
private val _playerViewState = MutableLiveData<PlayerViewState>()
// Routes view events; PlayerLoad kicks off the combined uri + bitmap request.
fun processEvent(event: ContentViewEvent) {
when (event) {
is ContentViewEvent.PlayerLoad ->
_playerViewState.value = PlayerViewState(getContentPlayer(
event.contentId, event.filePath, event.previewImageUrl))
}
}
}
The
PlayerViewState
returned requires retrieving information from two sources for both the mp3 file to play audio, and a converted Bitmap image to display in the notification player.data class PlayerViewState(val contentPlayer: LiveData<Event<ContentResult.ContentPlayer>>)
sealed class ContentResult {
// NOTE(review): a ByteArray property breaks a data class's structural equals/hashCode
// unless overridden — presumably handled in the elided '{...}' body; verify.
data class ContentPlayer(val uri: Uri, val image: ByteArray, val errorMessage: String): ContentResult(), Parcelable {...}
}
The ViewModel could achieve getting both pieces of information by making one network request, waiting for it to return successfully, then make the second request. However, this creates more complicated and nested code. In order for the ViewModel to observe LiveData from the network requests it must be observed by the UI layer in the AudioFragment. The first network request must be observed, then within that method, the second request needs to be observed. This creates nested
LCE
code within the ContentViewModel as well, which gets ugly. A better solution is to return one LiveData object that contains both the mp3 and Bitmap image using MediatorLiveData.
combineLiveData
takes in two LiveData objects, getContentUri
returning the mp3, and bitmapToByteArray
returning the Bitmap image. combineLiveData
only returns the ContentPlayer
LiveData once each LiveData item passed into the method has been populated with info, indicated by an emitted boolean in the code below. class ContentViewModel(application: Application) : AndroidViewModel(application) {
// Merges the mp3-uri request and the notification-image request into a single
// ContentPlayer LiveData; combineLiveData emits only after both sources have a value.
private fun getContentPlayer(contentId: String, filePath: String, imageUrl: String) =
getContentUri(contentId, filePath)
.combineLiveData(bitmapToByteArray(imageUrl)) { uriEvent, bitmapEvent ->
Event(ContentResult.ContentPlayer(
uriEvent.peekEvent().uri,
bitmapEvent.peekEvent().image,
getLiveDataErrors(uriEvent, bitmapEvent)))
}
/**
 * Combines this `LiveData` with [other]: once both sources have emitted at least
 * one value, every subsequent emission from either source re-applies [onChange]
 * to the latest pair and publishes the result on the returned MediatorLiveData.
 */
private fun <T, A, B> LiveData<A>.combineLiveData(other: LiveData<B>, onChange: (A, B) -> T) =
MediatorLiveData<T>().also { merged ->
var firstHasValue = false
var secondHasValue = false
// Recompute only when both sources have produced something; '!!' is safe because
// each flag is set strictly after its source has delivered a value.
fun publishIfReady() {
if (firstHasValue && secondHasValue)
merged.value = onChange(this.value!!, other.value!!)
}
merged.addSource(this) {
firstHasValue = true
publishIfReady()
}
merged.addSource(other) {
secondHasValue = true
publishIfReady()
}
}
// Maps the repository's LCE stream for the content uri into Event-wrapped results.
// Loading and Error yield empty LiveData (the error is logged, not emitted here).
private fun getContentUri(contentId: String, filePath: String) =
switchMap(repository.getContentUri(contentId, filePath)) { result ->
when (result) {
is Lce.Loading -> MutableLiveData()
is Lce.Error -> {
Crashlytics.log(Log.ERROR, LOG_TAG, result.packet.errorMessage)
MutableLiveData()
}
is Lce.Content -> MutableLiveData<Event<ContentResult.ContentUri>>().apply {
value = Event(ContentResult.ContentUri(result.packet.uri, ""))
}
}
}
// Converts the repository's bitmap LCE stream into Event-wrapped ContentBitmap results
// via the liveData coroutine builder; Loading and Error branches emit nothing.
private fun bitmapToByteArray(url: String) = liveData {
emitSource(switchMap(repository.bitmapToByteArray(url)) { lce ->
when (lce) {
is Lce.Loading -> liveData {}
is Lce.Content -> liveData {
emit(Event(ContentResult.ContentBitmap(lce.packet.image, lce.packet.errorMessage)))
}
// Error is only logged here — no value is emitted downstream.
is Lce.Error -> liveData {
Crashlytics.log(Log.WARN, LOG_TAG,
"bitmapToByteArray error or null - ${lce.packet.errorMessage}")
}
}
})
}
/**
 * Collects the error messages from both request results into one space-separated
 * string (empty when neither request reported an error).
 *
 * Fixes a bug in the previous implementation: both 'apply' blocks discarded their
 * results ('apply' returns its receiver, and String.plus creates a new string that
 * was thrown away), so b's error message was never actually appended.
 */
private fun getLiveDataErrors(a: Event<ContentResult.ContentUri>, b: Event<ContentResult.ContentBitmap>) =
listOf(a.peekEvent().errorMessage, b.peekEvent().errorMessage)
.filter { it.isNotEmpty() }
.joinToString(" ")
}
This allows for each of the two methods requiring a network request to handle their LCEs separately, and for one LiveData object to be observed from the
ContentPlayer
in the AudioFragment.class AudioFragment : Fragment() {
// contentToPlay is assigned from the fragment arguments in onCreate;
// contentViewModel is presumably initialized before first use — verify against caller.
private lateinit var contentToPlay: ContentResult.ContentToPlay
private lateinit var contentViewModel: ContentViewModel
// Observes the combined player state and starts playback once both uri and image arrive.
private fun observeViewState() {
contentViewModel.playerViewState.observe(viewLifecycleOwner, Observer { viewState ->
// NOTE(review): nested observe registers a new inner observer per outer emission — verify.
viewState?.contentPlayer?.observe(viewLifecycleOwner, EventObserver { contentPlayer ->
//Launch audiocast!
})
})
}
}
In addition to the live code, I’ve created notes on LiveData and the UDF pattern, including documentation notes, videos, and samples.