Although phones these days have more memory than your laptop from last year, mobile development is still a memory-constrained environment.

Although Android is fairly lenient about the memory an application eats, it can still decide that you are using too much and kill your process early or stop giving you more.

Even if you are lucky, another reason to optimize memory usage in your application is that, since both Android and Mono use pause-based garbage collectors, the less often the GC has to run, the less your application will be suspended to let the system reclaim memory.

This is particularly important if your application uses animations, as they will be stopped during GC cleanup and make your application appear sluggish to the user. The same is true of ListView scrolling when you instantiate big objects for each row.

A typical case of big objects is Bitmap instances. Android has some optimizations baked in for drawable resources, but if your application needs to instantiate Bitmaps manually with BitmapFactory (for example because you download them from the Internet), you will rapidly consume a lot of memory if you keep them around.

At the same time, you don’t want to toss these Bitmap instances away completely, as they can be expensive to fetch again (network cost). The solution is to serialize them to disk and only keep the “hot” ones in memory.

Unfortunately, although the Android documentation recommends this model, the framework doesn’t offer a default implementation; instead you are left to copy code from the samples included in the SDK.

Since I had a need for something similar, I cooked up a trimmed-down version of that cache with an added LRU queue on top to keep the most valuable Bitmaps in memory. The code for the disk cache is given below:

using System;
using System.IO;
using System.Text;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Collections.Generic;

using Bitmap = Android.Graphics.Bitmap;
using Env = Android.OS.Environment;

namespace Foo
{
	public class DiskCache
	{
		enum JournalOp {
			Created = 'c',
			Modified = 'm',
			Deleted = 'd'
		}

		const string JournalFileName = ".journal";
		const string Magic = "MONOID";
		readonly Encoding encoding = Encoding.UTF8;
		string basePath;
		string journalPath;
		string version;
		Timer cleanupTimer;

		struct CacheEntry
		{
			public DateTime Origin;
			public TimeSpan TimeToLive;

			public CacheEntry (DateTime o, TimeSpan ttl)
			{
				Origin = o;
				TimeToLive = ttl;
			}
		}

		Dictionary<string, CacheEntry> entries = new Dictionary<string, CacheEntry> ();

		public DiskCache (string basePath, string version)
		{
			this.basePath = basePath;
			if (!Directory.Exists (basePath))
				Directory.CreateDirectory (basePath);
			this.journalPath = Path.Combine (basePath, JournalFileName);
			this.version = version;

			try {
				InitializeWithJournal ();
			} catch {
				Directory.Delete (basePath, true);
				Directory.CreateDirectory (basePath);
			}

			ThreadPool.QueueUserWorkItem (CleanCallback);
		}

		public static DiskCache CreateCache (Android.Content.Context ctx, string cacheName, string version = "1.0")
		{
			/*string cachePath = Env.ExternalStorageState == Env.MediaMounted
				|| !Env.IsExternalStorageRemovable ? ctx.ExternalCacheDir.AbsolutePath : ctx.CacheDir.AbsolutePath;*/
			string cachePath = ctx.CacheDir.AbsolutePath;

			return new DiskCache (Path.Combine (cachePath, cacheName), version);
		}

		void InitializeWithJournal ()
		{
			if (!File.Exists (journalPath)) {
				using (var writer = new StreamWriter (journalPath, false, encoding)) {
					writer.WriteLine (Magic);
					writer.WriteLine (version);
				}
				return;
			}

			string line = null;
			using (var reader = new StreamReader (journalPath, encoding)) {
				if (!EnsureHeader (reader))
					throw new InvalidOperationException ("Invalid header");
				while ((line = reader.ReadLine ()) != null) {
					try {
						var op = ParseOp (line);
						string key;
						DateTime origin;
						TimeSpan duration;

						switch (op) {
						case JournalOp.Created:
							ParseEntry (line, out key, out origin, out duration);
							entries.Add (key, new CacheEntry (origin, duration));
							break;
						case JournalOp.Modified:
							ParseEntry (line, out key, out origin, out duration);
							entries[key] = new CacheEntry (origin, duration);
							break;
						case JournalOp.Deleted:
							ParseEntry (line, out key);
							entries.Remove (key);
							break;
						}
					} catch {
						break;
					}
				}
			}
		}

		void CleanCallback (object state)
		{
			KeyValuePair<string, CacheEntry>[] kvps;
			lock (entries) {
				var now = DateTime.UtcNow;
				kvps = entries.Where (kvp => kvp.Value.Origin + kvp.Value.TimeToLive < now).Take (10).ToArray ();
				Android.Util.Log.Info ("DiskCacher", "Removing {0} elements from the cache", kvps.Length);
				foreach (var kvp in kvps) {
					entries.Remove (kvp.Key);
					// Record the deletion so a later journal replay doesn't resurrect the entry
					AppendToJournal (JournalOp.Deleted, kvp.Key);
					try {
						File.Delete (Path.Combine (basePath, kvp.Key));
					} catch {}
				}
			}
		}

		bool EnsureHeader (StreamReader reader)
		{
			var m = reader.ReadLine ();
			var v = reader.ReadLine ();

			return m == Magic && v == version;
		}

		JournalOp ParseOp (string line)
		{
			return (JournalOp)line[0];
		}

		void ParseEntry (string line, out string key)
		{
			key = line.Substring (2);
		}

		void ParseEntry (string line, out string key, out DateTime origin, out TimeSpan duration)
		{
			key = null;
			origin = DateTime.MinValue;
			duration = TimeSpan.MinValue;

			var parts = line.Substring (2).Split (' ');
			if (parts.Length != 3)
				throw new InvalidOperationException ("Invalid entry");
			key = parts[0];

			long dateTime, timespan;

			if (!long.TryParse (parts[1], out dateTime))
				throw new InvalidOperationException ("Corrupted origin");
			else
				origin = new DateTime (dateTime);

			if (!long.TryParse (parts[2], out timespan))
				throw new InvalidOperationException ("Corrupted duration");
			else
				duration = TimeSpan.FromMilliseconds (timespan);
		}

		public void AddOrUpdate (string key, Bitmap bmp, TimeSpan duration)
		{
			key = SanitizeKey (key);
			lock (entries) {
				bool existed = entries.ContainsKey (key);
				// File.Create truncates any existing content so a smaller bitmap can't leave stale trailing bytes behind
				using (var stream = new BufferedStream (File.Create (Path.Combine (basePath, key))))
					bmp.Compress (Bitmap.CompressFormat.Png, 100, stream);
				AppendToJournal (existed ? JournalOp.Modified : JournalOp.Created,
				                 key,
				                 DateTime.UtcNow,
				                 duration);
				entries[key] = new CacheEntry (DateTime.UtcNow, duration);
			}
		}

		public bool TryGet (string key, out Bitmap bmp)
		{
			key = SanitizeKey (key);
			lock (entries) {
				bmp = null;
				if (!entries.ContainsKey (key))
					return false;
				try {
					bmp = Android.Graphics.BitmapFactory.DecodeFile (Path.Combine (basePath, key));
				} catch {
					return false;
				}

				return true;
			}
		}

		void AppendToJournal (JournalOp op, string key)
		{
			using (var writer = new StreamWriter (journalPath, true, encoding)) {
				writer.Write ((char)op);
				writer.Write (' ');
				writer.Write (key);
				writer.WriteLine ();
			}
		}

		void AppendToJournal (JournalOp op, string key, DateTime origin, TimeSpan ttl)
		{
			using (var writer = new StreamWriter (journalPath, true, encoding)) {
				writer.Write ((char)op);
				writer.Write (' ');
				writer.Write (key);
				writer.Write (' ');
				writer.Write (origin.Ticks);
				writer.Write (' ');
				writer.Write ((long)ttl.TotalMilliseconds);
				writer.WriteLine ();
			}
		}

		string SanitizeKey (string key)
		{
			return new string (key
			                   .Where (c => (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9'))
			                   .ToArray ());
		}
	}
}

The three files making up the package can be found in this gist: https://gist.github.com/4323075.
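
To give an idea of how the cache is meant to be used, here is a minimal usage sketch, assuming it runs inside an Activity so that this is a valid Context; url, imageView and DownloadBitmap are hypothetical placeholders, only the DiskCache calls come from the listing above:

var cache = DiskCache.CreateCache (this, "thumbnails");

Bitmap bmp;
if (!cache.TryGet (url, out bmp)) {
	// DownloadBitmap stands in for whatever code fetches the image from the network
	bmp = DownloadBitmap (url);
	// Keep the entry valid for a day; the cleanup pass may delete the file afterwards
	cache.AddOrUpdate (url, bmp, TimeSpan.FromDays (1));
}
imageView.SetImageBitmap (bmp);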

The algorithm it’s based on is very simple, and if you know how filesystems/databases work you will feel right at home. The disk cache keeps a journal composed of lines, each recording an operation (create, modify, delete) associated with a key name and, optionally, timestamps.
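
For illustration, a journal written by this code could look like the following (the key and values are made up). The first two lines are the magic marker and version checked by EnsureHeader; each subsequent line is the operation character, the key, the origin timestamp in ticks and the time-to-live in milliseconds (delete lines only carry the key):

MONOID
1.0
c picture42 634912416000000000 86400000
m picture42 634913280000000000 86400000
d picture42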

When you do an operation on the cache, the content you are pushing is first serialized to disk, and only then is an entry appended to the journal to commit the action. This order ensures that, even in the unlikely case where your application crashes in the middle, the cache will never hand back a corrupted Bitmap: since the journal line was never written, a half-written file simply appears not to exist.

When the application is restarted, the cache opens that journal and replays all the operations to arrive at the same internal state it had beforehand. To account for out-of-date entries, a thread is also spawned when the cache is re-created to clean up files that have expired.
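
One thing to note: the cleanup pass only runs once, when the cache is constructed, via ThreadPool.QueueUserWorkItem; the cleanupTimer field declared in the listing is otherwise unused. If you wanted the cleanup to repeat while the application is alive, one possible sketch (replacing the QueueUserWorkItem call in the constructor) would be:

// Run CleanCallback immediately and then every 30 minutes
cleanupTimer = new Timer (CleanCallback,
                          null,
                          TimeSpan.Zero,
                          TimeSpan.FromMinutes (30));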

The code I’m giving is simpler and less featureful than the original Java code, but it suits my needs and is much shorter. Still, among the additional features you could add:

  • Journal trimming: when the journal file grows too large, filter out operation lines that cancel each other out, i.e. when you encounter a delete operation you can drop all the previous create and modify operations on that same key (a sketch of this is given after the list).
  • Fsync support: for those who are really religious about consistency, you could add a proper write barrier (e.g. FileStream.Flush(true)) so that the filesystem cannot reorder the writes and persist the journal line before the Bitmap content.
  • More generic and better memory usage: see Jonathan’s comment on the gist.
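
As an example of the first point, here is a rough sketch of what journal trimming could look like with the classes above (TrimJournal is not part of the gist, it is just one possible approach): since the entries dictionary already holds the live state after replay, the journal can simply be rewritten from it, which collapses every create/modify/delete sequence into at most one line per key.

// Sketch: rewrite the journal so it contains a single 'created' line per live
// entry, dropping all the operations that were cancelled or superseded.
void TrimJournal ()
{
	lock (entries) {
		var tmpPath = journalPath + ".tmp";
		using (var writer = new StreamWriter (tmpPath, false, encoding)) {
			writer.WriteLine (Magic);
			writer.WriteLine (version);
			foreach (var kvp in entries) {
				writer.Write ((char)JournalOp.Created);
				writer.Write (' ');
				writer.Write (kvp.Key);
				writer.Write (' ');
				writer.Write (kvp.Value.Origin.Ticks);
				writer.Write (' ');
				writer.Write ((long)kvp.Value.TimeToLive.TotalMilliseconds);
				writer.WriteLine ();
			}
		}
		// Swap the trimmed journal in place of the old one
		File.Delete (journalPath);
		File.Move (tmpPath, journalPath);
	}
}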