@Nathaniel100
Created May 27, 2016 10:21
Cache

Based on Volley Cache

Cache interface

import java.util.Collections;
import java.util.Map;

/**
 * An interface for a cache keyed by a String with a byte array as data
 */
public interface Cache {
	public Entry get(String key);
	public void put(String key, Entry entry);
	public void initialize();
	public void invalidate(String key, boolean fullExpire);
	public void remove(String key);
	public void clear();

	public static class Entry {
		public byte[] data;
		public String etag;
		public long serverDate;
		public long lastModified;
		public long ttl;      // hard expiry: the entry is considered expired after this time
		public long softTtl;  // soft expiry: the entry should be refreshed after this time
		public Map<String, String> responseHeaders = Collections.emptyMap();

		public boolean isExpired() {
			return ttl < System.currentTimeMillis();
		}

		public boolean refreshNeeded() {
			return softTtl < System.currentTimeMillis();
		}
	}
}
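
The TTL fields are not filled in by the cache itself; callers set them, typically from HTTP caching headers. Below is a hypothetical helper (not part of the gist) that builds an Entry from a response body, an ETag, and a max-age value; the class and parameter names and the 2x factor used for the hard TTL are illustrative assumptions.

import java.util.Map;

// Hypothetical helper: builds a Cache.Entry from response data and a max-age value.
// The 2x multiplier for the hard TTL is an illustrative choice, not taken from the gist.
public final class EntryFactory {
	public static Cache.Entry buildEntry(byte[] body, String etag, long maxAgeSeconds,
	                                     Map<String, String> responseHeaders) {
		long now = System.currentTimeMillis();
		Cache.Entry entry = new Cache.Entry();
		entry.data = body;
		entry.etag = etag;
		entry.serverDate = now;
		entry.lastModified = now;
		entry.softTtl = now + maxAgeSeconds * 1000L;      // refreshNeeded() becomes true after this
		entry.ttl = now + maxAgeSeconds * 1000L * 2;      // isExpired() becomes true after this
		entry.responseHeaders = responseHeaders;
		return entry;
	}
}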

DiskBasedCache

A Cache implementation that writes entries directly to files on disk in the specified directory. The default disk usage limit is 5 MB, but it is configurable.

import android.os.SystemClock;

import java.io.BufferedInputStream;
import java.io.BufferedOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.FilterInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.Map;

// Note: Log below is a Volley-style logger with printf-style overloads (e.g. VolleyLog),
// not android.util.Log.
public class DiskBasedCache implements Cache {
	private static final long DEFAULT_LIMIT_SIZE = 5 * 1024 * 1024; // 5 MB
	private static final int CACHE_MAGIC = 0x20150306;
	private static final float HYSTERESIS_FACTOR = .9f;

	private final File rootDirectory;
	private final long limitSize;
	private long totalSize = 0; // not final: updated as entries are added and removed
	// access-ordered map so that pruning evicts least-recently-used entries first
	private Map<String, CacheHeader> entries = new LinkedHashMap<>(16, .75f, true);

	public DiskBasedCache(File rootDirectory) {
		this(rootDirectory, DEFAULT_LIMIT_SIZE);
	}

	public DiskBasedCache(File rootDirectory, long limitSize) {
		this.rootDirectory = rootDirectory;
		this.limitSize = limitSize;
	}

	@Override
	public synchronized Entry get(String key) {
		CacheHeader entry = entries.get(key);
		if(entry == null) {
			return null;
		}
		File file = getFileForKey(key);
		CountingInputStream is = null; 
		try {
			is = new CountingInputStream(new BufferedInputStream(new FileInputStream(file)));
			CacheHeader.readHeader(is);
			byte[] data = streamToBytes(is, (int)(file.length() - is.bytesRead));
			return entry.toCacheEntry(data);
		} catch(IOException | NegativeArraySizeException e) {
			Log.d("%s: %s", file.getAbsolutePath(), e.toString());
			remove(key);
			return null;
		} finally {
			if(is != null) {
				try {
					is.close();
				} catch(IOException ignored) {}
			}
		}
	}

	@Override
	public synchronized void put(String key, Entry entry) {
		pruneIfNeeded(entry.data.length);
		File file = getFileForKey(key);
		try {
			BufferedOutputStream os = new BufferedOutputStream(new FileOutputStream(file));
			CacheHeader e = new CacheHeader(key, entry);
			boolean success = e.writeHeader(os);
			if(!success) {
				os.close();
				Log.d("Failed to write header for %s", file.getAbsolutePath());
				throw new IOException();
			}
			os.write(entry.data);
			os.close();
			putEntry(key, e);
			return;
		} catch(IOException e) {
			// fall through and clean up the partially written file below
		}
		boolean deleted = file.delete();
		if(!deleted) {
			Log.d("Could not clean up file %s", file.getAbsolutePath());
		}
	}

	@Override
	public synchronized void initialize() {
		if(!rootDirectory.exists()) {
			if(!rootDirectory.mkdirs()) {
				Log.e("Unable to create cache dir: %s", rootDirectory.getAbsolutePath());
			}
		}

		File[] files = rootDirectory.listFiles();
		if(files == null) {
			return;
		}
		for(File file : files) {
			InputStream is = null; 
			try {
				is = new BufferedInputStream(new FileInputStream(file));
				CacheHeader entry = CacheHeader.readHeader(is);
				entry.size = file.length();
				putEntry(entry.key, entry);
			} catch(IOException e) {
				if(file != null) file.delete();
			} finally {
				if(is != null) {
					try {
						is.close();
					} catch(IOException ignored) {}
				}
			}
		}
	}

	@Override
	public synchronized void invalidate(String key, boolean fullExpire) {
		Entry entry = get(key); // re-read the full entry so it can be re-written with the new TTLs
		if(entry != null) {
			entry.softTtl = 0;
			if(fullExpire) {
				entry.ttl = 0;
			}
			put(key, entry);
		}
	}

	@Override
	public synchronized void remove(String key) {
		boolean deleted = getFileForKey(key).delete();
		removeEntry(key);
		if(!deleted) {
			Log.d("Could not delete cache entry for key=%s, filename=%s", key, getFilenameForKey(key));
		}
	}

	@Override
	public synchronized void clear() {
		File[] files = rootDirectory.listFiles();
		if (files != null) {
			for(File file : files) {
				file.delete();
			}
		}
		entries.clear();
		totalSize = 0;
		Log.d("Cache cleared");
	}

	private String getFileNameForKey(String key) {
		int firstHalfLength = key.length() / 2;
		String localFileName = String.valueOf(key.substring(0, firstHalfLength).hashCode());
		localFileName += String.valueOf(key.substring(firstHalfLength).hashCode());
		return localFileName;
	}

	private File getFileForKey(String key) {
		return new File(rootDirectory, getFileNameForKey(key));
	}

	private void removeEntry(String key) {
		CacheHeader entry = entries.get(key);
		if(entry != null) {
			totalSize -= entry.size;
			entries.remove(key);
		}
	}

	private void putEntry(String key, CacheHeader entry) {
		if(entries.containsKey(key)) {
			totalSize += (entry.size - entries.get(key).size);
		}  else {
			totalSize += entry.size;
		}
		entries.put(key, entry);
	}

	private void pruneIfNeeded(int neededSpace) {
		if(totalSize + neededSpace < limitSize) {
			return;
		}
		Log.v("Pruning old cache entries");
		long before = totalSize;
		int prunedFiles = 0;
		long startTime = SystemClock.elapsedRealtime();

		Iterator<Map.Entry<String, CacheHeader>> iterator = entries.entrySet().iterator();
		while(iterator.hasNext()) {
			Map.Entry<String, CacheHeader> entry = iterator.next();
			CacheHeader e = entry.getValue();
			File file = getFileForKey(e.key);
			boolean deleted = file.delete();
			if(deleted) {
				totalSize -= e.size;
			} else {
				Log.d("Could not clean up file %s", file.getAbsolutePath());
			}
			iterator.remove();
			prunedFiles++;

			if(totalSize + neededSpace < limitSize * HYSTERESIS_FACTOR) {
				break;
			}
		}
		Log.v("pruned %d files, %d bytes, %d ms",
		 prunedFiles, (totalSize - before), SystemClock.elapseRealtime() - startTime);
	}

	private static class CountingInputStream extends FilterInputStream {
		private int bytesRead = 0;

		private CountingInputStream(InputStream in) {
			super(in);
		}

		@Override
		public int read() throws IOException {
			int result = super.read();
			if(result != -1) {
				bytesRead++;
			}
			return result;
		}

		@Override
		public int read(byte[] buffer, int offset, int count) throws IOException {
			int result = super.read(buffer, offset, count);
			if(result != -1) {
				bytesRead += result;
			}
			return result;
		}
	}

	private static byte[] streamToBytes(InputStream in, int length) throws IOException {
		byte[] bytes = new byte[length];
		int readLength = 0;
		int pos = 0;
		int left = length;
		while(left > 0 && (readLength = in.read(bytes, pos, left)) != -1) {
			pos += readLength;
			left -= readLength;
		}
		if (pos != length) {
			throw new IOException("Excepted " + length + " bytes, read " + pos + " bytes");
		}
		return bytes;
	}

	static class CacheHeader {
		public long size;
		public String key;
		public String etag;
		public long serverDate;
		public long lastModified;
		public long ttl;
		public long softTtl;
		public Map<String, String> responseHeaders;

		private CacheHeader() { }

		public CacheHeader(String key, Entry entry) {
			this.key = key;
			this.size = entry.data.length;
			this.etag = entry.etag;
			this.serverDate = entry.serverDate;
			this.lastModified = entry.lastModified;
			this.ttl = entry.ttl;
			this.softTtl = entry.softTtl;
			this.responseHeaders = entry.responseHeaders;
		}


	    public static CacheHeader readHeader(InputStream is) throws IOException {
	    	CacheHeader entry = new CacheHeader();
	    	int magic = readInt(is);
	    	if(magic != CACHE_MAGIC) {
	    		throw new IOException();
	    	}
	    	entry.key = readString(is);
	    	entry.etag = readString(is);
	    	if(entry.etag.equals("")) {
	    		entry.etag = null;
	    	}
	    	entry.serverDate = readLong(is);
	    	entry.lastModified = readLong(is);
	    	entry.ttl = readLong(is);
	    	entry.softTtl = readLong(is);
	    	entry.responseHeaders = readStringMap(is);
	    	return entry;
	    }

	    public boolean writeHeader(OutputStream os) {
	    	try {
	    		writeInt(os, CACHE_MAGIC);
	    		writeString(os, key);
	    		writeString(os, etag == null ? "" : etag);
	    		writeLong(os, serverDate);
	    		writeLong(os, lastModified);
	    		writeLong(os, ttl);
	    		writeLong(os, softTtl);
	    		writeStringMap(os, responseHeaders);
	    		os.flush();
	    		return true;
    		} catch(IOException e) {
    			Log.e("%s", e.toString());
    			return false;
    		}
	    }

	    public Entry toCacheEntry(byte[] data) {
	    	Entry e = new Entry();
	    	e.data = data;
	    	e.etag = etag;
	    	e.serverDate = serverDate;
	    	e.lastModified = lastModified;
	    	e.ttl = ttl;
	    	e.softTtl = softTtl;
	    	e.responseHeaders = responseHeaders;
	    	return e;
	    }
	}

    private static void writeInt(OutputStream os, int n) throws IOException {
    	os.write((n >> 0) & 0xff);
    	os.write((n >> 8) & 0xff);
    	os.write((n >> 16) & 0xff);
    	os.write((n >> 24) & 0xff);
    }

    private static void writeLong(OutputStream os, long n) throws IOException {
    	os.write((byte)(n >>> 0));
    	os.write((byte)(n >>> 8));
    	os.write((byte)(n >>> 16));
    	os.write((byte)(n >>> 24));
    	os.write((byte)(n >>> 32));
    	os.write((byte)(n >>> 40));
    	os.write((byte)(n >>> 48));
    	os.write((byte)(n >>> 56));
    }

    private static void writeString(OutputStream os, String s) throws IOException {
    	byte[] bytes = s.getBytes("UTF-8");
    	writeLong(os, bytes.length);
    	os.write(bytes, 0, bytes.length);
    }

    private static void writeStringMap(OutputStream os, Map<String, String> m) throws IOException {
    	if(m != null) {
    		writeInt(os, m.size());
    		for(Map.Entry<String, String> entry : m.entrySet()) {
    			writeString(os, entry.getKey());
    			writeString(os, entry.getValue());
    		}
		} else {
			writeInt(os, 0);
		}
    }

	private static int read(InputStream is) throws IOException {
		int n = is.read();
		if( n == -1) {
			throw new IOException();
		}
		return n;
	}
    
    private static int readInt(InputStream is) throws IOException {
    	int n = 0;
    	n |= (read(is) << 0);
    	n |= (read(is) << 8);
    	n |= (read(is) << 16);
    	n |= (read(is) << 24);
    	return n;
    }

    private static long readLong(InputStream is) throws IOException {
    	long n = 0;
    	n |= ((read(is) & 0xFFL) << 0);
    	n |= ((read(is) & 0xFFL) << 8);
    	n |= ((read(is) & 0xFFL) << 16);
    	n |= ((read(is) & 0xFFL) << 24);
    	n |= ((read(is) & 0xFFL) << 32);
    	n |= ((read(is) & 0xFFL) << 40);
    	n |= ((read(is) & 0xFFL) << 48);
    	n |= ((read(is) & 0xFFL) << 56);
    	return n;
    }

    private static String readString(InputStream is) throws IOException {
    	int n = (int) readLong(is);
    	byte[] bytes = streamToBytes(is, n);
    	return new String(bytes, "UTF-8");
    }

    private static Map<String, String> readStringMap(InputStream is) throws IOException {
    	int n = readInt(is);
    	Map<String, String> m = (n == 0) 
    					? Collections.<String, String>emptyMap() 
    					: new HashMap<String, String>(n);
    	for(int i = 0; i < n; ++i) {
    		String key = readString(is).intern();
    		String value = readString(is).intern();
    		m.put(key, value);
    	}
    	return m;
    }

}
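
For reference, a brief usage sketch follows. It assumes an Android environment (DiskBasedCache relies on android.os.SystemClock), a Context named context, and that initialize() runs off the main thread before the first get or put; the directory name, the 10 MB limit, and the key are arbitrary choices for illustration.

// Usage sketch; `context`, the directory name, the size limit, and the key are assumptions.
File cacheDir = new File(context.getCacheDir(), "volley-cache");
Cache cache = new DiskBasedCache(cacheDir, 10 * 1024 * 1024);
cache.initialize();                                              // scans existing cache files into memory

String key = "https://example.com/api/items";                    // keys are typically request URLs
Cache.Entry cached = cache.get(key);
if (cached == null || cached.isExpired()) {
	// nothing usable on disk: fetch from the network, then store it with cache.put(key, freshEntry)
} else if (cached.refreshNeeded()) {
	// serve the stale bytes now and revalidate in the background (e.g. using cached.etag)
}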