The analysis of random replacement caches is an area that has recently attracted considerable attention in the field of probabilistic real-time systems. A major problem with performing static analysis on such a cache is that the relatively large number of successor states on a cache miss (equal to the cache associativity) renders approaches such as Collecting Semantics intractable. Other approaches must contend with non-trivial behaviours, such as the non-independence of accesses to the cache, which tends to lead to overly pessimistic or computationally expensive analyses. Utilising techniques from the field of Lossy Compression, where compactly representing large volumes of data without losing valuable data is the norm, this paper outlines a technique for applying compression to the Collecting Semantics of a Random Replacement Cache. This yields a Must and May analysis. Experimental evaluation shows that, with appropriate parameters, this technique is more accurate and significantly faster than current state-of-the-art techniques.

BibTeX Entry

@inproceedings{Griffin2014,
 acmid = {2659809},
 address = {New York, NY, USA},
 articleno = {289},
 author = {Griffin, David and Lesage, Benjamin and Burns, Alan and Davis, Robert I.},
 booktitle = {Proceedings of the 22nd International Conference on Real-Time Networks and Systems},
 doi = {10.1145/2659787.2659809},
 isbn = {978-1-4503-2727-5},
 location = {Versailles, France},
 numpages = {10},
 pages = {289:289--289:298},
 publisher = {ACM},
 series = {RTNS '14},
 title = {Static Probabilistic Timing Analysis of Random Replacement Caches Using Lossy Compression},
 url = {http://doi.acm.org/10.1145/2659787.2659809},
 year = {2014}
}