Question

I'm trying to remove some duplication from this code, and have it easily support functions with more parameters.

How would you improve this code and allow for more complex functions?

Also, I'm worried about my key generation: some objects won't serialize to a string distinctly and just return their type name, not a unique value. Suggestions?
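For example, a type that doesn't override ToString just reports its type name, so two different instances produce the same key fragment (the Customer class here is only an illustration):

public class Customer
{
    public int Id { get; set; }
}

// Both lines print the type name (e.g. "MyApp.Customer"),
// even though the two instances are different.
Console.WriteLine(new Customer { Id = 1 });
Console.WriteLine(new Customer { Id = 2 });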

Edit: I've used ChaosPandion's answer and got it down to this:

using System;
using System.Web.Caching;

public static class Memoize
{
    public static Cache LocalCache = System.Web.HttpRuntime.Cache ?? System.Web.HttpContext.Current.Cache;

    public static TResult ResultOf<TArg1, TResult>(Func<TArg1, TResult> func, long durationInSeconds, TArg1 arg1)
    {
        var key = HashArguments(func.Method.Name, arg1);
        return ResultOf(key, durationInSeconds, () => func(arg1));
    }

    public static TResult ResultOf<TArg1, TArg2, TResult>(Func<TArg1, TArg2, TResult> func, long durationInSeconds, TArg1 arg1, TArg2 arg2)
    {
        var key = HashArguments(func.Method.Name, arg1, arg2);
        return ResultOf(key, durationInSeconds, () => func(arg1, arg2));
    }

    public static TResult ResultOf<TArg1, TArg2, TArg3, TResult>(Func<TArg1, TArg2, TArg3, TResult> func, long durationInSeconds, TArg1 arg1, TArg2 arg2, TArg3 arg3)
    {
        var key = HashArguments(func.Method.Name, arg1, arg2, arg3);
        return ResultOf(key, durationInSeconds, () => func(arg1, arg2, arg3));
    }

    public static TResult ResultOf<TArg1, TArg2, TArg3, TArg4, TResult>(Func<TArg1, TArg2, TArg3, TArg4, TResult> func, long durationInSeconds, TArg1 arg1, TArg2 arg2, TArg3 arg3, TArg4 arg4)
    {
        var key = HashArguments(func.Method.Name, arg1, arg2, arg3, arg4);
        return ResultOf(key, durationInSeconds, () => func(arg1, arg2, arg3, arg4));
    }

    private static TResult ResultOf<TResult>(string key, long durationInSeconds, Func<TResult> func)
    {
        return LocalCache.Get(key) != null
                   ? (TResult)LocalCache.Get(key)
                   : CacheResult(key, durationInSeconds, func());
    }

    public static void Reset()
    {
        var enumerator = LocalCache.GetEnumerator();
        while (enumerator.MoveNext())
            LocalCache.Remove(enumerator.Key.ToString());
    }

    private static T CacheResult<T>(string key, long durationInSeconds, T value)
    {
        LocalCache.Insert(key, value, null, DateTime.Now.AddSeconds(durationInSeconds), new TimeSpan());
        return value;
    }

    private static string HashArguments(params object[] args)
    {
        if (args == null)
            return "noargs";

        var result = 23;
        for (var i = 0; i < args.Length; i++)
        {
            var arg = args[i];
            if (arg == null)
            {
                result *= (31 * i + 1);
                continue;
            }
            result *= (31 * arg.GetHashCode());
        }
        return result.ToString();
    }
}

Solution

Here is my version, which removes quite a bit of duplication and should produce a much more reliable range of keys.

EDIT

My latest version produces a pretty reliable distribution. Take a look at the example I placed in the Main method.

using System;
using System.Web.Caching;

class Program
{

    public static class Memoize
    {
        public static Cache LocalCache = 
            System.Web.HttpRuntime.Cache ?? System.Web.HttpContext.Current.Cache;

        public static TResult ResultOf<TArg1, TResult>(
            Func<TArg1, TResult> func, 
            TArg1 arg1, 
            long durationInSeconds)
        {
            var key = HashArguments(
                func.Method.DeclaringType.GUID,
                typeof(TArg1).GUID, 
                (object)arg1);
            return Complete(key, durationInSeconds, () => func(arg1));
        }

        public static TResult ResultOf<TArg1, TArg2, TResult>(
            Func<TArg1, TArg2, TResult> func, 
            TArg1 arg1, 
            TArg2 arg2, 
            long durationInSeconds)
        {
            var key = HashArguments(
                func.Method.DeclaringType.GUID,
                typeof(TArg1).GUID, 
                (object)arg1, 
                typeof(TArg2).GUID, 
                (object)arg2);
            return Complete(key, durationInSeconds, () => func(arg1, arg2));
        }

        public static TResult ResultOf<TArg1, TArg2, TArg3, TResult>(
            Func<TArg1, TArg2, TArg3, TResult> func, 
            TArg1 arg1, 
            TArg2 arg2, 
            TArg3 arg3, 
            long durationInSeconds)
        {
            var key = HashArguments(
                func.Method.DeclaringType.GUID,
                typeof(TArg1).GUID, 
                (object)arg1, 
                typeof(TArg2).GUID, 
                (object)arg2, 
                typeof(TArg3).GUID, 
                (object)arg3);
            return Complete(key, durationInSeconds, () => func(arg1, arg2, arg3));
        }

        public static void Reset()
        {
            var enumerator = LocalCache.GetEnumerator();
            while (enumerator.MoveNext())
                LocalCache.Remove(enumerator.Key.ToString());
        }

        private static T CacheResult<T>(string key, long durationInSeconds, T value)
        {
            LocalCache.Insert(
                key, 
                value, 
                null, 
                DateTime.Now.AddSeconds(durationInSeconds), 
                new TimeSpan()); // zero TimeSpan = no sliding expiration (Cache.NoSlidingExpiration)
            return value;
        }

        static T Complete<T>(string key, long durationInSeconds, Func<T> valueFunc)
        {
            return LocalCache.Get(key) != null
               ? (T)LocalCache.Get(key)
               : CacheResult(key, durationInSeconds, valueFunc());
        }

        static string HashArguments(params object[] args)
        {
            if (args == null)
                return "null args";

            // Standard prime-based hash combine over the argument hash codes.
            int result = 23;
            for (int i = 0; i < args.Length; i++)
            {
                var arg = args[i];
                if (arg == null)
                {
                    result = 31 * result + (i + 1);
                    continue;
                }
                result = 31 * result + arg.GetHashCode();
            }
            return result.ToString();
        }
    }

    static int test(int a, int b)
    {
        return a + b;
    }

    private static class Inner
    {
        public static int test(int a, int b)
        {
            return a + b;
        }
    }

    static int test(int a, object b)
    {
        return a + (int)b;
    }

    static void Main(string[] args)
    {
        Memoize.ResultOf<int, int, int>(test, 1, 2, 100000);
        Memoize.ResultOf<int, int, int>(test, 2, 1, 100000);
        Memoize.ResultOf<int, int, int>(Inner.test, 1, 2, 100000);
        Memoize.ResultOf<int, int, int>(Inner.test, 2, 1, 100000);
        Memoize.ResultOf<int, object, int>(test, 1, 2, 100000);
        Memoize.ResultOf<int, object, int>(test, 2, 1, 100000);
    }
}

OTHER TIPS

The basic idea in ChaosPandion's code is correct, but there are some fatal flaws that must be fixed in order for this to work reliably.

1) Rather than using a hash of hashes, you need to make a genuinely unique key. One way would be to concatenate the string representations of each element in the parameter array, with a delimiter. This way, once a result has been memoized, it will consistently be retrieved, without false positives.
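A minimal sketch of that idea (the BuildKey name and the delimiter are my own choices, not taken from either version above):

private static string BuildKey(params object[] args)
{
    if (args == null || args.Length == 0)
        return "noargs";

    var builder = new System.Text.StringBuilder();
    foreach (var arg in args)
    {
        // Append each argument's string form, separated by a character that is
        // unlikely to appear inside the values, so "ab" + "c" never equals "a" + "bc".
        builder.Append(arg == null ? "<null>" : arg.ToString());
        builder.Append('\u001F');
    }
    return builder.ToString();
}

The ResultOf overloads would then pass their arguments to BuildKey instead of HashArguments and use the resulting string directly as the cache key.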

ChaosPandion's trick of mixing in the type GUIDs is still useful here, but the concatenation approach avoids depending on GetHashCode being truly unique, and on a fairly simple hash of hashes staying unique. The cache will still hash the full key string internally, but it also does a character-by-character comparison on the key, so mismatches are ruled out.

Admittedly, concatenating a big string is not particularly elegant and might even impact performance. Moreover, this still leaves you with the issue of classes whose ToString does not give instance-specific information. However, there are solutions for this based on reflection.
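As a rough illustration of the reflection route (the Describe helper below is hypothetical; it only looks at readable public properties and ignores fields, indexers and nested object graphs):

private static string Describe(object arg)
{
    if (arg == null)
        return "<null>";

    var type = arg.GetType();

    // Primitives, strings and common value types already stringify distinctly.
    if (type.IsPrimitive || arg is string || arg is decimal || arg is DateTime)
        return arg.ToString();

    // Otherwise build a string from the public property values via reflection.
    var builder = new System.Text.StringBuilder(type.FullName).Append('{');
    foreach (var property in type.GetProperties())
    {
        // Indexers need index arguments, so skip them.
        if (property.GetIndexParameters().Length > 0)
            continue;
        builder.Append(property.Name)
               .Append('=')
               .Append(property.GetValue(arg, null) ?? "<null>")
               .Append(';');
    }
    return builder.Append('}').ToString();
}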

2) A very simple optimization would be to call LocalCache.Get just once in Complete. This is a potentially expensive operation, so there's really no excuse for doubling the cost.
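For example, Complete could be written along these lines (same behaviour, one lookup instead of two):

static T Complete<T>(string key, long durationInSeconds, Func<T> valueFunc)
{
    // Fetch once and reuse the result rather than calling Get twice.
    var cached = LocalCache.Get(key);
    return cached != null
        ? (T)cached
        : CacheResult(key, durationInSeconds, valueFunc());
}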

Other than that, go with what ChaosPandion suggested.

Licensed under: CC-BY-SA with attribution