Markov chain model

A Markov chain is a system with at least two states that switches between those states at random; the probability of moving to the next state depends only on the current state, not on the earlier history. I would like to define a Markov chain for a stock, AAPL (disclaimer: I own AAPL shares). Let's say that we have three states: flat F, up U, and down D. We can determine the state for each trading day from the end-of-day close prices.
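For instance, with a handful of made-up close prices (not actual AAPL data), the sign of the day-to-day difference already encodes the three states: -1 for down D, 0 for flat F, and 1 for up U.

import numpy

# toy close prices, purely illustrative
close = [100.0, 101.5, 101.5, 99.8, 100.2]
states = numpy.sign(numpy.diff(close))
print states    # [ 1.  0. -1.  1.] -> U, F, D, U

The full program below does the same with a year of AAPL quotes, builds the transition matrix, and looks for its steady state.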

#!/usr/bin/env python
from matplotlib.finance import quotes_historical_yahoo
from datetime import date
import numpy

today = date.today()
start = (today.year - 1, today.month, today.day)

quotes = quotes_historical_yahoo('AAPL', start, today)

# extract the close price from each quote
close = [q[4] for q in quotes]

# sign of the day-to-day difference: -1 is down D, 0 is flat F, 1 is up U
states = numpy.sign(numpy.diff(close))

NDIM = 3
SM = numpy.zeros((NDIM, NDIM))
signs = [-1, 0, 1]

# additive (Laplace) smoothing constant for the transition counts
k = 1

for i in xrange(len(signs)):
    # we start the transition from the state with the specified sign
    start_indices = numpy.where(states[:-1] == signs[i])[0]

    N = len(start_indices) + k * NDIM

    # skip since there are no transitions possible
    # (cannot occur while k > 0, kept as a safeguard)
    if N == 0:
        continue

    # find the values of states at the end positions
    end_values = states[start_indices + 1]

    for j in xrange(len(signs)):
        # number of occurrences of this transition
        occurrences = len(end_values[end_values == signs[j]])
        SM[i][j] = (occurrences + k)/float(N)

print SM

eig_out = numpy.linalg.eig(SM)
print eig_out

# select the eigenvalue closest to 1
idx_vec = numpy.where(numpy.abs(eig_out[0] - 1) < 0.1)
print "Index eigenvalue 1", idx_vec

x = eig_out[1][:,idx_vec].flatten()
print "Steady state vector", x
print "Check", numpy.dot(SM, x)

This produces output similar to the following (the exact numbers depend on the year of quotes downloaded):

[[ 0.54464286  0.00892857  0.44642857]
 [ 0.33333333  0.33333333  0.33333333]
 [ 0.35664336  0.00699301  0.63636364]]
(array([ 1.        ,  0.18832641,  0.32601342]), array([[ 0.57735027,  0.73359882,  0.00383995],
       [ 0.57735027, -0.35679605, -0.99982852],
       [ 0.57735027, -0.57838513,  0.01811603]]))
Index eigenvalue 1 (array([0]),)
Steady state vector [ 0.57735027  0.57735027  0.57735027]
Check [ 0.57735027  0.57735027  0.57735027]
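Note that the vector found above has three equal components because a row-stochastic matrix always has the all-ones vector as a right eigenvector for eigenvalue 1 (every row of SM sums to 1). The steady-state distribution in the sense of pi * SM = pi is the corresponding left eigenvector, which can be obtained as a right eigenvector of the transpose. Here is a minimal sketch of that computation, reusing the SM built above and normalizing the result so it sums to 1:

# the left eigenvector of SM for eigenvalue 1 is a right eigenvector of SM.T
vals, vecs = numpy.linalg.eig(SM.T)
idx = numpy.where(numpy.abs(vals - 1) < 0.1)[0][0]
pi = numpy.real(vecs[:, idx])
# scale to a probability distribution
pi = pi / pi.sum()
print "Stationary distribution", pi
# pi should be (approximately) unchanged by the transition matrix
print "Check", numpy.dot(pi, SM)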