@inproceedings{96f8c183bf5f4a0ab54d2b67cb20a0ec,
  title     = {From {Markov} Chains to Stochastic Games},
  author    = {Neyman, Abraham},
  editor    = {Neyman, Abraham and Sorin, Sylvain},
  booktitle = {Stochastic Games and Applications},
  series    = {NATO Science Series C: Mathematical and Physical Sciences},
  publisher = {Springer},
  address   = {Dordrecht},
  year      = {2003},
  pages     = {9--25},
  doi       = {10.1007/978-94-010-0189-2_2},
  isbn      = {978-94-010-0189-2},
  language  = {English},
  abstract  = {Markov chains and Markov decision processes (MDPs) are special cases of stochastic games. Markov chains describe the dynamics of the states of a stochastic game where each player has a single action in each state. Similarly, the dynamics of the states of a stochastic game form a Markov chain whenever the players' strategies are stationary. Markov decision processes are stochastic games with a single player. In addition, the decision problem faced by a player in a stochastic game when all other players choose a fixed profile of stationary strategies is equivalent to an MDP.},
}