Stan 1.0
probability, sampling & optimization
pareto.hpp
Go to the documentation of this file.
1 #ifndef __STAN__PROB__DISTRIBUTIONS__PARETO_HPP__
2 #define __STAN__PROB__DISTRIBUTIONS__PARETO_HPP__
3 
4 #include <stan/agrad.hpp>
7 #include <stan/meta/traits.hpp>
9 #include <stan/prob/traits.hpp>
10 
11 
12 namespace stan {
13  namespace prob {
14 
15  // Pareto(y|y_m,alpha) [y > y_m; y_m > 0; alpha > 0]
16  template <bool propto,
17  typename T_y, typename T_scale, typename T_shape,
18  class Policy>
20  pareto_log(const T_y& y, const T_scale& y_min, const T_shape& alpha,
21  const Policy&) {
22  static const char* function = "stan::prob::pareto_log(%1%)";
23 
29 
30 
31  // check if any vectors are zero length
32  if (!(stan::length(y)
33  && stan::length(y_min)
34  && stan::length(alpha)))
35  return 0.0;
36 
37  // set up return value accumulator
38  double logp(0.0);
39 
40  // validate args (here done over var, which should be OK)
41  if (!check_not_nan(function, y, "Random variable", &logp, Policy()))
42  return logp;
43  if (!check_finite(function, y_min, "Scale parameter",
44  &logp, Policy()))
45  return logp;
46  if (!check_positive(function, y_min, "Scale parameter",
47  &logp, Policy()))
48  return logp;
49  if (!check_finite(function, alpha, "Shape parameter",
50  &logp, Policy()))
51  return logp;
52  if (!check_positive(function, alpha, "Shape parameter",
53  &logp, Policy()))
54  return logp;
55  if (!(check_consistent_sizes(function,
56  y,y_min,alpha,
57  "Random variable","Scale parameter","Shape parameter",
58  &logp, Policy())))
59  return logp;
60 
61  // check if no variables are involved and prop-to
63  return 0.0;
64 
65  VectorView<const T_y> y_vec(y);
66  VectorView<const T_scale> y_min_vec(y_min);
67  VectorView<const T_shape> alpha_vec(alpha);
68  size_t N = max_size(y, y_min, alpha);
69 
70  for (size_t n = 0; n < N; n++) {
71  if (y_vec[n] < y_min_vec[n])
72  return LOG_ZERO;
73  }
74 
75  // set up template expressions wrapping scalars into vector views
76  agrad::OperandsAndPartials<T_y,T_scale,T_shape> operands_and_partials(y, y_min, alpha);
77 
80  for (size_t n = 0; n < length(y); n++)
81  log_y[n] = log(value_of(y_vec[n]));
84  for (size_t n = 0; n < length(y); n++)
85  inv_y[n] = 1 / value_of(y_vec[n]);
87  log_y_min(length(y_min));
89  for (size_t n = 0; n < length(y_min); n++)
90  log_y_min[n] = log(value_of(y_min_vec[n]));
93  for (size_t n = 0; n < length(alpha); n++)
94  log_alpha[n] = log(value_of(alpha_vec[n]));
95 
98  for (size_t n = 0; n < length(alpha); n++)
99  inv_alpha[n] = 1 / value_of(alpha_vec[n]);
100 
102 
103  for (size_t n = 0; n < N; n++) {
104  const double alpha_dbl = value_of(alpha_vec[n]);
105  // log probability
107  logp += log_alpha[n];
109  logp += alpha_dbl * log_y_min[n];
111  logp -= alpha_dbl * log_y[n] + log_y[n];
112 
113  // gradients
115  operands_and_partials.d_x1[n] -= alpha_dbl * inv_y[n] + inv_y[n];
117  operands_and_partials.d_x2[n] += alpha_dbl / value_of(y_min_vec[n]);
119  operands_and_partials.d_x3[n] += 1 / alpha_dbl + log_y_min[n] - log_y[n];
120  }
121  return operands_and_partials.to_var(logp);
122  }
123 
124 
125  template <bool propto,
126  typename T_y, typename T_scale, typename T_shape>
127  inline
129  pareto_log(const T_y& y, const T_scale& y_min, const T_shape& alpha) {
130  return pareto_log<propto>(y,y_min,alpha,stan::math::default_policy());
131  }
132 
133  template <typename T_y, typename T_scale, typename T_shape,
134  class Policy>
135  inline
137  pareto_log(const T_y& y, const T_scale& y_min, const T_shape& alpha,
138  const Policy&) {
139  return pareto_log<false>(y,y_min,alpha,Policy());
140  }
141 
142  template <typename T_y, typename T_scale, typename T_shape>
143  inline
145  pareto_log(const T_y& y, const T_scale& y_min, const T_shape& alpha) {
146  return pareto_log<false>(y,y_min,alpha,stan::math::default_policy());
147  }
148 
149 
150  }
151 }
152 #endif
double value_of(const agrad::var &v)
Return the value of the specified variable.
var log(const var &a)
Return the natural log of the specified variable (cmath).
Definition: agrad.hpp:1730
boost::math::tools::promote_args< T_a, T_b >::type multiply_log(T_a a, T_b b)
double value_of(T x)
Return the value of the specified scalar argument converted to a double value.
bool check_not_nan(const char *function, const T_y &y, const char *name, T_result *result, const Policy &)
Checks if the variable y is nan.
bool check_consistent_sizes(const char *function, const T1 &x1, const T2 &x2, const char *name1, const char *name2, T_result *result, const Policy &)
bool check_positive(const char *function, const T_y &y, const char *name, T_result *result, const Policy &)
bool check_finite(const char *function, const T_y &y, const char *name, T_result *result, const Policy &)
Checks if the variable y is finite.
boost::math::policies::policy default_policy
Default error-handling policy from Boost.
return_type< T_y, T_scale, T_shape >::type pareto_log(const T_y &y, const T_scale &y_min, const T_shape &alpha, const Policy &)
Definition: pareto.hpp:20
Probability, optimization and sampling library.
Definition: agrad.cpp:6
size_t length(const T &x)
Definition: traits.hpp:111
size_t max_size(const T1 &x1, const T2 &x2)
Definition: traits.hpp:148
A variable implementation that stores operands and derivatives with respect to the variable.
VectorView< double *, is_vector< T1 >::value > d_x1
VectorView< double *, is_vector< T2 >::value > d_x2
T_return_type to_var(double logp)
VectorView< double *, is_vector< T3 >::value > d_x3
Metaprogram to determine if a type has a base scalar type that can be assigned to type double.
Definition: traits.hpp:39
Template metaprogram to calculate whether a summand needs to be included in a proportional (log) prob...
Definition: traits.hpp:33
boost::math::tools::promote_args< typename scalar_type< T1 >::type, typename scalar_type< T2 >::type, typename scalar_type< T3 >::type, typename scalar_type< T4 >::type, typename scalar_type< T5 >::type, typename scalar_type< T6 >::type >::type type
Definition: traits.hpp:368

     [ Stan Home Page ] © 2011–2012, Stan Development Team.