Statistical Ideas

Thursday, February 16, 2006

Fitting the exponential power (Subbotin, 1923) distribution with SAS/R

/* R code ******************************************************************/
library(MASS)
dep <- function(x, location, scale, tail)  # Exponential power (= Subbotin) density
{
  const <- 2*tail^(1/tail - 1)*gamma(1/tail)
  z <- (x - location)/scale
  exp(-abs(z)^tail/tail)/(scale*const)
}

set.seed(321)
x <- rt(100,df=3)
write.csv(x,file="c:\\temp\\x.csv")
fitdistr(x, dep, start=list(location=0, scale=1, tail=1.5))

/* SAS Code **************************************************************/
proc import datafile="c:\temp\x.csv" out=x dbms=csv replace; run;
proc nlmixed data=x;
const = 2*tail**(1/tail-1) *gamma(1/tail);
z = (x-location)/scale;
like = exp(-abs(z)**tail/tail)/(scale*const);
loglike = log(like);

model x ~ general(loglike);
run;
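
/* A minimal sketch (my addition): the same NLMIXED fit with explicit starting
   values mirroring the R call and bounds keeping scale and tail positive;
   without a PARMS statement NLMIXED starts every parameter at 1. */
proc nlmixed data=x;
parms location=0 scale=1 tail=1.5;
bounds scale > 0, tail > 0;
const = 2*tail**(1/tail-1)*gamma(1/tail);
z = (x-location)/scale;
loglike = -abs(z)**tail/tail - log(scale*const);
model x ~ general(loglike);
run;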

Friday, February 10, 2006

SAS shell for iterative analysis

/* Use the following data step to generate a set of random
normal variables with correlation 'rho' between any two
of them.*/

data a;
rho = .5;
c = (rho/(1-rho))**0.5;
do i = 1 to 100;
d = c * rannor(1810);
y = rannor(0) + d;
x1 = rannor(0) + d;
x2 = rannor(0) + d;
x3 = rannor(0) + d;
output;
end;
keep y x1 x2 x3;
run;

proc datasets; delete results pe; quit;

/* Regress y on each variable listed in &iv, one at a time,
   and stack the parameter estimates in data set RESULTS */
%macro mm(iv=);

%let i=1;
%let ivar= %scan(&iv,&i);
%do %while (&ivar NE);

proc reg data=a;
model y = &ivar;
ods output ParameterEstimates=pe;
quit;

proc datasets; append base=results data=pe; quit;

%let i=%eval(&i + 1);
%let ivar=%scan(&iv,&i);
%end;

%mend;

%mm(iv=x1 x2 x3);
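
/* A minimal sketch (my addition): inspect the stacked estimates once the macro
   has run. The ParameterEstimates table from PROC REG already carries a Variable
   column, so each row identifies the regressor it came from. */
proc print data=results;
run;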

Monday, January 30, 2006

Overdispersion for Poisson GLM with SAS/GLIMMIX

/**********************************************************************
* Overdispersion for a Poisson generalized linear model using the
* example from Agresti, Categorical Data Analysis,
* second edition page 151, section 4.7.3
* I have also included the code to reproduce the
* example using the GLIMMIX procedure
***********************************************************************/
data crab;
input color spine width satell weight;
weight=weight/1000; color=color-1;
datalines;
3 3 28.3 8 3050
4 3 22.5 0 1550
2 1 26.0 9 2300
4 3 24.8 0 2100
4 3 26.0 4 2600
3 3 23.8 0 2100
2 1 26.5 0 2350
4 2 24.7 0 1900
3 1 23.7 0 1950
4 3 25.6 0 2150
4 3 24.3 0 2150
3 3 25.8 0 2650
3 3 28.2 11 3050
5 2 21.0 0 1850
3 1 26.0 14 2300
2 1 27.1 8 2950
3 3 25.2 1 2000
3 3 29.0 1 3000
5 3 24.7 0 2200
3 3 27.4 5 2700
3 2 23.2 4 1950
2 2 25.0 3 2300
3 1 22.5 1 1600
4 3 26.7 2 2600
5 3 25.8 3 2000
5 3 26.2 0 1300
3 3 28.7 3 3150
3 1 26.8 5 2700
5 3 27.5 0 2600
3 3 24.9 0 2100
2 1 29.3 4 3200
2 3 25.8 0 2600
3 2 25.7 0 2000
3 1 25.7 8 2000
3 1 26.7 5 2700
5 3 23.7 0 1850
3 3 26.8 0 2650
3 3 27.5 6 3150
5 3 23.4 0 1900
3 3 27.9 6 2800
4 3 27.5 3 3100
2 1 26.1 5 2800
2 1 27.7 6 2500
3 1 30.0 5 3300
4 1 28.5 9 3250
4 3 28.9 4 2800
3 3 28.2 6 2600
3 3 25.0 4 2100
3 3 28.5 3 3000
3 1 30.3 3 3600
5 3 24.7 5 2100
3 3 27.7 5 2900
2 1 27.4 6 2700
3 3 22.9 4 1600
3 1 25.7 5 2000
3 3 28.3 15 3000
3 3 27.2 3 2700
4 3 26.2 3 2300
3 1 27.8 0 2750
5 3 25.5 0 2250
4 3 27.1 0 2550
4 3 24.5 5 2050
4 1 27.0 3 2450
3 3 26.0 5 2150
3 3 28.0 1 2800
3 3 30.0 8 3050
3 3 29.0 10 3200
3 3 26.2 0 2400
3 1 26.5 0 1300
3 3 26.2 3 2400
4 3 25.6 7 2800
4 3 23.0 1 1650
4 3 23.0 0 1800
3 3 25.4 6 2250
4 3 24.2 0 1900
3 2 22.9 0 1600
4 2 26.0 3 2200
3 3 25.4 4 2250
4 3 25.7 0 1200
3 3 25.1 5 2100
4 2 24.5 0 2250
5 3 27.5 0 2900
4 3 23.1 0 1650
4 1 25.9 4 2550
3 3 25.8 0 2300
5 3 27.0 3 2250
3 3 28.5 0 3050
5 1 25.5 0 2750
5 3 23.5 0 1900
3 2 24.0 0 1700
3 1 29.7 5 3850
3 1 26.8 0 2550
5 3 26.7 0 2450
3 1 28.7 0 3200
4 3 23.1 0 1550
3 1 29.0 1 2800
4 3 25.5 0 2250
4 3 26.5 1 1967
4 3 24.5 1 2200
4 3 28.5 1 3000
3 3 28.2 1 2867
3 3 24.5 1 1600
3 3 27.5 1 2550
3 2 24.7 4 2550
3 1 25.2 1 2000
4 3 27.3 1 2900
3 3 26.3 1 2400
3 3 29.0 1 3100
3 3 25.3 2 1900
3 3 26.5 4 2300
3 3 27.8 3 3250
3 3 27.0 6 2500
4 3 25.7 0 2100
3 3 25.0 2 2100
3 3 31.9 2 3325
5 3 23.7 0 1800
5 3 29.3 12 3225
4 3 22.0 0 1400
3 3 25.0 5 2400
4 3 27.0 6 2500
4 3 23.8 6 1800
2 1 30.2 2 3275
4 3 26.2 0 2225
3 3 24.2 2 1650
3 3 27.4 3 2900
3 2 25.4 0 2300
4 3 28.4 3 3200
5 3 22.5 4 1475
3 3 26.2 2 2025
3 1 24.9 6 2300
2 2 24.5 6 1950
3 3 25.1 0 1800
3 1 28.0 4 2900
5 3 25.8 10 2250
3 3 27.9 7 3050
3 3 24.9 0 2200
3 1 28.4 5 3100
4 3 27.2 5 2400
3 2 25.0 6 2250
3 3 27.5 6 2625
3 1 33.5 7 5200
3 3 30.5 3 3325
4 3 29.0 3 2925
3 1 24.3 0 2000
3 3 25.8 0 2400
5 3 25.0 8 2100
3 1 31.7 4 3725
3 3 29.5 4 3025
4 3 24.0 10 1900
3 3 30.0 9 3000
3 3 27.6 4 2850
3 3 26.2 0 2300
3 1 23.1 0 2000
3 1 22.9 0 1600
5 3 24.5 0 1900
3 3 24.7 4 1950
3 3 28.3 0 3200
3 3 23.9 2 1850
4 3 23.8 0 1800
4 2 29.8 4 3500
3 3 26.5 4 2350
3 3 26.0 3 2275
3 3 28.2 8 3050
5 3 25.7 0 2150
3 3 26.5 7 2750
3 3 25.8 0 2200
4 3 24.1 0 1800
4 3 26.2 2 2175
4 3 26.1 3 2750
4 3 29.0 4 3275
2 1 28.0 0 2625
5 3 27.0 0 2625
3 2 24.5 0 2000
;
options pageno=1;
proc genmod data=crab;
model satell = width / dist=poi link=log;
run;
proc genmod data=crab;
model satell = width / dist=poi link=log scale=pearson;
run;
proc glimmix data=crab;
model satell = width / dist=poi link=log solution;
random _residual_;
run;
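
/* A minimal sketch (my addition): capture GLIMMIX's residual scale (the
   multiplicative overdispersion factor) with ODS OUTPUT so it can be compared
   with the Pearson-based SCALE estimate reported by GENMOD above. The data set
   name overdisp is arbitrary. */
proc glimmix data=crab;
model satell = width / dist=poi link=log solution;
random _residual_;
ods output CovParms=overdisp;
run;
proc print data=overdisp noobs;
run;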

Overdispersion for Binomial GLM with SAS/GLIMMIX

/***********************************************************************
* Overdispersion for a Binomial generalized linear model using the
* example from Agresti, Categorical Data Analysis,
* second edition page 150, section 4.7.2
* I have also included the code to reproduce the
* example using the GLIMMIX procedure
**********************************************************************/
data moore;
input litter group n y ;
datalines;
1 1 10 1
2 1 11 4
3 1 12 9
4 1 4 4
5 1 10 10
6 1 11 9
7 1 9 9
8 1 11 11
9 1 10 10
10 1 10 7
11 1 12 12
12 1 10 9
13 1 8 8
14 1 11 9
15 1 6 4
16 1 9 7
17 1 14 14
18 1 12 7
19 1 11 9
20 1 13 8
21 1 14 5
22 1 10 10
23 1 12 10
24 1 13 8
25 1 10 10
26 1 14 3
27 1 13 13
28 1 4 3
29 1 8 8
30 1 13 5
31 1 12 12
32 2 10 1
33 2 3 1
34 2 13 1
35 2 12 0
36 2 14 4
37 2 9 2
38 2 13 2
39 2 16 1
40 2 11 0
41 2 4 0
42 2 1 0
43 2 12 0
44 3 8 0
45 3 11 1
46 3 14 0
47 3 14 1
48 3 11 0
49 4 3 0
50 4 13 0
51 4 9 2
52 4 17 2
53 4 15 0
54 4 2 0
55 4 14 1
56 4 8 0
57 4 6 0
58 4 17 0
;
proc genmod data=moore;
class group;
model y/n = group / dist=bin link=identity noint;
run;
proc genmod data=moore;
class group;
model y/n = group / dist=bin link=identity noint scale=pearson;
run;
proc glimmix data=moore;
class group;
model y/n = group / dist=bin link=identity noint solution;
random _residual_;
run;

Thursday, January 26, 2006

Bernoulli / Lognormal mixture model for left and right censored data using SAS/NLMIXED

/******************************************************************************
* Fit a mixture model consisting of a left- and right-censored lognormal
* distribution and a point mass located below the detection limit.
* The moulton data can be found under the post "Bernoulli / Lognormal
* mixture model for left censored data using SAS/NLMIXED".
* Reference: "A mixture model with detection limits for regression analyses of
* antibody response to vaccine"
* Moulton LH, Halsey NA, Biometrics 1995; 51: 1570–78
*******************************************************************************/

data moulton2;
* here we right-censor all data above 10;
set moulton;
if iu>=10 then iu=10;
run;
proc sort data=moulton2; by iu; run;

proc nlmixed data=moulton2;
liu = log(iu);

if iu<=0.100 then obs=2; * left-censored;
else if iu>=10 then obs=3; * right-censored;
else obs=1; * not censored;

mu = int_mu + gamma1*ez + gamma2*hi + gamma3*fem;
mulogit = int_mulogit + beta1*ez + beta2*hi + beta3*fem;

p = 1/(1+exp(-mulogit)); * Logistic model;

if obs=1 then ll=log(p*(1/(sqrt(2*constant('pi')*sigma)))*exp(-(liu-mu)**2/(2*sigma)));
if obs=2 then ll=log((1-p)+p*probnorm((liu-mu)/sqrt(sigma))) ;
if obs=3 then ll=log(p*(1-probnorm((liu-mu)/sqrt(sigma))));

model liu ~ general(ll);
run;

Wednesday, January 25, 2006

Bernoulli / Lognormal mixture model for left censored data using SAS/NLMIXED

/******************************************************************************
* Fit a mixture model consisting of a left-censored lognormal distribution
* and a point mass located below the detection limit.
* The NLMIXED code below reproduces Table 2, Model F, in the Moulton et al. paper.
* Reference: "A mixture model with detection limits for regression analyses of
* antibody response to vaccine"
* Moulton LH, Halsey NA, Biometrics 1995; 51: 1570–78
*******************************************************************************/

data moulton;
input iu ez hi fem @@;
cards;
0.100 0 0 0 0.100 0 1 1 0.275 0 0 1
0.100 0 0 0 0.100 0 1 1 0.275 0 0 1
0.100 0 0 0 0.100 1 0 0 0.275 0 0 1
0.100 0 0 0 0.100 1 0 0 0.275 0 1 1
0.100 0 0 0 0.100 1 0 0 0.275 1 0 0
0.100 0 0 0 0.100 1 0 0 0.275 1 0 0
0.100 0 0 0 0.100 1 0 0 0.275 1 0 0
0.100 0 0 0 0.100 1 0 0 0.275 1 0 0
0.100 0 0 0 0.100 1 0 0 0.275 1 0 0
0.100 0 0 0 0.100 1 0 0 0.275 1 0 1
0.100 0 0 0 0.100 1 0 0 0.275 1 1 0
0.100 0 0 0 0.100 1 0 0 0.275 1 1 0
0.100 0 0 0 0.100 1 0 0 0.275 1 1 1
0.100 0 0 0 0.100 1 0 1 0.275 1 1 1
0.100 0 0 1 0.100 1 0 1 0.275 1 1 1
0.100 0 0 1 0.100 1 0 1 0.300 0 1 0
0.100 0 0 1 0.100 1 0 1 0.300 0 1 0
0.100 0 0 1 0.100 1 0 1 0.300 1 0 0
0.100 0 0 1 0.100 1 0 1 0.300 1 0 0
0.100 0 0 1 0.100 1 1 0 0.300 1 1 0
0.100 0 0 1 0.100 1 1 0 0.300 1 1 0
0.100 0 0 1 0.100 1 1 0 0.300 1 1 0
0.100 0 0 1 0.100 1 1 0 0.300 1 1 1
0.100 0 0 1 0.100 1 1 1 0.325 0 0 0
0.100 0 0 1 0.100 1 1 1 0.325 0 0 0
0.100 0 0 1 0.100 1 1 1 0.325 0 0 0
0.100 0 0 1 0.100 1 1 1 0.325 0 0 0
0.100 0 0 1 0.100 1 1 1 0.325 0 0 1
0.100 0 0 1 0.100 1 1 1 0.325 1 1 1
0.100 0 0 1 0.100 1 1 1 0.325 1 1 1
0.100 0 0 1 0.100 1 1 1 0.350 0 0 0
0.100 0 0 1 0.200 0 0 1 0.350 0 1 0
0.100 0 1 0 0.200 1 1 0 0.350 0 1 1
0.100 0 1 0 0.200 1 1 0 0.350 0 1 1
0.100 0 1 0 0.225 0 0 0 0.350 1 0 0
0.100 0 1 0 0.225 0 0 0 0.350 1 1 0
0.100 0 1 0 0.225 0 1 0 0.350 1 1 1
0.100 0 1 0 0.225 1 0 0 0.350 1 1 1
0.100 0 1 0 0.225 1 0 0 0.350 1 1 1
0.100 0 1 0 0.225 1 0 0 0.375 0 0 0
0.100 0 1 0 0.225 1 1 1 0.375 0 1 0
0.100 0 1 0 0.225 1 1 1 0.375 0 1 0
0.100 0 1 0 0.250 0 0 0 0.375 0 1 0
0.100 0 1 0 0.250 0 1 0 0.375 0 1 0
0.100 0 1 1 0.250 0 1 0 0.400 0 0 0
0.100 0 1 1 0.250 0 1 1 0.400 0 0 0
0.100 0 1 1 0.250 1 0 1 0.400 0 0 1
0.100 0 1 1 0.250 1 0 1 0.400 0 0 1
0.100 0 1 1 0.250 1 0 1 0.400 0 0 1
0.100 0 1 1 0.250 1 0 1 0.400 0 0 1
0.100 0 1 1 0.250 1 1 1 0.400 0 1 0
0.100 0 1 1 0.275 0 0 0 0.400 1 0 0
0.100 0 1 1 0.275 0 0 0 0.400 1 0 1
0.100 0 1 1 0.275 0 0 0 0.400 1 0 1
0.100 0 1 1 0.275 0 0 1 0.400 1 1 0
0.400 1 1 0 0.825 0 0 0 1.950 1 1 0
0.400 1 1 1 0.825 0 1 1 1.975 1 1 0
0.400 1 1 1 0.825 1 1 0 2.050 0 1 0
0.400 1 1 1 0.850 0 1 0 2.075 1 1 0
0.425 0 0 0 0.850 1 0 0 2.100 0 0 0
0.425 0 0 1 0.875 0 1 0 2.100 0 1 1
0.425 0 1 0 0.875 0 1 1 2.215 1 1 0
0.425 0 1 1 0.900 1 0 1 2.175 0 1 0
0.425 1 0 1 0.925 0 1 0 2.300 1 1 1
0.425 1 0 1 0.925 0 1 0 2.350 0 0 1
0.425 1 1 0 0.925 1 1 1 2.400 0 1 1
0.425 1 1 0 0.925 1 1 1 2.450 1 0 1
0.450 0 1 0 0.950 1 0 0 2.525 0 0 0
0.450 0 1 1 0.950 1 1 0 2.550 1 1 1
0.450 1 1 0 0.950 1 1 1 2.600 1 0 0
0.450 1 1 1 0.975 0 1 0 2.600 1 1 0
0.475 0 1 0 1.000 0 1 0 2.625 0 1 1
0.475 0 1 0 1.000 0 1 1 2.625 1 1 0
0.475 0 1 0 1.000 1 0 1 2.700 0 0 0
0.475 0 1 1 1.025 0 0 0 2.700 0 0 1
0.475 0 1 1 1.025 0 1 0 2.800 1 0 1
0.475 1 0 0 1.025 0 1 0 3.050 0 1 1
0.475 1 0 1 1.025 1 1 0 3.275 0 0 1
0.475 1 0 1 1.025 1 1 1 3.275 1 0 0
0.500 0 0 1 1.050 0 0 1 3.450 0 1 1
0.500 0 1 0 1.050 0 1 1 3.475 0 1 1
0.500 0 1 1 1.100 1 0 1 2.525 1 1 0
0.500 1 1 1 1.125 1 1 1 3.875 0 0 0
0.500 1 1 1 1.125 1 1 1 3.875 1 1 1
0.525 0 1 1 1.150 0 0 0 4.375 1 0 1
0.550 0 0 0 1.150 0 1 0 4.400 1 0 0
0.550 1 0 0 1.150 1 0 0 4.500 1 1 1
0.550 1 0 1 1.175 1 0 1 4.950 0 0 0
0.575 1 0 0 1.200 1 0 0 4.950 0 0 1
0.575 1 0 0 1.300 0 1 1 4.975 1 1 1
0.575 1 0 1 1.300 1 0 1 5.375 1 1 1
0.575 1 1 0 1.300 1 1 1 5.400 0 0 0
0.575 1 1 0 1.325 1 0 1 5.425 1 0 1
0.600 0 1 0 1.350 0 1 0 5.650 0 1 0
0.600 1 0 0 1.400 1 1 1 5.800 1 1 1
0.650 0 0 1 1.425 1 0 1 6.450 0 1 0
0.650 1 0 1 1.450 0 1 1 6.550 1 0 1
0.675 0 0 0 1.475 0 1 0 6.775 0 0 1
0.725 0 0 1 1.500 0 1 1 6.825 0 1 0
0.725 1 1 0 1.500 1 1 1 7.050 0 0 1
0.750 1 0 0 1.525 0 0 1 7.250 0 1 1
0.775 0 0 1 1.525 1 0 1 7.500 1 0 1
0.775 1 1 1 1.525 1 0 1 7.775 0 0 1
0.800 0 0 0 1.575 0 1 0 8.450 0 1 0
0.800 0 1 0 1.600 1 1 0 8.550 1 0 1
0.800 0 1 0 1.600 1 1 0 9.575 0 0 1
0.800 1 0 0 1.700 1 0 0 10.300 1 0 1
0.800 1 0 1 1.725 0 0 0 12.300 0 0 0
0.800 1 1 0 1.800 0 0 0 13.800 0 0 1
0.800 1 1 1 1.900 0 1 1 15.475 1 0 0
run;

proc sort data=moulton; by iu; run;

proc nlmixed data=moulton;
title "Model F";
liu = log(iu);
obs = (iu>0.100);

mu = int_mu + gamma1*ez + gamma2*hi + gamma3*fem;
mulogit = int_mulogit + beta1*ez + beta2*hi + beta3*fem;

p = 1/(1+exp(-mulogit)); * Logistic model;

ll = (1-obs)*log((1-p)+p*probnorm((liu-mu)/sqrt(sigma))) +
(obs)*log(p*(1/(sqrt(2*constant('pi')*sigma)))*exp(-(liu-mu)**2/(2*sigma)));

model liu ~ general(ll);
run;
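
/* A minimal sketch (my addition): Model F again with ESTIMATE statements appended
   to back-transform the intercepts, giving the probability of a detectable response
   and the median response for the reference group (ez=hi=fem=0). The labels and the
   two ESTIMATE statements are my own; the likelihood is unchanged. */
proc nlmixed data=moulton;
liu = log(iu);
obs = (iu>0.100);
mu = int_mu + gamma1*ez + gamma2*hi + gamma3*fem;
mulogit = int_mulogit + beta1*ez + beta2*hi + beta3*fem;
p = 1/(1+exp(-mulogit));
ll = (1-obs)*log((1-p)+p*probnorm((liu-mu)/sqrt(sigma))) +
(obs)*log(p*(1/(sqrt(2*constant('pi')*sigma)))*exp(-(liu-mu)**2/(2*sigma)));
model liu ~ general(ll);
estimate 'P(detectable), reference group' 1/(1+exp(-int_mulogit));
estimate 'Median response, reference group' exp(int_mu);
run;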

Repeated measures with clumping at zero using SAS/NLMIXED

/*****************************************************************************
* Below is SAS/NLMIXED code to fit repeated measures data with clumping at
* zero. The method fits a logistic regression for the zero/nonzero part and a
* lognormal regression for the nonzero values. The authors named this model
* the logistic-lognormal-normal mixed-distribution model; I have also extended
* the code to fit a probit-lognormal-normal mixed-distribution.
* Reference: "Analysis of repeated measures data with clumping at zero"
* Tooze JA, Grunwald GK, Jones RH. (2002),
* Statistical Methods in Medical Research, 11:341-355.
*******************************************************************************/
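
/* A minimal sketch (my addition): the NLMIXED step below expects a data set named
   a with a repeated-measures response x and a subject identifier person, neither
   of which is defined in this post. The simulation below is an illustrative
   stand-in; all parameter values are arbitrary. */
data a;
do person = 1 to 100;
u0 = 0.5*rannor(2006); * random intercept for the occurrence (zero/nonzero) part;
u1 = 0.3*rannor(2006); * random intercept for the intensity (lognormal) part;
do visit = 1 to 4;
p = 1/(1+exp(-(0.5 + u0))); * probability of a nonzero response;
if ranuni(2006) < p then x = exp(1.0 + u1 + 0.4*rannor(2006));
else x = 0;
output;
end;
end;
keep person visit x;
run;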

proc nlmixed data=a;
eta = beta00 + u00 ;
mu = beta10 + u10 ;

p = exp(eta) / (1+exp(eta)); * Logit;
*p = probnorm(eta); * Probit;

if x=0 then like=(1-p) + p*pdf("lognormal",x,mu,Vare);
else if x>0 then like=p*pdf("lognormal",x,mu,Vare);

loglike=log(like);

model x ~ general(loglike);

* with correlated random effects;
random u00 u10 ~ normal([0,0],[Var0,Cov01,Var1]) subject=person;
/* without correlated random effects */
* random u00 u10 ~ normal([0,0],[Var0,0,Var1]) subject=person;

estimate 'prob' exp(beta00)/(1+exp(beta00)); * inverse logit;
* estimate 'prob' probnorm(beta00); * inverse of the probit link (normal CDF);

run;

Tuesday, January 24, 2006

Generate initial values for NLMIXED using GLIMMIX

/******************************************************************************
Logistic Regressions with Random Intercepts
Data from the Getting Started example in the PROC GLIMMIX documentation
This code generates initial values using PROC GLIMMIX, constructs a data set
from them, and provides it to PROC NLMIXED
******************************************************************************/
data multicenter;
input center group $ n sideeffect;
groupA=(group="A");
groupB=(group="B");
datalines;
1 A 32 14
1 B 33 18
2 A 30 4
2 B 28 8
3 A 23 14
3 B 24 9
4 A 22 7
4 B 22 10
5 A 20 6
5 B 21 12
6 A 19 1
6 B 20 3
7 A 17 2
7 B 17 6
8 A 16 7
8 B 15 9
9 A 13 1
9 B 14 5
10 A 13 3
10 B 13 1
11 A 11 1
11 B 12 2
12 A 10 1
12 B 9 0
13 A 9 2
13 B 9 6
14 A 8 1
14 B 8 1
15 A 7 1
15 B 8 0
;

proc glimmix data=multicenter;
class center group;
model sideeffect/n = groupA groupB / noint solution;
random intercept / subject=center;
ods output ParameterEstimates=EP (rename=(effect=parameter))
CovParms=CP (rename=(covparm=parameter)) ;
run;

data estimate;
length parameter $ 12;
set ep cp;
if parameter='groupA' then parameter='beta1';
if parameter='groupB' then parameter='beta2';
if parameter='Intercept' then parameter='VarInt';
keep estimate parameter;
run;
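
/* A minimal sketch (my addition): print the assembled starting-value data set to
   verify that the parameter names match those used in the NLMIXED step below. */
proc print data=estimate noobs;
run;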

proc nlmixed data=multicenter;
parms / data=estimate;
eta = beta1*groupA+ beta2*groupB + u;
p = exp(eta) / (1+exp(eta));
model sideeffect ~ binomial(n,p);
random u ~ normal(0,VarInt) subject=center;
run;

Wednesday, January 18, 2006

Concordance Correlation Coefficient (CCC) using SAS/NLMIXED

/* Code to generate a bivariate normal sample using SAS/BASE */
data mvn;
keep x y;
mu1=10; mu2=20; var1=4; var2=9; rho=.5;
std1=sqrt(var1); std2=sqrt(var2);
c=sqrt(1-rho**2);
do i = 1 to 500;
x = rannor(123);
y = rho*x+c*rannor(123);
x = mu1 + sqrt(var1)*x;
y = mu2 + sqrt(var2)*y;
output;
end;
run;

/* ML estimates of the bivariate normal moments using PROC CORR (vardef=n) */
proc corr data=mvn vardef=n;
var x y;
run;

/* ML Estimate bivariate normal distribution using SAS/NLMIXED */
proc nlmixed data=mvn;
parms mx=1 my=1 sx=1 sy=1 rho=0;
bounds -1<=rho<=1;
con=1/(2*constant('pi')*sqrt((sx*sx)*(sy*sy)*(1-rho*rho)));
zx=(x-mx)/sqrt(sx*sx);
zy=(y-my)/sqrt(sy*sy);
hx=zx**2+zy**2-2*rho*zx*zy;

like =con*exp(-hx/(2*(1-rho**2)));
ll = log(like);

model x ~ general(ll);
/****************************************************************************************
Assess agreement in terms of accuracy, scale differential, and precision, pp. 65-68,
van Belle G. "Statistical Rules of Thumb" John Wiley & Sons 2002, http://www.vanbelle.org/
See also: Lin LI. "A concordance correlation coefficient to evaluate reproducibility"
Biometrics. 1989 Mar;45(1):255-68
****************************************************************************************/
estimate 'location shift (u)' (mx-my)/sqrt(2*sx*sy);
estimate 'scale shift (v)' (sx-sy)/sqrt(2*sx*sy);
estimate 'Accuracy' 1/(1 + ((mx-my)**2/(2*sx*sy) ) + ((sx-sy)**2/(2*sx*sy)) );
estimate 'Precision' rho;
estimate 'Concordance Correlation Coefficient' (1/(1 +((mx-my)**2/(2*sx*sy))+((sx-sy)**2/(2*sx*sy))))*(rho);
run;
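
/* A minimal sketch (my addition): compute the sample CCC directly from the TYPE=CORR
   data set that PROC CORR writes with OUTP=, as a check on the NLMIXED estimates.
   CCC = 2*rho*sx*sy / (sx**2 + sy**2 + (mx-my)**2). Data set names corrout and ccc
   are arbitrary. */
proc corr data=mvn outp=corrout noprint vardef=n;
var x y;
run;

data ccc;
retain mx my sx sy rho;
set corrout end=last;
if _type_='MEAN' then do; mx=x; my=y; end;
if _type_='STD' then do; sx=x; sy=y; end;
if _type_='CORR' and _name_='x' then rho=y;
if last then do;
ccc = 2*rho*sx*sy / (sx**2 + sy**2 + (mx-my)**2);
put 'Sample CCC = ' ccc;
output;
end;
keep ccc;
run;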