From 7aaceb340922f91ff6141bc6530f56e10a4bd757 Mon Sep 17 00:00:00 2001 From: Josef Date: Sun, 20 Dec 2015 14:41:35 -0500 Subject: [PATCH 1/4] ENH: add one sample proportion notebook (test, confint, power), incomplete --- notebooks/proportion_one_power.ipynb | 2263 ++++++++++++++++++++++++++ notebooks/proportion_one_power.py | 762 +++++++++ 2 files changed, 3025 insertions(+) create mode 100644 notebooks/proportion_one_power.ipynb create mode 100644 notebooks/proportion_one_power.py diff --git a/notebooks/proportion_one_power.ipynb b/notebooks/proportion_one_power.ipynb new file mode 100644 index 0000000..6a20a17 --- /dev/null +++ b/notebooks/proportion_one_power.ipynb @@ -0,0 +1,2263 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "# One proportion: Hypothesis Tests, Sample Size and Power" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is a experimental notebook to try to organize various parts for hypothesis tests and related methods.\n", + "\n", + "This should roughly include the following\n", + "\n", + "- given a sample\n", + " - estimate parameter or effect size\n", + " - hypothesis test given Null and Alternative\n", + " - confidence interval\n", + "\n", + "- prospective or evaluative\n", + " - size of test and power\n", + " - sample size required\n", + "\n", + "- sensitivity to misspecification\n", + " - bias of estimate and of estimated variance\n", + " - size and power of hypothesis tests\n" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "from __future__ import division # for py2 compatibility, I'm using Python 3.4\n", + "import numpy as np\n", + "from scipy import stats\n", + "import statsmodels.stats.proportion as smprop\n", + "import statsmodels.stats.power as smpow\n", + "\n", + "import pandas as pd # to store results with labels" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Sample" + ] + }, + { + "cell_type": "code", + "execution_count": 13, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "p_true = 0.3\n", + "nobs = 30\n", + "p_null = p_true\n", + "\n", + "#y = np.random.binomial(nobs, p_true)\n", + "y = 7" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Assume we have observed 7 events in a sample of size 30. What are our estimates, confidence interval, and test whether the true proportion = 0.3." + ] + }, + { + "cell_type": "code", + "execution_count": 15, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "estimate: 0.23333333333333334\n" + ] + }, + { + "data": { + "text/html": [ + "
\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
lowerupper
beta0.0993380.422837
wilson0.1179240.409283
normal0.0819840.384682
agresti_coull0.1151620.412045
jeffrey0.1109210.404400
\n", + "
" + ], + "text/plain": [ + " lower upper\n", + "beta 0.099338 0.422837\n", + "wilson 0.117924 0.409283\n", + "normal 0.081984 0.384682\n", + "agresti_coull 0.115162 0.412045\n", + "jeffrey 0.110921 0.404400" + ] + }, + "execution_count": 15, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "count = y # alias\n", + "prop_mle = count / nobs\n", + "confint_methods = ['beta', 'wilson', 'normal', 'agresti_coull', 'jeffrey']\n", + "confints = [smprop.proportion_confint(count, nobs, alpha=0.05, method=method) for method in confint_methods]\n", + "ci_df = pd.DataFrame(confints, index=confint_methods, columns=['lower', 'upper'])\n", + "print('estimate: ', prop_mle)\n", + "ci_df" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Two sided hypothesis**" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.55100632188415744" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided')" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.86333169460343107, 0.38795512282614564)" + ] + }, + "execution_count": 19, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided')" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.79681907288959564, 0.42555611641912894)" + ] + }, + "execution_count": 20, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Equivalence**" + ] + }, + { + "cell_type": "code", + "execution_count": 28, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.025000000000000019, 0.025000000000000008, 0.025000000000000019)" + ] + }, + "execution_count": 28, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp = ci_df.loc['beta', :]\n", + "smprop.binom_tost(count, nobs, low, upp)" + ] + }, + { + "cell_type": "code", + "execution_count": 36, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "score (0.056053065870080819, 0.056053065870080819, 0.034949354314036465)\n", + "wald (0.061422217884672649, 0.0093912009590914284, 0.061422217884672649)\n" + ] + } + ], + "source": [ + "print('score', smprop.binom_tost(count, nobs, *ci_df.loc['wilson', :]))\n", + "print('wald ', smprop.binom_tost(count, nobs, *ci_df.loc['normal', :]))" + ] + }, + { + "cell_type": "code", + "execution_count": 32, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.067516018566243438,\n", + " (1.4945495650814802, 0.067516018566243438),\n", + " (-2.2785472457940248, 0.011346996472929981))" + ] + }, + "execution_count": 32, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztost(count, nobs, *ci_df.loc['wilson', :])" + ] + }, + { + "cell_type": "code", + "execution_count": 33, + "metadata": { + "collapsed": false + 
}, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.04134932724842634,\n", + " (1.7352380217245991, 0.04134932724842634),\n", + " (-2.4540616477331247, 0.0070626381724014512))" + ] + }, + "execution_count": 33, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztost(count, nobs, *ci_df.loc['beta', :])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**One-sided tests**\n", + "\n", + "\n", + "The null nypothesis and alternative hypothesis for alternative `'larger'` are\n", + "\n", + "H0: p = p0 \n", + "H1: p > p0\n", + "\n", + "where p0 = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 50, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "exact: 0.840477017804\n", + "wald: 0.806022438587\n", + "score: 0.78722194179\n" + ] + } + ], + "source": [ + "te = smprop.binom_test(count, nobs, prop=p_null, alternative='larger')\n", + "tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger')\n", + "ts = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger', prop_var=p_null)\n", + "print('exact: ', te)\n", + "print('wald: ', tw[1])\n", + "print('score: ', ts[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The null nypothesis and alternative hypothesis for alternative `'smaller'` are\n", + "\n", + "H0: p = p0 \n", + "H1: p < p0\n", + "\n", + "where p0 = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 51, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "exact: 0.281376708187\n", + "wald: 0.193977561413\n", + "score: 0.21277805821\n" + ] + } + ], + "source": [ + "te = smprop.binom_test(count, nobs, prop=p_null, alternative='smaller')\n", + "tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller')\n", + "ts = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller', prop_var=p_null)\n", + "print('exact: ', te)\n", + "print('wald: ', tw[1])\n", + "print('score: ', ts[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can look at null hypothesis that are further away from the observed proportion to see which hypothesis are rejected. The observed proportion is 0.23, our new null hypothesis value is 0.6. 
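As a cross-check, the exact one-sided p-values are simply binomial tail probabilities; a minimal sketch using only `scipy.stats` (not a statsmodels function) reproduces the values above, and the same identity explains the numbers for the shifted null value 0.6 computed next.

```python
from scipy import stats

count, nobs, p0 = 7, 30, 0.3
# p-value of the exact test with alternative 'larger': P(X >= count) under H0
print(stats.binom.sf(count - 1, nobs, p0))   # ~0.8405, matches 'exact' above
# p-value of the exact test with alternative 'smaller': P(X <= count) under H0
print(stats.binom.cdf(count, nobs, p0))      # ~0.2814, matches 'exact' above
```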
" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "exact: 4.93250255898e-05\n", + "wald: 1.02554471155e-06\n", + "score: 5.86612629749e-06\n" + ] + } + ], + "source": [ + "p_null_ = 0.6\n", + "te = smprop.binom_test(count, nobs, prop=p_null_, alternative='smaller')\n", + "tw = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='smaller')\n", + "ts = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='smaller', prop_var=p_null)\n", + "print('exact: ', te)\n", + "print('wald: ', tw[1])\n", + "print('score: ', ts[1])" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "exact: 0.999990777679\n", + "wald: 0.999998974455\n", + "score: 0.999994133874\n" + ] + } + ], + "source": [ + "p_null_ = 0.6\n", + "te = smprop.binom_test(count, nobs, prop=p_null_, alternative='larger')\n", + "tw = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='larger')\n", + "ts = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='larger', prop_var=p_null)\n", + "print('exact: ', te)\n", + "print('wald: ', tw[1])\n", + "print('score: ', ts[1])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The `smaller` hypothesis is strongly rejected, which means that we reject the null hypothesis that the true proportion is 0.6 or larger in favor of the alternative hypothesis that the true proportion is smaller than 0.6.\n", + "\n", + "In the case `larger` alternative, the p-value is very large and we cannot reject the Null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6. \n", + "\n", + "Non-inferiority and superiority tests are special cases of these one-sided tests where the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount 5% below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Aside: Inequality Null hypothesis**\n", + "\n", + "For most methods the p-values for the hypothesis tests are the same for the case when the null hypothesis is and inequality \n", + "\n", + "\n", + "The null nypothesis and alternative hypothesis for alternative `'larger'` specify that the true proportion is smaller than or equal to the hypothesized value versus the alternative that it is larger.\n", + "\n", + "H0': p <= p0 \n", + "H1': p > p0\n", + "\n", + "\n", + "The null nypothesis and alternative hypothesis for alternative 'smaller' are\n", + "\n", + "H0': p >= p0 \n", + "H1': p < p0\n", + "\n", + "\n", + "The score test is an exception to this. If the null hypothesis is a inequality, then the constrained maximum likelihood estimate will depend on whether the constraint of the null hypothesis is binding or not. If it is binding, then the score test is the same as for the test with an equality in the null hypothesis. 
If the constrained is not binding then the null parameter estimate is the same as the estimate used for the Wald test.\n", + "Because the equality is the worst case in these hypothesis test, it does not affect the validity of the tests. However, in the asymptotic tests it would add another option to define the variance used in the calculations, and the standard score test does not take the inequality into account in calculating the variance. This is not implemented, so we restrict ourselves to equality null hypothesis, even though the interpretation is mostly the same as for the inequality null hypothesis.\n" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Standard t-test**\n", + "\n", + "We can also use the standard t-test in large samples if we encode the data with 0 for no event and 1 for the success event. The t-test estimates the variance from the data and does not take the relationship between mean and variance explicitly into account. However, by the law of large numbers the mean, i.e. the proportion in the current case, will be asymptotically distributed as normal which can be approximated by the t-distribution." + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.84882088476305617, 0.40293351466675675, 29.0)" + ] + }, + "execution_count": 61, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "import statsmodels.stats.weightstats as smsw\n", + "yy = np.repeat([0, 1], [nobs - count, count])\n", + "ds = smsw.DescrStatsW(yy)\n", + "ds.ttest_mean(0.3)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "{'data': array([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n", + " 1, 1, 1, 1, 1, 1, 1]),\n", + " 'ddof': 0,\n", + " 'demeaned': array([-0.23333333, -0.23333333, -0.23333333, -0.23333333, -0.23333333,\n", + " -0.23333333, -0.23333333, -0.23333333, -0.23333333, -0.23333333,\n", + " -0.23333333, -0.23333333, -0.23333333, -0.23333333, -0.23333333,\n", + " -0.23333333, -0.23333333, -0.23333333, -0.23333333, -0.23333333,\n", + " -0.23333333, -0.23333333, -0.23333333, 0.76666667, 0.76666667,\n", + " 0.76666667, 0.76666667, 0.76666667, 0.76666667, 0.76666667]),\n", + " 'mean': 0.23333333333333334,\n", + " 'std': 0.42295258468165065,\n", + " 'std_mean': 0.078540323245317289,\n", + " 'sum': 7.0,\n", + " 'sum_weights': 30.0,\n", + " 'sumsquares': 5.3666666666666663,\n", + " 'var': 0.17888888888888888,\n", + " 'weights': array([ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", + " 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.,\n", + " 1., 1., 1., 1.])}" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "vars(ds)" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.84882088476305617, 0.79853324266662162, 29.0)" + ] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.ttest_mean(0.3, alternative='larger')" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + 
"text/plain": [ + "(-0.84882088476305617, 0.20146675733337838, 29.0)" + ] + }, + "execution_count": 64, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.ttest_mean(0.3, alternative='smaller')" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, at the latter confidence interval. " + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.049342036125071341,\n", + " (1.706072280308607, 0.049342036125071341, 29.0),\n", + " (-2.4128139764969032, 0.011189332556866095, 29.0))" + ] + }, + "execution_count": 68, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.ttost_mean(*ci_df.loc['beta', :])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Sample Size and Power" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "First we illustrate the rejection region of a test which is the set of all observations at which we reject the null hypothesis.\n", + "Size of a test is the probability to sample an observation in the rejection region under the null hypothesis, power is the probability under the alternative hypothesis.\n", + "\n", + "The rejection region is a property of the hypothesis test, the following calculates it for the two-sided binomial and the two-sided ztest for a single proportion. This depends on the distribution that we use in the hypothesis test, exact distribution which is the binomial in this case or a normal or t distribution as large sample approximation.\n", + "Ones we have the rejection region, we can also use different distributions for evaluating the power either based on the exact distribution or an a large sample approximation or asymptotic distribution.\n", + "\n", + "The sample size that is required to achieve at least a desired power under a given alternative can be explicitly calculated in the special case one-sided tests where both the hypothesis test distribution and the distribution for the power calculations are the normal distribution. In almost all other cases we have to use an iterative solver to find the required sample size.\n", + "\n", + "Power and sample size calculation are currently only implemented for one approximation and for equivalence tests. 
In the following we illustrate several methods for calculating the power which will be useful for different cases depending on whether simplification or computational shortcuts exist or not.\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Rejection region**" + ] + }, + { + "cell_type": "code", + "execution_count": 103, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "rej = np.array([smprop.proportions_ztest(count_, nobs, value=p_null, alternative='two-sided', prop_var=p_null)[1] \n", + " for count_ in range(nobs + 1)])\n", + "rej_indicator = (rej < 0.05) #.astype(int)\n", + "np.column_stack((rej, rej_indicator))\n", + "rej_indicator_score = rej_indicator # keep for later use" + ] + }, + { + "cell_type": "code", + "execution_count": 104, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.00000000e+00, 2.98171757e-05, 1.00000000e+00],\n", + " [ 1.00000000e+00, 4.74743798e-04, 1.00000000e+00],\n", + " [ 2.00000000e+00, 4.23786890e-03, 1.00000000e+00],\n", + " [ 3.00000000e+00, 1.56869137e-02, 1.00000000e+00],\n", + " [ 4.00000000e+00, 4.70922546e-02, 1.00000000e+00],\n", + " [ 5.00000000e+00, 1.61064812e-01, 0.00000000e+00],\n", + " [ 6.00000000e+00, 3.18844777e-01, 0.00000000e+00],\n", + " [ 7.00000000e+00, 5.51006322e-01, 0.00000000e+00],\n", + " [ 8.00000000e+00, 8.42709221e-01, 0.00000000e+00],\n", + " [ 9.00000000e+00, 1.00000000e+00, 0.00000000e+00],\n", + " [ 1.00000000e+01, 6.92568023e-01, 0.00000000e+00],\n", + " [ 1.10000000e+01, 4.29152596e-01, 0.00000000e+00],\n", + " [ 1.20000000e+01, 2.35916547e-01, 0.00000000e+00],\n", + " [ 1.30000000e+01, 1.14625003e-01, 0.00000000e+00],\n", + " [ 1.40000000e+01, 7.02074908e-02, 0.00000000e+00],\n", + " [ 1.50000000e+01, 2.62538788e-02, 1.00000000e+00],\n", + " [ 1.60000000e+01, 8.48352453e-03, 1.00000000e+00],\n", + " [ 1.70000000e+01, 2.43702161e-03, 1.00000000e+00],\n", + " [ 1.80000000e+01, 9.38554917e-04, 1.00000000e+00],\n", + " [ 1.90000000e+01, 1.84952280e-04, 1.00000000e+00],\n", + " [ 2.00000000e+01, 5.94094211e-05, 1.00000000e+00],\n", + " [ 2.10000000e+01, 7.27783538e-06, 1.00000000e+00],\n", + " [ 2.20000000e+01, 1.23860162e-06, 1.00000000e+00],\n", + " [ 2.30000000e+01, 1.79774924e-07, 1.00000000e+00],\n", + " [ 2.40000000e+01, 2.19374043e-08, 1.00000000e+00],\n", + " [ 2.50000000e+01, 2.20771429e-09, 1.00000000e+00],\n", + " [ 2.60000000e+01, 1.78374751e-10, 1.00000000e+00],\n", + " [ 2.70000000e+01, 1.11214916e-11, 1.00000000e+00],\n", + " [ 2.80000000e+01, 5.02237102e-13, 1.00000000e+00],\n", + " [ 2.90000000e+01, 1.46182704e-14, 1.00000000e+00],\n", + " [ 3.00000000e+01, 2.05891132e-16, 1.00000000e+00]])" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "rej = np.array([smprop.binom_test(count_, nobs, prop=p_null, alternative='two-sided') for count_ in range(nobs + 1)])\n", + "rej_indicator = (rej < 0.05) #.astype(int)\n", + "np.column_stack((range(nobs + 1), rej, rej_indicator))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Power calculation - a general method" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In a general method we can use the rejection region of a hypothesis test directly to calculate the probability.\n", + "\n", + "We can use the set of values for which the null hypothesis is rejected instead of using a boolean indicator." 
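Before stepping through the calculation explicitly, the whole recipe can be collected in one small helper (a sketch; the wrapper and its name are illustrative, not part of statsmodels): enumerate all possible counts, keep those whose p-value falls below alpha, and sum their binomial probabilities under the proportion of interest.

```python
import numpy as np
from scipy import stats
import statsmodels.stats.proportion as smprop

def power_by_enumeration(pvalue_func, nobs, prop_alt, alpha=0.05):
    # p-value of the test for every possible number of successes
    counts = np.arange(nobs + 1)
    pvalues = np.array([pvalue_func(c, nobs) for c in counts])
    # sum the binomial probabilities of all rejected counts under prop_alt
    return stats.binom.pmf(counts[pvalues < alpha], nobs, prop_alt).sum()

# size of the exact two-sided binomial test of H0: p = 0.3, evaluated at the null itself
power_by_enumeration(lambda c, n: smprop.binom_test(c, n, prop=0.3), 30, 0.3)
# ~0.047, in line with the size computed step by step below
```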
+ ] + }, + { + "cell_type": "code", + "execution_count": 105, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "x = np.arange(nobs + 1)\n", + "x_rej = x[rej_indicator]\n", + "x_rej_score = x[rej_indicator_score]" + ] + }, + { + "cell_type": "code", + "execution_count": 113, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "binom [ 0 1 2 3 4 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30]\n", + "score [ 0 1 2 3 4 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30]\n" + ] + } + ], + "source": [ + "print('binom', x_rej)\n", + "print('score', x_rej_score)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The rejection region of the score test is larger than the one of the exact binomial test. The score test rejects also if 14 events are observed." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "For the current case we use the exact binomial distribution to calculate the power. The null hypothesis in this example is a two-sided test for p = 0.3. Use p1 for the proportion at which the power or rejection probability is calculated. First we check the size of the test, i.e. p1 = p_null = 0.3" + ] + }, + { + "cell_type": "code", + "execution_count": 114, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.047092254594638484" + ] + }, + "execution_count": 114, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p1 = 0.3\n", + "stats.binom.pmf(x_rej, nobs, p1).sum()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Because we are using the exact test, the probability of rejection under the null is smaller than the required alpha = 0.05. In this example the exact probability is close to the 0.05 threshold. In contrast to this, the score test is liberal in this example and rejects with probability 0.07 instead of the required 0.05." + ] + }, + { + "cell_type": "code", + "execution_count": 112, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.07020749078421995" + ] + }, + "execution_count": 112, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stats.binom.pmf(x_rej_score, nobs, p1).sum()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This method with explicit enumeration of the rejection values can be used for any distribution but will require more computational time than explicit calculations that take advantage of the specific structure. In the case of one sample or one parameter hypothesis test, the rejection region consist of two tail intervals. If we have the boundary of the rejection region available, then we can directly use the cumulative distribution or the survival function to calculate the tail probabilities.\n", + "\n", + "In the case of the binomial distribution with probability p_null under the null hypothesis has tail probabilities at most alpha / 2 in each tail (for equal tailed hypothesis tests)." 
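Restated in the notation of the following cells, with `low` the largest count in the lower rejection tail and `upp` the smallest count in the upper rejection tail, the rejection probability under a proportion p1 is

    Prob(X <= low) + Prob(X >= upp) = cdf(low, nobs, p1) + sf(upp - 1, nobs, p1)

and for an equal-tailed exact test the boundaries are chosen so that, under the null (p1 = p_null), each of the two terms is at most alpha / 2.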
+ ] + }, + { + "cell_type": "code", + "execution_count": 129, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(4.0, 14.0)" + ] + }, + "execution_count": 129, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "lowi, uppi = stats.binom.interval(0.95, nobs, p_null)\n", + "lowi, uppi" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Detour: open or close interval**\n", + "\n", + "The cdf is defined by a weak inequality cdf(t) = Prob(x <= t), the survival function sf is defined by a strict inequality sf(t) = Prob(x > t) so that cdf(t) + sf(t) = 1. Whether the inequalities are strict or weak does not make a difference in continuous distributions that don't have mass points. However, it does make a difference for discrete distribution. If we want a tail probability of alpha, then the cdf has this tail probability including the boundary point, while the sf excludes the boundary point. So to have a upper tail probability alpha for a t such that Prob(x >= t) < alpha but close to it, we need to use sf(t - 1). similarly, we have to subtract one if we want an open interval for the cdf at the lower tail.\n", + "\n", + "Specifically, define the lower and upper thresholds that are in the rejection region\n", + "\n", + "low = max{x: prob(x <= t) <= alpha / 2 \n", + "upp = min{x: prob(x >= t) <= alpha / 2 \n", + "\n", + "Because of the discreteness of the sample space having tail probabilities equal to alpha / 2 is in general not possible.\n" + ] + }, + { + "cell_type": "code", + "execution_count": 130, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "low, upp = lowi, uppi" + ] + }, + { + "cell_type": "code", + "execution_count": 132, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(4.0, 14.0)" + ] + }, + "execution_count": 132, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stats.binom.ppf(0.025, nobs, p_null), stats.binom.isf(0.025, nobs, p_null)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we reject at 4 and smaller and reject at 14 and larger, then the probability of rejection is larger than 0.025 in each tail:" + ] + }, + { + "cell_type": "code", + "execution_count": 133, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.030154943102089313, 0.040052547682131269)" + ] + }, + "execution_count": 133, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stats.binom.cdf(low, nobs, p_null), stats.binom.sf(upp - 1, nobs, p_null)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "If we shrink the rejection region in each tail by one, so we reject at 3 and smaller and reject at 15 and larger, then the probability of rejection is smaller than 0.025 in each tail. The total rejection probability is at 0.026 smaller than 0.05 and shows the typical case that exact tests are conservative, i.e. 
reject less often than alpha, often considerably less:" + ] + }, + { + "cell_type": "code", + "execution_count": 137, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.0093165673111771617, 0.016937311492549543, 0.026253878803726705)" + ] + }, + "execution_count": 137, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prob_low = stats.binom.cdf(low - 1, nobs, p_null)\n", + "prob_upp = stats.binom.sf(upp, nobs, p_null)\n", + "prob_low, prob_upp, prob_low + prob_upp" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "In this case we can increase the lower rejection threshold by one and still stay below the total rejection probability of 0.05, although in this case the rejection probability in the lower tail is larger than 0.025. In this example the same also works on the other side by expanding only the rejection region in the upper tail." + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.030154943102089313, 0.016937311492549543, 0.047092254594638852)" + ] + }, + "execution_count": 138, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prob_low = stats.binom.cdf(low, nobs, p_null)\n", + "prob_upp = stats.binom.sf(upp, nobs, p_null)\n", + "prob_low, prob_upp, prob_low + prob_upp" + ] + }, + { + "cell_type": "code", + "execution_count": 139, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.0093165673111771617, 0.040052547682131269, 0.049369114993308427)" + ] + }, + "execution_count": 139, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "prob_low = stats.binom.cdf(low - 1, nobs, p_null)\n", + "prob_upp = stats.binom.sf(upp - 1, nobs, p_null)\n", + "prob_low, prob_upp, prob_low + prob_upp" + ] + }, + { + "cell_type": "code", + "execution_count": 124, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.95290774540536105" + ] + }, + "execution_count": 124, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stats.binom.cdf(upp, nobs, p_null) - stats.binom.cdf(low, nobs, p_null)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "TODO: why does binom_test reject at 4? \n", + "binom_test is used from scipy.stats for the two-sided alternative." + ] + }, + { + "cell_type": "code", + "execution_count": 142, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.0093165673111771617, 0.030154943102089313)" + ] + }, + "execution_count": 142, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.binom_test(3, nobs, prop=p_null, alternative='smaller'), \\\n", + "smprop.binom_test(4, nobs, prop=p_null, alternative='smaller')" + ] + }, + { + "cell_type": "code", + "execution_count": 144, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.047092254594638852" + ] + }, + "execution_count": 144, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.binom_test(4, nobs, prop=p_null, alternative='two-sided')\n", + "# we get the same answer as in R\n", + "# in R binom.test(4,30, 0.3, alternative=\"two.sided\") --> 0.04709225" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The binomial test is not a centered test. 
It looks like it adds the probability from the further away tail for all x that have lower pmf than the observed value.\n", + "check with Fay for the three different ways of defining two-tailed tests and confints.\n", + "\n", + "The pvalue for the centered test is based on doubling the probability of the smaller tail. Given that it does not exist, we can implement it quickly, and check against R's exactci package, which matches our results." + ] + }, + { + "cell_type": "code", + "execution_count": 151, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "def binom_test_centered(count, nobs, prop=0.5):\n", + " \"\"\"two-sided centered binomial test\"\"\"\n", + " prob_low = stats.binom.cdf(count, nobs, p_null)\n", + " prob_upp = stats.binom.sf(count - 1, nobs, p_null)\n", + " return 2 * min(prob_low, prob_upp)" + ] + }, + { + "cell_type": "code", + "execution_count": 152, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.018633134622354323, 0.060309886204178625)" + ] + }, + "execution_count": 152, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "binom_test_centered(3, nobs, prop=p_null), binom_test_centered(4, nobs, prop=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 153, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.16894012072030201, 0.080105095364262538)" + ] + }, + "execution_count": 153, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "binom_test_centered(13, nobs, prop=p_null), binom_test_centered(14, nobs, prop=p_null)" + ] + }, + { + "cell_type": "raw", + "metadata": {}, + "source": [ + "results from R library exactci, with centered binomial test\n", + "\n", + "> be = binom.exact(3, 30, p = 0.3)\n", + "> be$p.value\n", + "[1] 0.01863313\n", + "> be = binom.exact(4, 30, p = 0.3)\n", + "> be$p.value\n", + "[1] 0.06030989\n", + "> be = binom.exact(13, 30, p = 0.3)\n", + "> be$p.value\n", + "[1] 0.1689401\n", + "> be = binom.exact(14, 30, p = 0.3)\n", + "> be$p.value\n", + "[1] 0.0801051" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Back to power" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "After this more extended detour we go back to our power calculations. So assuming we know the critical values of our rejection region, we can calculate the power using the cdf and sf function of the binomial distribution." + ] + }, + { + "cell_type": "code", + "execution_count": 155, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "def power_binom_reject(low, upp, prop, nobs):\n", + " \"\"\" calculate the power of a test given the rejection intervals\n", + " \n", + " This assumes that the rejection region is the union of the lower \n", + " tail up to and including low, and the upper tail starting at and\n", + " including upp.\n", + " \n", + " The Binomial distribution is used to calculate the power.\n", + " \n", + " Parameters\n", + " ----------\n", + " low\n", + " upp\n", + " prop : float in interval (0, 1)\n", + " proportion parameter for the binomial distribution\n", + " nobs : int\n", + " number of trials for binomial distribution\n", + " \n", + " Returns\n", + " -------\n", + " power : float\n", + " Probability of rejection if the true proportion is `prop`.\n", + " \n", + " Notes\n", + " -----\n", + " Works in vectorized form with appropriate arguments, i.e. 
\n", + " nonscalar arguments are numpy arrays that broadcast correctly.\n", + " \n", + " \"\"\"\n", + " prob_low = prob_upp = 0 # initialize\n", + " if low is not None:\n", + " prob_low = stats.binom.cdf(low, nobs, prop)\n", + " if upp is not None:\n", + " prob_upp = stats.binom.sf(upp - 1, nobs, prop)\n", + " return prob_low + prob_upp" + ] + }, + { + "cell_type": "code", + "execution_count": 162, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "binom 4 15 0.0470922545946\n", + "binom_central 3 15 0.0262538788037\n", + "score 4 14 0.0702074907842\n" + ] + } + ], + "source": [ + "for test, l, u in [('binom ', 4, 15), ('binom_central', 3, 15), ('score ', 4, 14)]:\n", + " print(test, l, u, power_binom_reject(l, u, p_null, nobs))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## trying more\n", + "\n", + "The rest below is just some unsorted experiments to try a few more things." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "TODO: The following is not correct because when we change the sample size, then the rejection region also changes." + ] + }, + { + "cell_type": "code", + "execution_count": 164, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[0.047092254594638852,\n", + " 0.047775312714239675,\n", + " 0.051602724514375933,\n", + " 0.05859877650596669,\n", + " 0.068789625299809865,\n", + " 0.082186755965548378,\n", + " 0.098772671226915978,\n", + " 0.118489659792336,\n", + " 0.1412321565494524,\n", + " 0.16684288140347131,\n", + " 0.19511265080608356,\n", + " 0.22578351411938377,\n", + " 0.25855468655666075,\n", + " 0.29309063531712998,\n", + " 0.32903062340607336,\n", + " 0.36599901949754499,\n", + " 0.4036157318615764,\n", + " 0.44150620799796952,\n", + " 0.47931054705529291,\n", + " 0.51669138801043579]" + ] + }, + "execution_count": 164, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can also calculate this in vectorized form for the set of sample sizes and all three tests:" + ] + }, + { + "cell_type": "code", + "execution_count": 166, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.04709225, 0.02625388, 0.07020749],\n", + " [ 0.04777531, 0.03102743, 0.07728123],\n", + " [ 0.05160272, 0.03820442, 0.0883212 ],\n", + " [ 0.05859878, 0.04792633, 0.10324071],\n", + " [ 0.06878963, 0.06032282, 0.12191353],\n", + " [ 0.08218676, 0.07549525, 0.14416465],\n", + " [ 0.09877267, 0.09350311, 0.16976553],\n", + " [ 0.11848966, 0.11435385, 0.19843379],\n", + " [ 0.14123216, 0.1379965 , 0.2298369 ],\n", + " [ 0.16684288, 0.16431907, 0.26359926],\n", + " [ 0.19511265, 0.19314968, 0.29931183],\n", + " [ 0.22578351, 0.22426089, 0.33654338],\n", + " [ 0.25855469, 0.25737665, 0.37485255],\n", + " [ 0.29309064, 0.29218144, 0.41379979],\n", + " [ 0.32903062, 0.32833054, 0.45295869],\n", + " [ 0.36599902, 
0.36546115, 0.49192593],\n", + " [ 0.40361573, 0.40320337, 0.53032969],\n", + " [ 0.44150621, 0.4411907 , 0.56783618],\n", + " [ 0.47931055, 0.47906961, 0.60415428],\n", + " [ 0.51669139, 0.51650774, 0.63903825]])" + ] + }, + "execution_count": 166, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count." + ] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.39840953644479782, 0.69032832946419354)" + ] + }, + "execution_count": 81, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.56343616981901101, 0.57313791338407638)" + ] + }, + "execution_count": 77, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.79681907288959564, 0.42555611641912894)" + ] + }, + "execution_count": 78, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(8.293564511085938e-17, 0.99999999999999989)" + ] + }, + "execution_count": 79, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(5.864435705996961e-17, 1.0)" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "?smprop.proportion_confint()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "smprop.proportion_confint()" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + 
"text/plain": [ + "0.34470140912721514" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from statsmodels.stats.proportion import proportion_effectsize\n", + "es = proportion_effectsize(0.4, 0.5)\n", + "smpow.NormalIndPower().solve_power(es, nobs1=60, alpha=0.05, ratio=0)\n", + "# R pwr 0.3447014091272153" + ] + }, + { + "cell_type": "code", + "execution_count": 14, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "259.154426739506" + ] + }, + "execution_count": 14, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 25, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.94849873047984967,\n", + " ((0.75834970530451862,\n", + " 0.86051080550098236,\n", + " -2.5599758686988578,\n", + " 1.6821766224528543),))" + ] + }, + "execution_count": 25, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", + " variance_prop=None, discrete=True, continuity=0,\n", + " critval_continuity=0)\n", + " " + ] + }, + { + "cell_type": "code", + "execution_count": 39, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95000875677852759,\n", + " ((0.75207687201030093,\n", + " 0.86590768457946699,\n", + " -1.7341121433755891,\n", + " 2.3848884863189261),))" + ] + }, + "execution_count": 39, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", + " variance_prop=None, discrete=False, continuity=0,\n", + " critval_continuity=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 41, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95811685170327532,\n", + " ((0.75220160721176865,\n", + " 0.86582602622195959,\n", + " -1.812033626524528,\n", + " 2.4628099694678625),))" + ] + }, + "execution_count": 41, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", + " variance_prop=None, discrete=False, continuity=1,\n", + " critval_continuity=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 49, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95447058338704227,\n", + " ((158.0, 181.0, 5695.1110612524499, 6528.3652241583422),))" + ] + }, + "execution_count": 49, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", + " variance_prop=None, 
discrete=False, continuity=0,\n", + " critval_continuity=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 55, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.91140841850002685,\n", + " ((0.76242701448039996,\n", + " 0.85913192581906617,\n", + " -1.4383338187061427,\n", + " 2.2137816599366142),))" + ] + }, + "execution_count": 55, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", + " variance_prop=None, discrete=False, continuity=1,\n", + " critval_continuity=0)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 71, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(49.0, 51.0)" + ] + }, + "execution_count": 71, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs = 0.4, 0.6, 100\n", + "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(12.0, 28.0)" + ] + }, + "execution_count": 59, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "value, nobs = 0.4, 50\n", + "smprop.binom_test_reject_interval(value, nobs, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 70, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.39832112950330101, 0.6016788704966991)" + ] + }, + "execution_count": 70, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportion_confint(50, 100, method='beta')" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(78.0, 84.0)" + ] + }, + "execution_count": 72, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs = 0.7, 0.9, 100\n", + "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.65848903119285485, ((78.0, 85.0, 1930.0, 2105.0),))" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", + " variance_prop=None, discrete=False, continuity=0,\n", + " critval_continuity=0)" + ] + }, + { + "cell_type": "code", + "execution_count": 78, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.61042723749210825" + ] + }, + "execution_count": 78, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", + "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 79, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.71661671146632" + ] 
+ }, + "execution_count": 79, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8\n", + "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 132, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# from Lachine 1981 equ (3) and (4)\n", + "\n", + "from scipy import stats\n", + "def sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9):\n", + " crit_alpha, crit_pow = stats.norm.isf(alpha), stats.norm.isf(1 - power)\n", + " return ((crit_alpha * std_null + crit_pow * std_alt) / np.abs(diff))**2\n", + "\n", + "def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05):\n", + " crit_alpha = stats.norm.isf(alpha)\n", + " crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt\n", + " return stats.norm.cdf(crit_pow)" + ] + }, + { + "cell_type": "code", + "execution_count": 140, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "856.38473506679793" + ] + }, + "execution_count": 140, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes\n", + "n_frac1 = 0.5\n", + "n_frac2 = 1 - frac1\n", + "\n", + "# if defined by ratio: n2 = ratio * n1\n", + "ratio = 1\n", + "n_frac1 = 1 / ( 1. 
+ ratio)\n", + "n_frac2 = 1 - frac1\n", + "\n", + "\n", + "# If we use fraction of nobs, then sample_size return nobs is total number of observations\n", + "diff = 0.2\n", + "std_null = std_alt = 1 * np.sqrt(1 / 0.5 + 1 / 0.5)\n", + "nobs = sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9)\n", + "nobs" + ] + }, + { + "cell_type": "code", + "execution_count": 134, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.89999999999999991" + ] + }, + "execution_count": 134, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "#nobs = 858\n", + "power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 135, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(1.6448536269514729, 1.2815515655446004)" + ] + }, + "execution_count": 135, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "alpha=0.05; power=0.9\n", + "stats.norm.isf(alpha), stats.norm.isf(1 - power)\n" + ] + }, + { + "cell_type": "code", + "execution_count": 136, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "1.2815515655446004" + ] + }, + "execution_count": 136, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "crit_alpha = stats.norm.isf(alpha)\n", + "(np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt" + ] + }, + { + "cell_type": "code", + "execution_count": 137, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.89999999999999991" + ] + }, + "execution_count": 137, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "stats.norm.cdf(_)" + ] + }, + { + "cell_type": "code", + "execution_count": 138, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(array([ 30., 49.]), array([ 51., 70.]))" + ] + }, + "execution_count": 138, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.4.3" + } + }, + "nbformat": 4, + "nbformat_minor": 0 +} diff --git a/notebooks/proportion_one_power.py b/notebooks/proportion_one_power.py new file mode 100644 index 0000000..d37b005 --- /dev/null +++ b/notebooks/proportion_one_power.py @@ -0,0 +1,762 @@ + +# coding: utf-8 + +# # One proportion: Hypothesis Tests, Sample Size and Power + +# This is a experimental notebook to try to organize various parts for hypothesis tests and related methods. 
+#
+# This should roughly include the following
+#
+# - given a sample
+#     - estimate parameter or effect size
+#     - hypothesis test given Null and Alternative
+#     - confidence interval
+#
+# - prospective or evaluative
+#     - size of test and power
+#     - sample size required
+#
+# - sensitivity to misspecification
+#     - bias of estimate and of estimated variance
+#     - size and power of hypothesis tests
+#
+
+# In[12]:
+
+from __future__ import division # for py2 compatibility, I'm using Python 3.4
+import numpy as np
+from scipy import stats
+import statsmodels.stats.proportion as smprop
+import statsmodels.stats.power as smpow
+
+import pandas as pd # to store results with labels
+
+
+# ## Sample
+
+# In[13]:
+
+p_true = 0.3
+nobs = 30
+p_null = p_true
+
+#y = np.random.binomial(nobs, p_true)
+y = 7
+
+
+# Assume we have observed 7 events in a sample of size 30. What are our estimate and confidence intervals, and can we reject the null hypothesis that the true proportion equals 0.3?
+
+# In[15]:
+
+count = y # alias
+prop_mle = count / nobs
+confint_methods = ['beta', 'wilson', 'normal', 'agresti_coull', 'jeffrey']
+confints = [smprop.proportion_confint(count, nobs, alpha=0.05, method=method) for method in confint_methods]
+ci_df = pd.DataFrame(confints, index=confint_methods, columns=['lower', 'upper'])
+print('estimate: ', prop_mle)
+ci_df
+
+
+# **Two sided hypothesis**
+
+# In[16]:
+
+smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided')
+
+
+# In[19]:
+
+smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided')
+
+
+# In[20]:
+
+smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided', prop_var=p_null)
+
+
+# **Equivalence**
+
+# In[28]:
+
+low, upp = ci_df.loc['beta', :]
+smprop.binom_tost(count, nobs, low, upp)
+
+
+# In[36]:
+
+print('score', smprop.binom_tost(count, nobs, *ci_df.loc['wilson', :]))
+print('wald ', smprop.binom_tost(count, nobs, *ci_df.loc['normal', :]))
+
+
+# In[32]:
+
+smprop.proportions_ztost(count, nobs, *ci_df.loc['wilson', :])
+
+
+# In[33]:
+
+smprop.proportions_ztost(count, nobs, *ci_df.loc['beta', :])
+
+
+# **One-sided tests**
+#
+#
+# The null hypothesis and alternative hypothesis for alternative `'larger'` are
+#
+# H0: p = p0
+# H1: p > p0
+#
+# where p0 = 0.3
+
+# In[50]:
+
+te = smprop.binom_test(count, nobs, prop=p_null, alternative='larger')
+tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger')
+ts = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger', prop_var=p_null)
+print('exact: ', te)
+print('wald: ', tw[1])
+print('score: ', ts[1])
+
+
+# The null hypothesis and alternative hypothesis for alternative `'smaller'` are
+#
+# H0: p = p0
+# H1: p < p0
+#
+# where p0 = 0.3
+
+# In[51]:
+
+te = smprop.binom_test(count, nobs, prop=p_null, alternative='smaller')
+tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller')
+ts = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller', prop_var=p_null)
+print('exact: ', te)
+print('wald: ', tw[1])
+print('score: ', ts[1])
+
+
+# We can look at null hypotheses that are further away from the observed proportion to see which hypotheses are rejected. The observed proportion is 0.23, our new null hypothesis value is 0.6.
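+
+# (sketch, not part of the original script) The exact one-sided p-values are
+# binomial tail probabilities; for the shifted null p0 = 0.6 computed next:
+print(stats.binom.cdf(count, nobs, 0.6))     # P(X <= 7 | p=0.6), 'smaller', ~4.9e-05
+print(stats.binom.sf(count - 1, nobs, 0.6))  # P(X >= 7 | p=0.6), 'larger',  ~0.99999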
+
+# In[67]:
+
+p_null_ = 0.6
+te = smprop.binom_test(count, nobs, prop=p_null_, alternative='smaller')
+tw = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='smaller')
+ts = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='smaller', prop_var=p_null)
+print('exact: ', te)
+print('wald:  ', tw[1])
+print('score: ', ts[1])
+
+
+# In[65]:
+
+p_null_ = 0.6
+te = smprop.binom_test(count, nobs, prop=p_null_, alternative='larger')
+tw = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='larger')
+ts = smprop.proportions_ztest(count, nobs, value=p_null_, alternative='larger', prop_var=p_null)
+print('exact: ', te)
+print('wald:  ', tw[1])
+print('score: ', ts[1])
+
+
+# The `smaller` hypothesis is strongly rejected, which means that we reject the null hypothesis that the true proportion is 0.6 or larger in favor of the alternative hypothesis that the true proportion is smaller than 0.6.
+#
+# In the case of the `larger` alternative, the p-value is very large and we cannot reject the null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6.
+#
+# Non-inferiority and superiority tests are special cases of these one-sided tests. Often, the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount, say 5%, below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test.
+
+# **Aside: Inequality Null hypothesis**
+#
+# In the above definition of the null hypothesis we used an equality. For most methods the p-values of the hypothesis tests are the same when the null hypothesis is an inequality.
+#
+# The null hypothesis and alternative hypothesis for alternative `'larger'` specify that the true proportion is smaller than or equal to the hypothesized value versus the alternative that it is larger.
+#
+#     H0': p <= p0
+#     H1': p > p0
+#
+# The null hypothesis and alternative hypothesis for alternative `'smaller'` are
+#
+#     H0': p >= p0
+#     H1': p < p0
+#
+# The score test is an exception to this. If the null hypothesis is an inequality, then the constrained maximum likelihood estimate depends on whether the constraint of the null hypothesis is binding or not. If it is binding, then the score test is the same as the test with an equality in the null hypothesis. If the constraint is not binding, then the null parameter estimate is the same as the estimate used for the Wald test.
+# Because the equality is the worst case in these hypothesis tests, it does not affect the validity of the tests. However, in the asymptotic tests it would add another option for defining the variance used in the calculations, and the standard score test does not take the inequality into account when calculating the variance. This is not implemented, so we restrict ourselves to equality null hypotheses, even though the interpretation is mostly the same as for the inequality null hypotheses.
+
+# In[ ]:
+
+
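+# As a quick numerical check of the "worst case" claim above: the exact one-sided p-value P(X >= count) increases in the assumed null proportion, so over an inequality null H0': p <= p0 the boundary value p = p0 gives the largest p-value. The grid of proportions below is chosen only for illustration.

+# In[ ]:

+[stats.binom.sf(count - 1, nobs, p0_) for p0_ in (0.2, 0.25, 0.3)]
+
+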
+# **Standard t-test**
+#
+# We can also use the standard t-test in large samples if we encode the data with 0 for no event and 1 for the success event. The t-test estimates the variance from the data and does not take the relationship between mean and variance explicitly into account. However, by the central limit theorem the mean, i.e. the proportion in the current case, is asymptotically normally distributed, so the t-distribution used by the t-test provides a good large sample approximation.

+# In[61]:

+import statsmodels.stats.weightstats as smsw
+yy = np.repeat([0, 1], [nobs - count, count])
+ds = smsw.DescrStatsW(yy)
+ds.ttest_mean(0.3)
+
+
+# In[62]:

+vars(ds)
+
+
+# In[63]:

+ds.ttest_mean(0.3, alternative='larger')
+
+
+# In[64]:

+ds.ttest_mean(0.3, alternative='smaller')
+
+
+# In this example the p-values from the t-test are in between those of the asymptotic score and Wald tests based on the normal distribution for all three alternatives. The t-test based TOST has a p-value that is slightly larger than the normal distribution based TOST for proportions, 0.049 versus 0.041, which are both larger than the binomial distribution based TOST, which is 0.025 when we use the latter's confidence interval for the equivalence margins.

+# In[68]:

+ds.ttost_mean(*ci_df.loc['beta', :])
+
+
+# In[ ]:
+
+
+
+# In[ ]:
+
+
+
+# ## Sample Size and Power

+# First we illustrate the rejection region of a test, which is the set of all observations at which we reject the null hypothesis.
+# The size of a test is the probability of sampling an observation in the rejection region under the null hypothesis; the power is that probability under the alternative hypothesis.
+#
+# The rejection region is a property of the hypothesis test; the following calculates it for the two-sided binomial test and the two-sided z-test for a single proportion. It depends on the distribution that we use in the hypothesis test, either the exact distribution, which is the binomial in this case, or a normal or t distribution as large sample approximation.
+# Once we have the rejection region, we can also use different distributions for evaluating the power, based either on the exact distribution or on a large sample or asymptotic approximation.
+#
+# The sample size that is required to achieve at least a desired power under a given alternative can be explicitly calculated in the special case of one-sided tests where both the hypothesis test distribution and the distribution for the power calculation are the normal distribution. In almost all other cases we have to use an iterative solver to find the required sample size.
+#
+# Power and sample size calculations are currently only implemented for one approximation and for equivalence tests. In the following we illustrate several methods for calculating the power, which will be useful for different cases depending on whether simplifications or computational shortcuts exist or not.

+# **Rejection region**

+# In[103]:

+rej = np.array([smprop.proportions_ztest(count_, nobs, value=p_null, alternative='two-sided', prop_var=p_null)[1]
+                for count_ in range(nobs + 1)])
+rej_indicator = (rej < 0.05)  #.astype(int)
+np.column_stack((rej, rej_indicator))
+rej_indicator_score = rej_indicator  # keep for later use
+
+
+# In[104]:

+rej = np.array([smprop.binom_test(count_, nobs, prop=p_null, alternative='two-sided') for count_ in range(nobs + 1)])
+rej_indicator = (rej < 0.05)  #.astype(int)
+np.column_stack((range(nobs + 1), rej, rej_indicator))
+
+
+# ### Power calculation - a general method

+# As a general method, we can use the rejection region of a hypothesis test directly to calculate the rejection probability.
+#
+# We can use the set of values for which the null hypothesis is rejected instead of using a boolean indicator.
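+# As a compact preview of the steps worked out in the next cells: the power at an alternative proportion is the binomial probability mass summed over the rejection set, here for the score test rejection indicator computed above. The alternative proportion is chosen only for illustration.

+# In[ ]:

+p_alt_preview = 0.5  # illustrative alternative, not part of the analysis above
+stats.binom.pmf(np.arange(nobs + 1)[rej_indicator_score], nobs, p_alt_preview).sum()
+
+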
+# In[105]:

+x = np.arange(nobs + 1)
+x_rej = x[rej_indicator]
+x_rej_score = x[rej_indicator_score]
+
+
+# In[113]:

+print('binom', x_rej)
+print('score', x_rej_score)
+
+
+# The rejection region of the score test is larger than that of the exact binomial test. The score test also rejects if 14 events are observed.

+# For the current case we use the exact binomial distribution to calculate the power. The null hypothesis in this example is the two-sided hypothesis p = 0.3. We use p1 for the proportion at which the power or rejection probability is calculated. First we check the size of the test, i.e. p1 = p_null = 0.3.

+# In[114]:

+p1 = 0.3
+stats.binom.pmf(x_rej, nobs, p1).sum()
+
+
+# Because we are using the exact test, the probability of rejection under the null is smaller than the required alpha = 0.05. In this example the exact probability is close to the 0.05 threshold. In contrast to this, the score test is liberal in this example and rejects with probability 0.07 instead of the required 0.05.

+# In[112]:

+stats.binom.pmf(x_rej_score, nobs, p1).sum()
+
+
+# This method with explicit enumeration of the rejection values can be used for any distribution, but it requires more computational time than explicit calculations that take advantage of the specific structure. In the case of a one sample or one parameter hypothesis test, the rejection region consists of two tail intervals. If we have the boundaries of the rejection region available, then we can directly use the cumulative distribution function or the survival function to calculate the tail probabilities.
+#
+# In the case of the binomial distribution with probability p_null under the null hypothesis, an equal-tailed hypothesis test has rejection probability of at most alpha / 2 in each tail.

+# In[129]:

+lowi, uppi = stats.binom.interval(0.95, nobs, p_null)
+lowi, uppi
+
+
+# **Detour: open or closed interval**
+#
+# The cdf is defined by a weak inequality, cdf(t) = Prob(x <= t), while the survival function sf is defined by a strict inequality, sf(t) = Prob(x > t), so that cdf(t) + sf(t) = 1. Whether the inequalities are strict or weak does not make a difference for continuous distributions that don't have mass points. However, it does make a difference for discrete distributions. If we want a tail probability of alpha, then the cdf includes the boundary point in this tail probability, while the sf excludes it. So to get the upper tail probability including the boundary point, Prob(x >= t), we need to use sf(t - 1). Similarly, we have to subtract one from the argument of the cdf if we want the open lower tail probability Prob(x < t).
+#
+# Specifically, define the lower and upper thresholds that are in the rejection region as
+#
+#     low = max{t: Prob(x <= t) <= alpha / 2}
+#     upp = min{t: Prob(x >= t) <= alpha / 2}
+#
+# Because of the discreteness of the sample space, tail probabilities exactly equal to alpha / 2 are in general not attainable.

+# In[130]:

+low, upp = lowi, uppi
+
+
+# In[132]:

+stats.binom.ppf(0.025, nobs, p_null), stats.binom.isf(0.025, nobs, p_null)
+
+
+# If we reject at 4 and smaller and reject at 14 and larger, then the probability of rejection is larger than 0.025 in each tail:

+# In[133]:

+stats.binom.cdf(low, nobs, p_null), stats.binom.sf(upp - 1, nobs, p_null)
+
+
+# If we shrink the rejection region in each tail by one, so we reject at 3 and smaller and at 15 and larger, then the probability of rejection is smaller than 0.025 in each tail.
The total rejection probability is at 0.026 smaller than 0.05 and shows the typical case that exact tests are conservative, i.e. reject less often than alpha, often considerably less: + +# In[137]: + +prob_low = stats.binom.cdf(low - 1, nobs, p_null) +prob_upp = stats.binom.sf(upp, nobs, p_null) +prob_low, prob_upp, prob_low + prob_upp + + +# In this case we can increase the lower rejection threshold by one and still stay below the total rejection probability of 0.05, although in this case the rejection probability in the lower tail is larger than 0.025. In this example the same also works on the other side by expanding only the rejection region in the upper tail. + +# In[138]: + +prob_low = stats.binom.cdf(low, nobs, p_null) +prob_upp = stats.binom.sf(upp, nobs, p_null) +prob_low, prob_upp, prob_low + prob_upp + + +# In[139]: + +prob_low = stats.binom.cdf(low - 1, nobs, p_null) +prob_upp = stats.binom.sf(upp - 1, nobs, p_null) +prob_low, prob_upp, prob_low + prob_upp + + +# In[124]: + +stats.binom.cdf(upp, nobs, p_null) - stats.binom.cdf(low, nobs, p_null) + + +# TODO: why does binom_test reject at 4? +# binom_test is used from scipy.stats for the two-sided alternative. + +# In[142]: + +smprop.binom_test(3, nobs, prop=p_null, alternative='smaller'), smprop.binom_test(4, nobs, prop=p_null, alternative='smaller') + + +# In[144]: + +smprop.binom_test(4, nobs, prop=p_null, alternative='two-sided') +# we get the same answer as in R +# in R binom.test(4,30, 0.3, alternative="two.sided") --> 0.04709225 + + +# The binomial test is not a centered test. It looks like it adds the probability from the further away tail for all x that have lower pmf than the observed value. +# check with Fay for the three different ways of defining two-tailed tests and confints. +# +# The pvalue for the centered test is based on doubling the probability of the smaller tail. Given that it does not exist, we can implement it quickly, and check against R's exactci package, which matches our results. + +# In[151]: + +def binom_test_centered(count, nobs, prop=0.5): + """two-sided centered binomial test""" + prob_low = stats.binom.cdf(count, nobs, p_null) + prob_upp = stats.binom.sf(count - 1, nobs, p_null) + return 2 * min(prob_low, prob_upp) + + +# In[152]: + +binom_test_centered(3, nobs, prop=p_null), binom_test_centered(4, nobs, prop=p_null) + + +# In[153]: + +binom_test_centered(13, nobs, prop=p_null), binom_test_centered(14, nobs, prop=p_null) + +results from R library exactci, with centered binomial test + +> be = binom.exact(3, 30, p = 0.3) +> be$p.value +[1] 0.01863313 +> be = binom.exact(4, 30, p = 0.3) +> be$p.value +[1] 0.06030989 +> be = binom.exact(13, 30, p = 0.3) +> be$p.value +[1] 0.1689401 +> be = binom.exact(14, 30, p = 0.3) +> be$p.value +[1] 0.0801051 +# ## Back to power + +# After this more extended detour we go back to our power calculations. So assuming we know the critical values of our rejection region, we can calculate the power using the cdf and sf function of the binomial distribution. + +# In[155]: + +def power_binom_reject(low, upp, prop, nobs): + """ calculate the power of a test given the rejection intervals + + This assumes that the rejection region is the union of the lower + tail up to and including low, and the upper tail starting at and + including upp. + + The Binomial distribution is used to calculate the power. 
+ + Parameters + ---------- + low + upp + prop : float in interval (0, 1) + proportion parameter for the binomial distribution + nobs : int + number of trials for binomial distribution + + Returns + ------- + power : float + Probability of rejection if the true proportion is `prop`. + + Notes + ----- + Works in vectorized form with appropriate arguments, i.e. + nonscalar arguments are numpy arrays that broadcast correctly. + + """ + prob_low = prob_upp = 0 # initialize + if low is not None: + prob_low = stats.binom.cdf(low, nobs, prop) + if upp is not None: + prob_upp = stats.binom.sf(upp - 1, nobs, prop) + return prob_low + prob_upp + + +# In[162]: + +for test, l, u in [('binom ', 4, 15), ('binom_central', 3, 15), ('score ', 4, 14)]: + print(test, l, u, power_binom_reject(l, u, p_null, nobs)) + + +# In[ ]: + + + + +# In[ ]: + + + + +# ## trying more +# +# The rest below is just some unsorted experiments to try a few more things. + +# In[ ]: + + + + +# TODO: The following is not correct because when we change the sample size, then the rejection region also changes. + +# In[164]: + +[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)] + + +# We can also calculate this in vectorized form for the set of sample sizes and all three tests: + +# In[166]: + +power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None]) + + +# In[ ]: + + + + +# In[ ]: + + + + +# ## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count. + +# In[81]: + +smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null) + + +# In[77]: + +smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null) + + +# In[78]: + +smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null) + + +# In[79]: + +smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null) + + +# In[80]: + +smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null) + + +# In[ ]: + + + + +# In[4]: + +get_ipython().magic('pinfo smprop.proportion_confint') + + +# In[ ]: + +smprop.proportion_confint() + + +# In[11]: + +from statsmodels.stats.proportion import proportion_effectsize +es = proportion_effectsize(0.4, 0.5) +smpow.NormalIndPower().solve_power(es, nobs1=60, alpha=0.05, ratio=0) +# R pwr 0.3447014091272153 + + +# In[14]: + +smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9) + + +# In[ ]: + + + + +# In[ ]: + + + + +# In[ ]: + + + + +# In[25]: + +low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', + variance_prop=None, discrete=True, continuity=0, + critval_continuity=0) + + + +# In[39]: + +low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', + variance_prop=None, discrete=False, continuity=0, + critval_continuity=0) + + +# In[41]: + +low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', + variance_prop=None, discrete=False, continuity=1, + critval_continuity=0) + + +# In[49]: + +low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', + variance_prop=None, discrete=False, continuity=0, + critval_continuity=0) + + +# In[55]: + 
+low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', + variance_prop=None, discrete=False, continuity=1, + critval_continuity=0) + + +# In[ ]: + + + + +# In[ ]: + + + + +# In[71]: + +low, upp, nobs = 0.4, 0.6, 100 +smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) + + +# In[59]: + +value, nobs = 0.4, 50 +smprop.binom_test_reject_interval(value, nobs, alpha=0.05) + + +# In[70]: + +smprop.proportion_confint(50, 100, method='beta') + + +# In[72]: + +low, upp, nobs = 0.7, 0.9, 100 +smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) + + +# In[76]: + +low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 +smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', + variance_prop=None, discrete=False, continuity=0, + critval_continuity=0) + + +# In[78]: + +low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 +smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) + + +# In[79]: + +low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8 +smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) + + +# In[ ]: + + + + +# In[ ]: + + + + +# In[ ]: + + + + +# In[132]: + +# from Lachine 1981 equ (3) and (4) + +from scipy import stats +def sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9): + crit_alpha, crit_pow = stats.norm.isf(alpha), stats.norm.isf(1 - power) + return ((crit_alpha * std_null + crit_pow * std_alt) / np.abs(diff))**2 + +def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): + crit_alpha = stats.norm.isf(alpha) + crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt + return stats.norm.cdf(crit_pow) + + +# In[140]: + +# Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes +n_frac1 = 0.5 +n_frac2 = 1 - frac1 + +# if defined by ratio: n2 = ratio * n1 +ratio = 1 +n_frac1 = 1 / ( 1. 
+ ratio) +n_frac2 = 1 - frac1 + + +# If we use fraction of nobs, then sample_size return nobs is total number of observations +diff = 0.2 +std_null = std_alt = 1 * np.sqrt(1 / 0.5 + 1 / 0.5) +nobs = sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9) +nobs + + +# In[134]: + +#nobs = 858 +power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05) + + +# In[135]: + +alpha=0.05; power=0.9 +stats.norm.isf(alpha), stats.norm.isf(1 - power) + + +# In[136]: + +crit_alpha = stats.norm.isf(alpha) +(np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt + + +# In[137]: + +stats.norm.cdf(_) + + +# In[138]: + +smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05) + + +# In[ ]: + + + From 74e75dd256c50b265ab0d073c2148b7ce00cbff8 Mon Sep 17 00:00:00 2001 From: Josef Date: Mon, 21 Dec 2015 12:14:21 -0500 Subject: [PATCH 2/4] ENH: one proportion notebook, more power --- notebooks/proportion_one_power.ipynb | 1808 +++++++++++++++++++------- notebooks/proportion_one_power.py | 489 +++++-- 2 files changed, 1762 insertions(+), 535 deletions(-) diff --git a/notebooks/proportion_one_power.ipynb b/notebooks/proportion_one_power.ipynb index 6a20a17..f1e4f08 100644 --- a/notebooks/proportion_one_power.ipynb +++ b/notebooks/proportion_one_power.ipynb @@ -31,7 +31,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 1, "metadata": { "collapsed": false }, @@ -46,6 +46,17 @@ "import pandas as pd # to store results with labels" ] }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "%matplotlib inline" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -55,7 +66,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 3, "metadata": { "collapsed": false }, @@ -78,7 +89,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 4, "metadata": { "collapsed": false }, @@ -141,7 +152,7 @@ "jeffrey 0.110921 0.404400" ] }, - "execution_count": 15, + "execution_count": 4, "metadata": {}, "output_type": "execute_result" } @@ -165,7 +176,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 5, "metadata": { "collapsed": false }, @@ -176,7 +187,7 @@ "0.55100632188415744" ] }, - "execution_count": 16, + "execution_count": 5, "metadata": {}, "output_type": "execute_result" } @@ -187,7 +198,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 6, "metadata": { "collapsed": false }, @@ -198,7 +209,7 @@ "(-0.86333169460343107, 0.38795512282614564)" ] }, - "execution_count": 19, + "execution_count": 6, "metadata": {}, "output_type": "execute_result" } @@ -209,7 +220,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 7, "metadata": { "collapsed": false }, @@ -220,7 +231,7 @@ "(-0.79681907288959564, 0.42555611641912894)" ] }, - "execution_count": 20, + "execution_count": 7, "metadata": {}, "output_type": "execute_result" } @@ -238,7 +249,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 8, "metadata": { "collapsed": false }, @@ -249,7 +260,7 @@ "(0.025000000000000019, 0.025000000000000008, 0.025000000000000019)" ] }, - "execution_count": 28, + "execution_count": 8, "metadata": {}, "output_type": "execute_result" } @@ -261,7 +272,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 9, "metadata": { "collapsed": false }, @@ -282,7 +293,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 10, "metadata": { 
"collapsed": false }, @@ -295,7 +306,7 @@ " (-2.2785472457940248, 0.011346996472929981))" ] }, - "execution_count": 32, + "execution_count": 10, "metadata": {}, "output_type": "execute_result" } @@ -306,7 +317,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 11, "metadata": { "collapsed": false }, @@ -319,7 +330,7 @@ " (-2.4540616477331247, 0.0070626381724014512))" ] }, - "execution_count": 33, + "execution_count": 11, "metadata": {}, "output_type": "execute_result" } @@ -345,7 +356,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 12, "metadata": { "collapsed": false }, @@ -383,7 +394,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -416,7 +427,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -443,7 +454,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -474,9 +485,9 @@ "source": [ "The `smaller` hypothesis is strongly rejected, which means that we reject the null hypothesis that the true proportion is 0.6 or larger in favor of the alternative hypothesis that the true proportion is smaller than 0.6.\n", "\n", - "In the case `larger` alternative, the p-value is very large and we cannot reject the Null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6. \n", + "In the case of the `larger` alternative, the p-value is very large and we cannot reject the null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6. \n", "\n", - "Non-inferiority and superiority tests are special cases of these one-sided tests where the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount 5% below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test." + "Non-inferiority and superiority tests are special cases of these one-sided tests. Often, the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount, say 5%, below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test." ] }, { @@ -485,8 +496,7 @@ "source": [ "**Aside: Inequality Null hypothesis**\n", "\n", - "For most methods the p-values for the hypothesis tests are the same for the case when the null hypothesis is and inequality \n", - "\n", + "In the above definition of the null hypothesis we used an equality. For most methods the p-values for the hypothesis tests are the same for the case when the null hypothesis is an inequality \n", "\n", "The null nypothesis and alternative hypothesis for alternative `'larger'` specify that the true proportion is smaller than or equal to the hypothesized value versus the alternative that it is larger.\n", "\n", @@ -501,7 +511,9 @@ "\n", "\n", "The score test is an exception to this. 
If the null hypothesis is a inequality, then the constrained maximum likelihood estimate will depend on whether the constraint of the null hypothesis is binding or not. If it is binding, then the score test is the same as for the test with an equality in the null hypothesis. If the constrained is not binding then the null parameter estimate is the same as the estimate used for the Wald test.\n", - "Because the equality is the worst case in these hypothesis test, it does not affect the validity of the tests. However, in the asymptotic tests it would add another option to define the variance used in the calculations, and the standard score test does not take the inequality into account in calculating the variance. This is not implemented, so we restrict ourselves to equality null hypothesis, even though the interpretation is mostly the same as for the inequality null hypothesis.\n" + "Because the equality is the worst case in these hypothesis test, it does not affect the validity of the tests. However, in the asymptotic tests it would add another option to define the variance used in the calculations, and the standard score test does not take the inequality into account in calculating the variance. This is not implemented, so we restrict ourselves to equality null hypothesis, even though the interpretation is mostly the same as for the inequality null hypothesis.\n", + "\n", + "Reference for a score analysis with inequality null hypothesis for the case of comparing two proportions, see ...\n" ] }, { @@ -524,7 +536,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -535,7 +547,7 @@ "(-0.84882088476305617, 0.40293351466675675, 29.0)" ] }, - "execution_count": 61, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -549,7 +561,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -578,7 +590,7 @@ " 1., 1., 1., 1.])}" ] }, - "execution_count": 62, + "execution_count": 17, "metadata": {}, "output_type": "execute_result" } @@ -589,7 +601,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -600,7 +612,7 @@ "(-0.84882088476305617, 0.79853324266662162, 29.0)" ] }, - "execution_count": 63, + "execution_count": 18, "metadata": {}, "output_type": "execute_result" } @@ -611,7 +623,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -622,7 +634,7 @@ "(-0.84882088476305617, 0.20146675733337838, 29.0)" ] }, - "execution_count": 64, + "execution_count": 19, "metadata": {}, "output_type": "execute_result" } @@ -635,12 +647,12 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, at the latter confidence interval. " + "In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. 
The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, which is 0.025 when we use the latter's confidence interval for the equivalence margins. " ] }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -653,7 +665,7 @@ " (-2.4128139764969032, 0.011189332556866095, 29.0))" ] }, - "execution_count": 68, + "execution_count": 20, "metadata": {}, "output_type": "execute_result" } @@ -662,14 +674,59 @@ "ds.ttost_mean(*ci_df.loc['beta', :])" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We used a full sample with individual observations in the above. However, `DescrStatsW` allows us to use weights and we can specify the sample by the frequency of each level of the observation. The results are the same as before." + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 21, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], - "source": [] + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.84882088476305606, 0.20146675733337843, 29.0)" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds2 = smsw.DescrStatsW([0, 1], weights=[nobs - count, count])\n", + "ds2.ttest_mean(0.3, alternative='smaller')" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.049342036125071362,\n", + " (1.7060722803086068, 0.049342036125071362, 29.0),\n", + " (-2.4128139764969028, 0.011189332556866103, 29.0))" + ] + }, + "execution_count": 22, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds2.ttost_mean(*ci_df.loc['beta', :])" + ] }, { "cell_type": "code", @@ -711,7 +768,7 @@ }, { "cell_type": "code", - "execution_count": 103, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -726,7 +783,7 @@ }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 24, "metadata": { "collapsed": false }, @@ -767,7 +824,7 @@ " [ 3.00000000e+01, 2.05891132e-16, 1.00000000e+00]])" ] }, - "execution_count": 104, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } @@ -796,7 +853,7 @@ }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 25, "metadata": { "collapsed": true }, @@ -809,7 +866,7 @@ }, { "cell_type": "code", - "execution_count": 113, + "execution_count": 26, "metadata": { "collapsed": false }, @@ -844,7 +901,7 @@ }, { "cell_type": "code", - "execution_count": 114, + "execution_count": 27, "metadata": { "collapsed": false }, @@ -855,7 +912,7 @@ "0.047092254594638484" ] }, - "execution_count": 114, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -874,7 +931,7 @@ }, { "cell_type": "code", - "execution_count": 112, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -885,7 +942,7 @@ "0.07020749078421995" ] }, - "execution_count": 112, + "execution_count": 28, "metadata": {}, "output_type": "execute_result" } @@ -905,7 +962,7 @@ }, { "cell_type": "code", - "execution_count": 129, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -916,7 +973,7 @@ "(4.0, 14.0)" ] }, - "execution_count": 129, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } @@ -944,7 +1001,7 @@ }, { "cell_type": "code", - "execution_count": 130, 
+ "execution_count": 30, "metadata": { "collapsed": true }, @@ -955,7 +1012,7 @@ }, { "cell_type": "code", - "execution_count": 132, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -966,7 +1023,7 @@ "(4.0, 14.0)" ] }, - "execution_count": 132, + "execution_count": 31, "metadata": {}, "output_type": "execute_result" } @@ -984,7 +1041,7 @@ }, { "cell_type": "code", - "execution_count": 133, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -995,7 +1052,7 @@ "(0.030154943102089313, 0.040052547682131269)" ] }, - "execution_count": 133, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } @@ -1013,7 +1070,7 @@ }, { "cell_type": "code", - "execution_count": 137, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -1024,7 +1081,7 @@ "(0.0093165673111771617, 0.016937311492549543, 0.026253878803726705)" ] }, - "execution_count": 137, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } @@ -1044,7 +1101,7 @@ }, { "cell_type": "code", - "execution_count": 138, + "execution_count": 34, "metadata": { "collapsed": false }, @@ -1055,7 +1112,7 @@ "(0.030154943102089313, 0.016937311492549543, 0.047092254594638852)" ] }, - "execution_count": 138, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -1068,7 +1125,7 @@ }, { "cell_type": "code", - "execution_count": 139, + "execution_count": 35, "metadata": { "collapsed": false }, @@ -1079,7 +1136,7 @@ "(0.0093165673111771617, 0.040052547682131269, 0.049369114993308427)" ] }, - "execution_count": 139, + "execution_count": 35, "metadata": {}, "output_type": "execute_result" } @@ -1092,7 +1149,7 @@ }, { "cell_type": "code", - "execution_count": 124, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -1103,7 +1160,7 @@ "0.95290774540536105" ] }, - "execution_count": 124, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1122,7 +1179,7 @@ }, { "cell_type": "code", - "execution_count": 142, + "execution_count": 37, "metadata": { "collapsed": false }, @@ -1133,7 +1190,7 @@ "(0.0093165673111771617, 0.030154943102089313)" ] }, - "execution_count": 142, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } @@ -1145,7 +1202,7 @@ }, { "cell_type": "code", - "execution_count": 144, + "execution_count": 38, "metadata": { "collapsed": false }, @@ -1156,7 +1213,7 @@ "0.047092254594638852" ] }, - "execution_count": 144, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -1179,7 +1236,7 @@ }, { "cell_type": "code", - "execution_count": 151, + "execution_count": 39, "metadata": { "collapsed": true }, @@ -1194,7 +1251,7 @@ }, { "cell_type": "code", - "execution_count": 152, + "execution_count": 40, "metadata": { "collapsed": false }, @@ -1205,7 +1262,7 @@ "(0.018633134622354323, 0.060309886204178625)" ] }, - "execution_count": 152, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } @@ -1216,7 +1273,7 @@ }, { "cell_type": "code", - "execution_count": 153, + "execution_count": 41, "metadata": { "collapsed": false }, @@ -1227,7 +1284,7 @@ "(0.16894012072030201, 0.080105095364262538)" ] }, - "execution_count": 153, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } @@ -1260,7 +1317,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "## Back to power" + "## Exact Power" ] }, { @@ -1272,7 +1329,7 @@ }, { "cell_type": "code", - "execution_count": 155, + "execution_count": 42, "metadata": { "collapsed": false }, @@ -1317,7 +1374,7 @@ 
}, { "cell_type": "code", - "execution_count": 162, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -1348,20 +1405,85 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 44, "metadata": { "collapsed": true }, "outputs": [], - "source": [] + "source": [ + "def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), item=None, use_idx=False):\n", + " \"\"\"calculate power for proportion test by explicit numeration of sample space\n", + " \n", + " \n", + " argument `item` is currently to avoid having to figure out the return of test_func\n", + " None if return is pvalue, integer for index of pvalue if tuple is returned\n", + " \n", + " \"\"\"\n", + " sample_space = np.arange(nobs + 1)\n", + " try:\n", + " # TODO: how do we vectorize, if res were a instance with pvalue attribute, then it would be easier.\n", + " res = test_func(sample_space, nobs, *args)\n", + " #if len(res) > 1 and not res.shape == sample_space.shape:\n", + " # assume p-value is the second term\n", + " if item is not None:\n", + " res = res[item]\n", + " except Exception:\n", + " # assume test_func is not vectorized\n", + " if item is None:\n", + " res = [test_func(x, nobs, p_null, *args) for x in sample_space]\n", + " else:\n", + " res = [test_func(x, nobs, p_null, *args)[item] for x in sample_space]\n", + " \n", + " pvalues = np.asarray(res)\n", + " rej_indicator = (pvalues <= alpha)\n", + " if use_idx:\n", + " # This evaluates the pmf at all points, useful for non-interval rejection regions\n", + " x_rej = sample_space[rej_indicator]\n", + " power = stats.binom.pmf(x_rej, nobs, prop).sum()\n", + " return power, x_rej\n", + " else:\n", + " # use critical values, assumes standard two tails, two-sided only for now\n", + " c = np.nonzero(np.diff(rej_indicator))[0]\n", + " if len(c) == 2:\n", + " low = c[0]\n", + " upp = c[1] + 1\n", + " else:\n", + " raise NotImplementedError('currently only two sided hypothesis tests')\n", + " \n", + " power = power_binom_reject(low, upp, prop, nobs)\n", + " \n", + " return power, (low, upp)\n", + " " + ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## trying more\n", - "\n", - "The rest below is just some unsorted experiments to try a few more things." + "We can use this function to check the size of the two binomial tests. Both results are what we already had before and agree with the results of R packages." + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.047092254594638852, (4, 15))\n", + "(0.047092254594638484, array([ 0, 1, 2, 3, 4, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n", + " 27, 28, 29, 30]))\n" + ] + } + ], + "source": [ + "print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs))\n", + "print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs, use_idx=True))\n", + "# 0.04709225 R library MESS: power.binom.test(n = 30, p0 = 0.3, pa = 0.3)" ] }, { @@ -1373,130 +1495,92 @@ "outputs": [], "source": [] }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "TODO: The following is not correct because when we change the sample size, then the rejection region also changes." 
- ] - }, { "cell_type": "code", - "execution_count": 164, + "execution_count": 46, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "[0.047092254594638852,\n", - " 0.047775312714239675,\n", - " 0.051602724514375933,\n", - " 0.05859877650596669,\n", - " 0.068789625299809865,\n", - " 0.082186755965548378,\n", - " 0.098772671226915978,\n", - " 0.118489659792336,\n", - " 0.1412321565494524,\n", - " 0.16684288140347131,\n", - " 0.19511265080608356,\n", - " 0.22578351411938377,\n", - " 0.25855468655666075,\n", - " 0.29309063531712998,\n", - " 0.32903062340607336,\n", - " 0.36599901949754499,\n", - " 0.4036157318615764,\n", - " 0.44150620799796952,\n", - " 0.47931054705529291,\n", - " 0.51669138801043579]" - ] - }, - "execution_count": 164, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.026253878803726705, (3, 15))\n", + "(0.026253878803726503, array([ 0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,\n", + " 28, 29, 30]))\n" + ] } ], "source": [ - "[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]" + "print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs))\n", + "print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs, use_idx=True))\n", + "# 0.02625388 from exactci: powerBinom(n = 30, p0 = 0.3, p1 = 0.3, strict=TRUE)" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "We can also calculate this in vectorized form for the set of sample sizes and all three tests:" + "We obtain the power of the test at a proportion that is different from the proportion of the null hypothesis. Using the minlike binomial test the power if the true proportion is 0.5 is 0.57, the power for the central binomial test differs only in the 5th decimal from this." 
] }, { "cell_type": "code", - "execution_count": 166, + "execution_count": 47, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "array([[ 0.04709225, 0.02625388, 0.07020749],\n", - " [ 0.04777531, 0.03102743, 0.07728123],\n", - " [ 0.05160272, 0.03820442, 0.0883212 ],\n", - " [ 0.05859878, 0.04792633, 0.10324071],\n", - " [ 0.06878963, 0.06032282, 0.12191353],\n", - " [ 0.08218676, 0.07549525, 0.14416465],\n", - " [ 0.09877267, 0.09350311, 0.16976553],\n", - " [ 0.11848966, 0.11435385, 0.19843379],\n", - " [ 0.14123216, 0.1379965 , 0.2298369 ],\n", - " [ 0.16684288, 0.16431907, 0.26359926],\n", - " [ 0.19511265, 0.19314968, 0.29931183],\n", - " [ 0.22578351, 0.22426089, 0.33654338],\n", - " [ 0.25855469, 0.25737665, 0.37485255],\n", - " [ 0.29309064, 0.29218144, 0.41379979],\n", - " [ 0.32903062, 0.32833054, 0.45295869],\n", - " [ 0.36599902, 0.36546115, 0.49192593],\n", - " [ 0.40361573, 0.40320337, 0.53032969],\n", - " [ 0.44150621, 0.4411907 , 0.56783618],\n", - " [ 0.47931055, 0.47906961, 0.60415428],\n", - " [ 0.51669139, 0.51650774, 0.63903825]])" - ] - }, - "execution_count": 166, - "metadata": {}, - "output_type": "execute_result" + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.57226196210831415, (4, 15))\n", + "(0.57226196210830982, array([ 0, 1, 2, 3, 4, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,\n", + " 27, 28, 29, 30]))\n" + ] } ], "source": [ - "power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])" + "print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs))\n", + "print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs, use_idx=True))\n", + "# 0.572262 R library MESS: power.binom.test(n = 30, p0 = 0.3, pa = 0.5)" ] }, { "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, + "execution_count": 48, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], - "source": [] + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.57223643921315681, (3, 15))\n", + "(0.57223643921315248, array([ 0, 1, 2, 3, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27,\n", + " 28, 29, 30]))\n" + ] + } + ], + "source": [ + "print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs))\n", + "print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs, use_idx=True))\n", + "# 0.5722364 from exactci: powerBinom(n = 30, p0 = 0.3, p1 = 0.5, strict=TRUE)" + ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count." + "surprisingly this also works in vectorized for to calculate the power for a set of alternatives." 
] }, { "cell_type": "code", - "execution_count": 81, + "execution_count": 49, "metadata": { "collapsed": false }, @@ -1504,21 +1588,46 @@ { "data": { "text/plain": [ - "(-0.39840953644479782, 0.69032832946419354)" + "array([[ 0.1 , 0.82450516, 0.64743921],\n", + " [ 0.15 , 0.52447582, 0.32166697],\n", + " [ 0.2 , 0.25546448, 0.12294203],\n", + " [ 0.25 , 0.10061913, 0.04019886],\n", + " [ 0.3 , 0.04709225, 0.02625388],\n", + " [ 0.35 , 0.07270746, 0.067086 ],\n", + " [ 0.4 , 0.17687913, 0.17568238],\n", + " [ 0.45 , 0.35539727, 0.35519769],\n", + " [ 0.5 , 0.57226196, 0.57223644],\n", + " [ 0.55 , 0.76909418, 0.76909177],\n", + " [ 0.6 , 0.90294333, 0.90294317],\n", + " [ 0.65 , 0.96991631, 0.9699163 ],\n", + " [ 0.7 , 0.99362965, 0.99362965],\n", + " [ 0.75 , 0.99918101, 0.99918101],\n", + " [ 0.8 , 0.99994761, 0.99994761]])" ] }, - "execution_count": 81, + "execution_count": 49, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null)" + "p1 = np.linspace(0.1, 0.8, 15)\n", + "pbminlike = power_binom_proptest(smprop.binom_test, p_null, p1, nobs)\n", + "pbcentral = power_binom_proptest(binom_test_centered, p_null, p1, nobs)\n", + "pow_bt = np.column_stack((p1, pbminlike[0], pbcentral[0]))\n", + "pow_bt" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "to check this let's use a list comprehension and explicitly loop over all alternative proportions" ] }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 50, "metadata": { "collapsed": false }, @@ -1526,21 +1635,42 @@ { "data": { "text/plain": [ - "(-0.56343616981901101, 0.57313791338407638)" + "[(0.82450515656089784, (4, 15)),\n", + " (0.52447582459902431, (4, 15)),\n", + " (0.25546448035198721, (4, 15)),\n", + " (0.10061913372879633, (4, 15)),\n", + " (0.047092254594638908, (4, 15)),\n", + " (0.072707457651031357, (4, 15)),\n", + " (0.17687912757321209, (4, 15)),\n", + " (0.3553972715831355, (4, 15)),\n", + " (0.57226196210831415, (4, 15)),\n", + " (0.76909417955357151, (4, 15)),\n", + " (0.90294333311242181, (4, 15)),\n", + " (0.96991630631719361, (4, 15)),\n", + " (0.99362965380106261, (4, 15)),\n", + " (0.99918101085734667, (4, 15)),\n", + " (0.99994761271288934, (4, 15))]" ] }, - "execution_count": 77, + "execution_count": 50, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null)" + "[power_binom_proptest(smprop.binom_test, p_null, p1_, nobs) for p1_ in p1]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "And finally a plot." 
] }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 51, "metadata": { "collapsed": false }, @@ -1548,21 +1678,756 @@ { "data": { "text/plain": [ - "(-0.79681907288959564, 0.42555611641912894)" + "" ] }, - "execution_count": 78, + "execution_count": 51, "metadata": {}, "output_type": "execute_result" - } - ], - "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null)" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAegAAAFwCAYAAABzZegiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VFXixvHvSSUBAgEpAoIRRAQFRZqydKU3KQIqIAqi\ngL3iWlgLltWfDUEsIKICikhRqkIEEQxLU6RLB0Gk94TM+f0xwc0iZZK5kzvl/TxPHpNwc+a9huTl\nnnOLsdYiIiIiwSXK7QAiIiLydypoERGRIKSCFhERCUIqaBERkSCkghYREQlCKmgREZEgdN6CNsZ8\naIzZZYz5+RzbvGWMWWeMWWaMucrZiCIiIpHHlyPokUCzs/2hMaYFUN5aeynQF3jXoWwiIiIR67wF\nba39Adh3jk3aAR9nbfsTUMgYU8KZeCIiIpHJiTXo0sDWbB9vz/qciIiI5JJOEhMREQlCMQ6MsR24\nKNvHZbI+9zfGGN34W0REIo611uT0a3wtaJP1diaTgf7AOGNMHWC/tXbX2QYK54dzDBo0iEGDBrkd\nI2C0f6ErnPcNwnf/jh7P4P0ZP/LB0H9Tql5d9h3bx8H0fRzO3MdRzz6Om31kRO/jZOw+iDuESU8i\nOiOZ2Mxk8tlkEqOSKRiTTFJcMsn5kimaP5niBZMpUSiZUsnJlC1WhHLFkyl9QRJxsdGu7We4fv9O\nMSbH3Qz4UNDGmM+AhkBRY8wW4BkgDrDW2vestVONMS2NMeuBI0CvXCUREREWrdnG29Om8e3mafye\nbzaJxyuQcNiSeHw/RRKKcOkF5YOyZMV55y1oa+3NPmwzwJk4IiKR5fCxdIZP+4Gxi6fxy7HppMf9\nTtmMprS99EbuafkuVS4uHvZHmHJmTqxBS5aGDRu6HSGgtH+hK5z3DUJv/xas3MKQGdOYvWUaOxPm\nkP9YJWoUasGQeu/To0nNvx0Jh9r+5VS4719umbxcEzbG2HBegxYROZODR07w7rR5fL5kGr8cn0ZG\n3G7KZTSjxaUtuKdlUy4vW8ztiBJAxphcnSSmghYRCYAfVmzinRnTmLNtGrsSvqfAscrULNyCnte1\n4JbG1xATratcI4UKWkTERfsPH2fY1Ll8sXQav6ZPIyNmHymZzWl5aXPubdWUS8sUdTuiuEQFLSKS\nx2Yv+41hs6Yxd8d0/kiYS8GjV1KrSAtuq9uCrg2v1lGyACpoEZE8MWvxOh4ZP4SV6dPIjD7EJZ7m\ntLysOfe2uoHypYq4HU+CUG4LWmdxi4j46OlPvub5X27nH/nuYlSzcXSuV01HyRIwKmgRkfM4memh\n+QuDmXPoXd5tPIk7W1zrdiSJACpoEZFz2LHnEDVfuI2DdgeL+qVR/dJSbkeSCKG5GRGRs/hu6Xou\neeFaCkYXYfvzqSpnyVMqaBGRM3h+7HRuGFuXDmUGsPLl90jKH+92JIkwmuIWEcnG47G0evEVZh54\nk7fqjWdAm3puR5IIpYIWEcnyx74j1HjudvbYDSy8K42al5VxO5JEME1xi4gAqcs3UO7Z64iLSmD7\nc/NUzuI6FbSIRLxXxn9Lk8+uo1Wp3qx9ZSSFC+RzO5KIprhFJHJ5PJZ2L/8fU/e9ymvXjeX+9g3d\njiTyFxW0iESkPw8cpcazffjDs4q5vRdSt0o5tyOJ/A9NcYtIxJn/62bKDvoHBsOWQT+onCUoqaBF\nJKK8MTGV+qPqcEOJW/nt36O5oFCi25FEzkhT3CISETweS+dX32binsG8WOsTHu10vduRRM5JBS0i\nYW//4ePUGHQX2zOXMqfXAupXTXE7ksh5qaBFJKwtWrONRu92oIhJYfPTP1I8Ob/bkUR8ojVoEQlb\nQ7/+gTof1qLeBR3Z9OpYlbOEFB1Bi0jY8Xgst7z+LuP+GMSz1UfxZNfmbkcSyTEVtIiElYNHTlBr\n0AA2ZS5gVvf5NLm6gtuRRHJFBS0iYWPJuh00GNqRJFOKDf9cQKmiBd2OJJJrWoMWkbDw3rQF1Hyv\nFnWKtGbzv79QOUvI0xG0iIS8nm9+wOgdT/BktRE8e2trt+OIOEIFLSIhrddbIxiz5RW+6TqPFjUv\nczuOiGOMtTbvXswYm5evJyLhbcXGXVR990o+b/0tnepVdTuOyBkZY7DWmhx/nQpaREJVykO3Uixf\nKdJeeMXtKCJnlduC1hS3iISkV8Z/y1bzAz89/KvbUUQCQmdxi0jI2X/4OE8uuJsnrhqiu4NJ2NIU\nt4iEnPrPPM2GQyvZ9n/j3Y4icl6a4haRiDA1bTU/nBhG2l3L3I4iElCa4haRkOHxWG7+7C46FH2a\nGhVLux1HJKBU0CISMu4cOooMc4TP7u/ndhSRgNMUt4iEhDVb/2Tk1sf5uM1U4mKj3Y4jEnA6SUxE\nQsKlD/eiYGxhlrz4uttRRHJEJ4mJSNh6Y2IqG813bNE1zxJBtAYtIkHt4JETPDbvLh654i09oUoi\niqa4RSSoNXn2OVbu+w+/vz7J7SgiuaIpbhEJO7MWr2PO0TeZ32eJ21FE8pymuEUkKHk8li6j76ZN\n4Se4tnJZt+OI5DkVtIgEpQHvfcZxs4dxD97rdhQRV2iKW0SCzm879jJ8w8N80GIS+eL0a0oik04S\nE5Ggc/mjdxIbFcfPLw1xO4qI33SSmIiEhWHfzGct37DxwZVuRxFxldagRSRoHD6WzgPf9eW+y96g\nbPFCbscRcZUKWkSCRqfX/o8kW5ZXe3VyO4qI6zTFLSJBIXX5BmYefpXUXouIisrxcp1I2NERtIi4\nzuOxdP6oP80KPkL9qiluxxEJCipoEXHdQyO+4JDZxpcPPeh2FJGgoSluEXHV5l37eWvtAwy9/gsS\n88W6HUckaOg6aBFx1ZWP9eekPcmqV4a7HUUkIELmOmiPB6I0sS4iwIczfmKlncD6B3TNs8jp8rwq\nl+ihNCICHE8/yYDpfbnrktdIuTDZ7TgiQSfPC3rGjLx+RREJRp1fe5NEW4y37+zmdhSRoORTQRtj\nmhtjVhtj1hpjHjvDnycZYyYb
Y5YZY34xxtx2trFU0CIy/9fNfHPgRT7vOUzXPIucxXlPEjPGRAFr\ngSbADmAR0NVauzrbNgOBJGvtQGPMBcAaoIS19uRpY9kCBSzbt0NSksN7IiIhweOxlHqoHVck1+Lb\np590O45IwOX2JDFfjqBrAeustZuttRnAWKDdadtYoGDW+wWBPaeX8ynXXguzZ+c0poiEi4EfT2Sf\nWceEhx5xO4pIUPOloEsDW7N9vC3rc9kNASobY3YAy4H7zjZYs2aa5haJVDv2HOK1X+/l1YbDScof\n73YckaDm1ElizYCl1tpSwNXAO8aYAmfcMKugdTm0SORp9epTXMIN3NO2vttRRIKeL9dBbwfKZvu4\nTNbnsusFvAhgrf3NGLMRqAT85/TBvvhiEH/+CffeCx07NqRhw4a5Ci4ioeWT7xaz3DOGVff+6nYU\nkYBKTU0lNTXV73F8OUksGu9JX02A34E0oJu1dlW2bd4B/rDW/ssYUwJvMVez1u49bSxrreX22+Hq\nq+Gee/zOLyIhID0jk+RHa3NzhXt4v39Pt+OI5KmAnSRmrc0EBgAzgV+BsdbaVcaYvsaYO7M2ex64\nzhjzMzALePT0cs5O69AikaXb6+8QZwsy/O4ebkcRCRmu3It7zx5ISYHduyFe54mIhLVFa7ZRe+RV\nfN3hB1rWquR2HJE8F8jLrBxXtChUrgzz57vx6iKSl9oPv496+fqrnEVyyLXHVmiaWyT8PTV6CrvN\nL0x6eKDbUURCjgpaRALij31HePHnATx/7VAKF8jndhyRkOPa86BPnoTixWHlSihZMs8iiEgeqfnP\nR9hzfCcbXhvtdhQRV4XUGjRATAw0bgwzZ7qVQEQC5fO5y1l8chRT7nnN7SgiIcu1ggZNc4uEo/SM\nTG7/qi/dSw2mysXF3Y4jErJcL+hZs8DjcTOFiDhp4MdfAR4+HHC721FEQpqrBV22rPeSqyVL3Ewh\nIk768Jch3FH5IWKiXf31IhLyXP8Jat5c09wi4eLLH37hUNw6Xuzewe0oIiHP9YLWOrRI+Hhy8hAa\n5O9LYr5Yt6OIhDzXLrM65ehRKFECtm+HpKQ8iyIiDtv4+z7Kv3UJy/qsouolunZS5JSQu8zqlMRE\nuPZamD3b7SQi4o/7R31E2fSWKmcRh7he0KBpbpFQdzLTw9Td7/DE9QPcjiISNoKqoPNwtl1EHPTC\nuOnEeQrTu1kdt6OIhI2gKOgqVSA9HdavdzuJiOTGkEVv0638AKKicrzMJiJnERQFbQw0bappbpFQ\nNGvxOvbELebfPbu4HUUkrARFQYPWoUVC1WPjh1I77g6SCya4HUUkrLh+mdUpe/ZASgrs3g3x8XkW\nSUT8sHPvYUq9Uo553ZdQt0o5t+OIBKWQvczqlKJFoXJlmD/f7SQi4qsHP/qEkicaqJxFAiBoCho0\nzS0SSjwey4RtQ3ioni6tEgkEFbSI5Mobk1IBywPtG7kdRSQsBVVB16oFW7bAzp1uJxGR83lt3hBu\nLK1Lq0QCJagKOiYGGjeGmTPdTiIi57Jg5RZ+j5/D6726ux1FJGwFVUGDprlFQsFDY96lGt0pWaSA\n21FEwlZQFvSsWeDxuJ1ERM5k/+HjLEz/gJc69nc7ikhYC7qCLlvWe8nV0qVuJxGRM3lk1DiKpFen\nWY2KbkcRCWtBV9CgaW6RYOXxWD5b/zYDatzjdhSRsBeUBd28OUyf7nYKETndyJlppEfv44mbmrsd\nRSTsBWVB16/vneI+eNDtJCKS3fOz3qbFBf2Ii412O4pI2AvKgk5MhGuvhdmz3U4iIqes2LiLzXHf\n8GbP292OIhIRgrKgQevQIsHmvtHvc1lmZ1IuTHY7ikhECPqCzsOHbYnIWRw9nkHqoXd5ro3uuy2S\nV4K2oKtUgfR0WL/e7SQi8s9PJlIwozyd6lV1O4pIxAjagjYGmjbVNLdIMBix4m3uuFJHzyJ5KWgL\nGrQOLRIMPp+7nMOxG3julvZuRxGJKEFd0NdfD3Pneqe6RcQdT095h0YF7yIxX6zbUUQiSlAXdNGi\nUKkSzJ/vdhKRyPTbjr2sjfmCN3r0cTuKSMQJ6oIG3VVMxE33jxrJxemtueLiEm5HEYk4QV/QWocW\ncUd6RibT9wzlyWY6OUzEDUFf0LVqwZYtsHOn20lEIsvz46YRl1mEXjfUcjuKSEQK+oKOiYHGjWHm\nTLeTiESWof8Zwi0V7sEY43YUkYgU9AUNmuYWyWvTF61lb/xSXul5k9tRRCJWyBT0rFng8bidRCQy\nPD7hHa6N703hAvncjiISsWLcDuCLsmW9l1wtXQrXXON2GpHwtmPPIX5mNAu6LXc7ikhEC4kjaNA0\nt0heefCj0Vx4ohG1L7/I7SgiEU0FLSJ/8XgsX20fwiP173E7ikjEC5mCbtAAliyBgwfdTiISvv7v\nqzkYori3bQO3o4hEvJAp6MREuPZamD3b7SQi4ev/fnibjhcNICpKl1aJuC3PC3rX4V25/lpNc4sE\nzvxfN7Mzfi6v9bzV7SgiggsFPWfTnFx/7amCttbBQCICwMNj3uWqqB6ULFLA7SgighsFvTH3BV2l\nivfRk+vXOxhIRNh78Bg/ZXzIvzv3dzuKiGTJ84KevSn3i8jGQNOmmuYWcdojo8ZxQXoNmlxdwe0o\nIpIlzwt6//H9bD2wNddfr3VoEWd5PJYxG95mQC09tUokmOR5QTe6uJFf69DXXw9z53qnukXEfx9M\nX8jJ6AM8cVNzt6OISDYhV9BFi0KlSjB/voOhRCLY4O+G0LJYf2KiQ+aqS5GIkOc/kY1TGjN742ys\nH6dia5pbxBnLf9vJlripvHlbL7ejiMhp8rygKxatSEZmBhv3b8z1GM2bw/TpDoYSiVD3f/Iel3u6\nUK5EYbejiMhp8rygjTE0SmnE7I25P5u7Vi3YsgV27nQwmEiEOXIsg7lHhvNcW11aJRKMfCpoY0xz\nY8xqY8xaY8xjZ9mmoTFmqTFmhTHmnIvMjS9u7Nc6dEwMNG4MM2fmegiRiPfEJxNISq9Ih7pXuh1F\nRM7gvAVtjIkChgDNgCpAN2NMpdO2KQS8A7S21l4BdD7XmKeOoLUOLeKej34dQu9qurRKJFj5cgRd\nC1hnrd1src0AxgLtTtvmZuBLa+12AGvtn+caMKVwCvHR8azZsyY3mQFvQc+aBR5ProcQiVhjU5dx\nJHYTz91y+o+yiAQLXwq6NJD9ziLbsj6XXUWgiDFmjjFmkTGm+7kGdGIdumxZ7yVXS5fmegiRiPXM\nN0NonHQ3+eJi3I4iImfh1EliMUB1oAXQHHjKGHPOewb6uw4NmuYWyY312/eyLuZL3ujR2+0oInIO\nvvzzeTtQNtvHZbI+l9024E9r7XHguDFmLlAN+NtjLQYNGgTAwRMHmbl3Jp5OHqJM7v6d0KwZvPQS\nPPFErr5cJCLdN+pDUjLaUrlccbejiISl1NRUUlNT/R7HnO9ELWNMNLAGaAL8DqQB3ay1q
[... remainder of the base64-encoded PNG data omitted (figure: power of the 'minlike' and 'central' binomial tests as a function of the true proportion) ...]\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ],
 + "source": [ + "import matplotlib.pyplot as plt\n", + "plt.figure(figsize=(8, 6))\n", + "plt.plot(pow_bt[:, 0], pow_bt[:, 1], label='minlike')\n", + "plt.plot(pow_bt[:, 0], pow_bt[:, 2], label='central')\n", + "plt.legend(loc='lower right')\n", + "#plt.show()" + ] + },
 + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "From the plot we can see that both binomial tests have the same power for large true proportions, but the standard minlike binomial test is more powerful than the central binomial test for small true proportions. For example, if the true proportion is 0.15, the probabilities of rejecting the null hypothesis are 0.52 versus 0.32. 
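As a reminder of what these numbers are: the power of an exact binomial test at a true proportion is simply the binomial probability of the test's rejection region when that proportion is the true value, which is the quantity the power calculations above are based on. A minimal sketch of such a calculation, using only `scipy.stats` and `smprop.binom_test`; the function name `binom_power_check` is made up here for illustration and is not one of the notebook's helpers:

```python
# Hypothetical sketch, not part of the notebook's helper functions:
# exact power of the two-sided (minlike) binomial test, computed as the
# binomial probability of the rejection region under the alternative.
import numpy as np
from scipy import stats
import statsmodels.stats.proportion as smprop

def binom_power_check(p_alt, p_null, nobs, alpha=0.05):
    counts = np.arange(nobs + 1)
    # rejection region: all counts whose two-sided p-value under the null is below alpha
    reject = np.array([smprop.binom_test(c, nobs, prop=p_null) < alpha
                       for c in counts])
    # power: probability of landing in the rejection region when p_alt is the true proportion
    return stats.binom.pmf(counts[reject], nobs, p_alt).sum()
```

For example, `binom_power_check(0.15, 0.3, 30)` should roughly reproduce the minlike value of about 0.52 quoted above (up to how a p-value exactly equal to alpha is treated). 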
We can verify that the two R packages produce the same result" + ] + }, + { + "cell_type": "code", + "execution_count": 52, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ 0.15 0.52447582 0.32166697]\n" + ] + } + ], + "source": [ + "# 0.5244758 power.binom.test(n = 30, p0 = 0.3, pa = 0.15)\n", + "# 0.321667 powerBinom(n = 30, p0 = 0.3, p1 = 0.15, strict=TRUE)\n", + "print(pow_bt[1,:])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Power as a function of nobs" + ] + }, + { + "cell_type": "code", + "execution_count": 53, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0.57223643921315681, (3, 15)),\n", + " (0.6399669107049708, (4, 15)),\n", + " (0.56998461764305863, (4, 16)),\n", + " (0.63583922386169445, (4, 16)),\n", + " (0.56791996222455055, (4, 17)),\n", + " (0.63206233229720976, (4, 17)),\n", + " (0.69114679822814651, (5, 17)),\n", + " (0.62858903440064717, (5, 18)),\n", + " (0.68645084290255953, (5, 18)),\n", + " (0.7388025247055342, (5, 18)),\n", + " (0.68209018097786611, (6, 19)),\n", + " (0.7336478091810934, (6, 19)),\n", + " (0.67801693625369797, (6, 20)),\n", + " (0.72880881213382032, (6, 20)),\n", + " (0.77431005532071151, (6, 20)),\n", + " (0.72425939575356324, (7, 21)),\n", + " (0.76930532470325352, (7, 21)),\n", + " (0.80915383662430451, (7, 21)),\n", + " (0.76456080520956615, (7, 22)),\n", + " (0.80419952705314313, (8, 22)),\n", + " (0.76005675092873848, (8, 23)),\n", + " (0.79946934784191803, (8, 23)),\n", + " (0.83412532028134168, (8, 23)),\n", + " (0.79494925223816315, (9, 24)),\n", + " (0.82955489381149028, (9, 24)),\n", + " (0.85969535796997965, (9, 24)),\n", + " (0.82515915208675972, (9, 25)),\n", + " (0.85537850209764454, (10, 25)),\n", + " (0.88147666844959838, (10, 25)),\n", + " (0.85120253117177069, (10, 26)),\n", + " (0.87746966430760009, (10, 26)),\n", + " (0.89998454518500937, (11, 26)),\n", + " (0.87357294168778787, (11, 27)),\n", + " (0.89631613374652885, (11, 27)),\n", + " (0.86978231200457246, (11, 28)),\n", + " (0.89273043313360578, (11, 28)),\n", + " (0.9123571875031885, (12, 28)),\n", + " (0.8892256865435717, (12, 29)),\n", + " (0.90909500377968189, (12, 29)),\n", + " (0.92598392836486831, (12, 29)),\n", + " (0.90589194993819355, (13, 30)),\n", + " (0.92304364668391603, (13, 30)),\n", + " (0.93754091885866919, (13, 30)),\n", + " (0.92014416997635939, (13, 31)),\n", + " (0.93491119094924324, (14, 31)),\n", + " (0.947328884601218, (14, 31)),\n", + " (0.93230745896854683, (14, 32)),\n", + " (0.94499220757779534, (14, 32)),\n", + " (0.95560880613226584, (15, 32)),\n", + " (0.94266983364429702, (15, 33)),\n", + " (0.95354406594706098, (15, 33)),\n", + " (0.9403631701569205, (15, 34)),\n", + " (0.95148455561124845, (16, 34)),\n", + " (0.96079019453637449, (16, 34)),\n", + " (0.94943183320176827, (16, 35)),\n", + " (0.95897285282638978, (16, 35)),\n", + " (0.96692370801703154, (17, 35)),\n", + " (0.95715551699587487, (17, 36)),\n", + " (0.96532698073444512, (17, 36)),\n", + " (0.97211083775950236, (17, 36)),\n", + " (0.96372523793914133, (18, 37)),\n", + " (0.97071323829965117, (18, 37)),\n", + " (0.97649422072829783, (18, 37)),\n", + " (0.96930705160545516, (18, 38)),\n", + " (0.97527496978248207, (19, 38)),\n", + " (0.98019588266426305, (19, 38)),\n", + " (0.97404473985665685, (19, 39)),\n", + " (0.9791353399760685, (19, 39)),\n", + " (0.98331998698757472, (20, 39)),\n", + " (0.97806235410269993, (20, 
40))]" + ] + }, + "execution_count": 53, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nobs_arr = np.arange(30, 100)\n", + "#this doesn't work vectorized in nobs\n", + "pbcentral_nobs = [power_binom_proptest(binom_test_centered, p_null, 0.5, nobs_) for nobs_ in nobs_arr]\n", + "pbcentral_nobs" + ] + }, + { + "cell_type": "code", + "execution_count": 54, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "[(0.57226196210831415, (4, 15)),\n", + " (0.6399669107049708, (4, 15)),\n", + " (0.56998461764305863, (4, 16)),\n", + " (0.63583922386169445, (4, 16)),\n", + " (0.69620740180835128, (4, 16)),\n", + " (0.63207178033189859, (5, 17)),\n", + " (0.69114679822814651, (5, 17)),\n", + " (0.74431582297256682, (5, 17)),\n", + " (0.68645084290255953, (5, 18)),\n", + " (0.7388025247055342, (5, 18)),\n", + " (0.68209018097786611, (6, 19)),\n", + " (0.7336478091810934, (6, 19)),\n", + " (0.77960188076758619, (6, 19)),\n", + " (0.81981199159417895, (6, 19)),\n", + " (0.77431223359280932, (7, 20)),\n", + " (0.81435254341931784, (7, 20)),\n", + " (0.76930532470325352, (7, 21)),\n", + " (0.80915383662430451, (7, 21)),\n", + " (0.84383690828378721, (7, 21)),\n", + " (0.80419952705314313, (8, 22)),\n", + " (0.83888242159912874, (8, 22)),\n", + " (0.86878157550036506, (8, 22)),\n", + " (0.83412613720018536, (9, 23)),\n", + " (0.86416102739275724, (9, 23)),\n", + " (0.82955489381149028, (9, 24)),\n", + " (0.85969535796997965, (9, 24)),\n", + " (0.88559735796142425, (9, 24)),\n", + " (0.85537850209764454, (10, 25)),\n", + " (0.88147666844959838, (10, 25)),\n", + " (0.90373690643325377, (10, 25)),\n", + " (0.87746996155257628, (11, 26)),\n", + " (0.89998454518500937, (11, 26)),\n", + " (0.87357294168778787, (11, 27)),\n", + " (0.89631613374652885, (11, 27)),\n", + " (0.91567859457304746, (11, 27)),\n", + " (0.89273054230762783, (12, 28)),\n", + " (0.9123571875031885, (12, 28)),\n", + " (0.92896436165278928, (12, 28)),\n", + " (0.94287287762251693, (12, 28)),\n", + " (0.92598399384178798, (13, 29)),\n", + " (0.94019538363476485, (13, 29)),\n", + " (0.92304364668391603, (13, 30)),\n", + " (0.93754091885866919, (13, 30)),\n", + " (0.94967817277525135, (13, 30)),\n", + " (0.93491119094924324, (14, 31)),\n", + " (0.947328884601218, (14, 31)),\n", + " (0.95767696528284429, (14, 31)),\n", + " (0.9449922309236829, (15, 32)),\n", + " (0.95560880613226584, (15, 32)),\n", + " (0.96441830716955212, (15, 32)),\n", + " (0.95354406594706098, (15, 33)),\n", + " (0.96260592717138593, (15, 33)),\n", + " (0.95148455561124845, (16, 34)),\n", + " (0.96079019453637449, (16, 34)),\n", + " (0.96851387575308567, (16, 34)),\n", + " (0.95897286108157542, (17, 35)),\n", + " (0.96692370801703154, (17, 35)),\n", + " (0.97349844766997706, (17, 35)),\n", + " (0.97889469676577523, (17, 35)),\n", + " (0.9721108426575078, (18, 36)),\n", + " (0.97770124172141426, (18, 36)),\n", + " (0.97071323829965117, (18, 37)),\n", + " (0.97649422072829783, (18, 37)),\n", + " (0.98124288505673463, (18, 37)),\n", + " (0.97527496978248207, (19, 38)),\n", + " (0.98019588266426305, (19, 38)),\n", + " (0.98422594080420933, (19, 38)),\n", + " (0.97913534169473704, (20, 39)),\n", + " (0.98331998698757472, (20, 39)),\n", + " (0.98673744747201031, (20, 39))]" + ] + }, + "execution_count": 54, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pbminlike_nobs = [power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs_) for nobs_ in nobs_arr]\n", + "pbminlike_nobs" + ] + 
}, + { + "cell_type": "code", + "execution_count": 55, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 30. , 0.57226196, 0.57223644],\n", + " [ 31. , 0.63996691, 0.63996691],\n", + " [ 32. , 0.56998462, 0.56998462],\n", + " [ 33. , 0.63583922, 0.63583922],\n", + " [ 34. , 0.6962074 , 0.56791996],\n", + " [ 35. , 0.63207178, 0.63206233],\n", + " [ 36. , 0.6911468 , 0.6911468 ],\n", + " [ 37. , 0.74431582, 0.62858903],\n", + " [ 38. , 0.68645084, 0.68645084],\n", + " [ 39. , 0.73880252, 0.73880252],\n", + " [ 40. , 0.68209018, 0.68209018],\n", + " [ 41. , 0.73364781, 0.73364781],\n", + " [ 42. , 0.77960188, 0.67801694],\n", + " [ 43. , 0.81981199, 0.72880881],\n", + " [ 44. , 0.77431223, 0.77431006],\n", + " [ 45. , 0.81435254, 0.7242594 ],\n", + " [ 46. , 0.76930532, 0.76930532],\n", + " [ 47. , 0.80915384, 0.80915384],\n", + " [ 48. , 0.84383691, 0.76456081],\n", + " [ 49. , 0.80419953, 0.80419953],\n", + " [ 50. , 0.83888242, 0.76005675],\n", + " [ 51. , 0.86878158, 0.79946935],\n", + " [ 52. , 0.83412614, 0.83412532],\n", + " [ 53. , 0.86416103, 0.79494925],\n", + " [ 54. , 0.82955489, 0.82955489],\n", + " [ 55. , 0.85969536, 0.85969536],\n", + " [ 56. , 0.88559736, 0.82515915],\n", + " [ 57. , 0.8553785 , 0.8553785 ],\n", + " [ 58. , 0.88147667, 0.88147667],\n", + " [ 59. , 0.90373691, 0.85120253],\n", + " [ 60. , 0.87746996, 0.87746966],\n", + " [ 61. , 0.89998455, 0.89998455],\n", + " [ 62. , 0.87357294, 0.87357294],\n", + " [ 63. , 0.89631613, 0.89631613],\n", + " [ 64. , 0.91567859, 0.86978231],\n", + " [ 65. , 0.89273054, 0.89273043],\n", + " [ 66. , 0.91235719, 0.91235719],\n", + " [ 67. , 0.92896436, 0.88922569],\n", + " [ 68. , 0.94287288, 0.909095 ],\n", + " [ 69. , 0.92598399, 0.92598393],\n", + " [ 70. , 0.94019538, 0.90589195],\n", + " [ 71. , 0.92304365, 0.92304365],\n", + " [ 72. , 0.93754092, 0.93754092],\n", + " [ 73. , 0.94967817, 0.92014417],\n", + " [ 74. , 0.93491119, 0.93491119],\n", + " [ 75. , 0.94732888, 0.94732888],\n", + " [ 76. , 0.95767697, 0.93230746],\n", + " [ 77. , 0.94499223, 0.94499221],\n", + " [ 78. , 0.95560881, 0.95560881],\n", + " [ 79. , 0.96441831, 0.94266983],\n", + " [ 80. , 0.95354407, 0.95354407],\n", + " [ 81. , 0.96260593, 0.94036317],\n", + " [ 82. , 0.95148456, 0.95148456],\n", + " [ 83. , 0.96079019, 0.96079019],\n", + " [ 84. , 0.96851388, 0.94943183],\n", + " [ 85. , 0.95897286, 0.95897285],\n", + " [ 86. , 0.96692371, 0.96692371],\n", + " [ 87. , 0.97349845, 0.95715552],\n", + " [ 88. , 0.9788947 , 0.96532698],\n", + " [ 89. , 0.97211084, 0.97211084],\n", + " [ 90. , 0.97770124, 0.96372524],\n", + " [ 91. , 0.97071324, 0.97071324],\n", + " [ 92. , 0.97649422, 0.97649422],\n", + " [ 93. , 0.98124289, 0.96930705],\n", + " [ 94. , 0.97527497, 0.97527497],\n", + " [ 95. , 0.98019588, 0.98019588],\n", + " [ 96. , 0.98422594, 0.97404474],\n", + " [ 97. , 0.97913534, 0.97913534],\n", + " [ 98. , 0.98331999, 0.98331999],\n", + " [ 99. 
, 0.98673745, 0.97806235]])" + ] + }, + "execution_count": 55, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pbcentral_nobs_arr, rej_minlike = list(zip(*pbcentral_nobs))\n", + "pbcentral_nobs_arr\n", + "pbminlike_nobs_arr, rej_minlike = list(zip(*pbminlike_nobs))\n", + "np.column_stack((nobs_arr, pbminlike_nobs_arr, pbcentral_nobs_arr))" + ] + }, + { + "cell_type": "code", + "execution_count": 56, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 56, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfAAAAFwCAYAAABHHCk+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1fX+wPHXBxQR90ABUdy4UHBSmlKZq2GObmmWVtrO\n1m3e3y29dW+7rLSulTezTMtRZomZGW5xgQNQwQ0OcKCyx/n8/vhymGd8D+IA38/Hg0fnfL6fz/l8\njtfrm89WWmuEEEIIUbm4XekGCCGEEMJ1EsCFEEKISkgCuBBCCFEJSQAXQgghKiEJ4EIIIUQlJAFc\nCCGEqIScBnCl1Cyl1Eml1E4HeT5RSsUrpaKVUsHF0ocopfYopfYppV6qqEYLIYQQ1zozPfCvgcH2\nHiqlhgJttNbtgEeA/xakuwHTC8p2BsYopTpcdIuFEEII4TyAa63XAWcdZBkOzCnIGwnUU0o1BXoD\n8Vrrw1rrXGB+QV4hhBBCXKSKmANvBhwt9j6xIM1euhBCCCEu0qVYxKYuwWcKIYQQophqFfAZSUDz\nYu/9C9I8gBY20m1SSsmh7EIIIa4pWutyd3rN9sAV9nvWvwD3AyilQoFUrfVJYAvQVikVoJTyAO4p\nyGuX1vqa/Hn99deveBvk+8v3l+8v312+/+X9uVhOe+BKqe+BMKCRUuoI8DpG71prrb/QWi9TSg1T\nSiUA6cADBcE4Xyn1JLAC4xeFWVrruItusRBCCCGcB3Ct9VgTeZ60k74cCCxHu4QQQgjhgJzEdhUI\nCwu70k24ouT7h13pJlxR1/L3v5a/O8j3v1iqIsbhK4JSSl8tbRFCCCEuNaUU+jIsYhNCCCHEVUQC\nuBBCCFEJSQAXQgghKiEJ4EIIIUQF0xrWrYOHHoK0tEtTR0WcxCaEEEII4MwZmDMHvvjCCOKTJl26\nuqQHLoQQQgC5ufDtt3DXXbB2rflyFotmVUQe48ZB69awbRvMnAmxsfDcc1C79qVprwRwIYQQ17SM\nDPj0U2jbFmbPhuuvh7FjYdw4OHbMfrl8i4VXvvmZOs/34pYVTcnt+iUJ+y18+y3ccAOoS3y1lwRw\nIYQQlVpMDHz2GWRnu1bu7Fl4801o1Qr++gt+/BH+/BOefRbi4qBFC+jaFd5/3+idW+Xm5fPsVz9S\n+/kQPo7+F092+webn1jJkUb/446f+7HjxI6K/YJ2yEEuQgghKqXjx+H11+HnnyEoCE6dgu++M147\nknw2nbumvcfmqAxGN57Cqy940bGj7bz79sHTT8Phw/DBR3n8cnA+/4v/N9Ut9fh7z3/y2phhuLkZ\nXW2LtvDV9q/4v1X/x7iu45gaNpU6NerYbYcc5CKEEOKK+vVXGDXK6LVeDunp8K9/GYG6fn0jyK5c\nafScb7oJPvgALJay5fLyLTz2+Xf4/acDh9P2MWh0Itt79iK34U67dbVvD8uWwT2vrmT48q58v3cm\nU0M/4fwHG5ly762FwRvATbnxcI+HiXk8hrNZZ+n0WScWxi6skJvHbLrS16kVu1ZNCyGEqDyOHdN6\n9Git27TR+rXXtPb21nrxYufl0tO1fu45rYcM0TopyXx9ubkW/eWXFu3np/WYMVofPFg2z4EDWvfr\np3VYmNaHDhWlf7V8k671TB/t9UxP/d/f1muttbZYLHpO9Bzd+N3GetrGadpisZT5vMOph/WoH0bp\nltNa6p/jfraZx541h9bo3l/21scvHLf5vCDulT9uXkzhivyRAC6EEJVDfr7Wn32mdePGWr/6qtYZ\nGUb6li1at2ih9SuvaJ2XZ7vsqlVat26t9dixWr/+utbNmmm9aZPzOpdsjNG1nu6tGz5xh161/rzD\nvHl5Wr/9ttG+t6Yn6lbPjdNuL/jpidNn69y8/DL5E04n6N5f9tZDvxuqT1w4obXWOjM3U7+5+k3d\n8J2GespfU3RGTobzRtrgKOBfbACXOXAhhBCm7d4Njzxi7HH+4gvo0qXk85QUuPtuqF4dvv8eGjUy\n0s+dgxdfNIajP/8cbrvNSF+61Djs5L33YPz4svXl5OZz57sfsPzCe4z1eQOvNlFsStzI0jFLCagf\nYLedWmveWPoNb0a+QC/3h1n0zCv4NLS/nys3P5epq6fyv6j/8dx1z/Hfrf8lqGkQHw76kFYNWrn6\nx2TKxc6BX/Get/UH6YELIcRVKz9f6//8x+jVfv658d6e3Fyt//53rVu10nr7dq1//VVrf3+tJ03S\nOjW1bP6YGK3bttX62WeNslbLt+zVtZ8J1fWfvlGv3XVQa230aKdtnKZ93/fV64+st1l/SnqKHvnD\nSB30WZDecWKHS98z4mCEvu3723R4fLhL5coD6YELIYS4lM6cgfvvN7Zd/fAD+PubK/ftvEwm/vAy\nDc7ezPev38FNNzmu4557jNdzv7fw8KxPWHL2TUY1msK85x6nmnvJNdfh8eGM/3k8Hw7+kHFdxxWm\nL4tfxqSlkxjbZSxv3PQGntU8Xf26l83F9sAlgAshhLBr61bjZLKRI+Htt42hcTMOpR5ixA8j8K3Z\nkm0nNzLj1umM7jTaYZm8PHjipZN8mXo3dermsfj+2dwc0tZu/pjkGG6fdztjuozhlRte4YUVLxCe\nEM7sO2cT1jLMhW95Zcg2MiGEEHbl5xvz1a7S2pirHjrUOMjkgw/MB+8/9v9B6FehjO82nt/uW8yK\n+37nqfCnmLdrnsNyO5K3Ee7fi8eGDiDl3dUOgzdA5yadiZwYyZoja/D7wI/03HR2PLqjUgTviiA9\ncCGEqKJOnoRhw4zzuefPB3d3c+XS0uDRR2HnTli0CNq1M1dOa82769/l48iPmTdqHgNaDih8tjt5\nN4O+HcTbA9/m/m73l
yk7b9c8Ji+fzMzbZjKy40hzFRbIzssm6kQUof6hLpW70mQIXQghRBkJCTB4\nsHGe99q10KmTcd63s/O5t8cfo/+0e7nO8hJLPhiCl5e5+i5kX+CBJQ9w9PxRFv1tEf51y06Ux6XE\nccu3t/CvG//FgyEPApBvyecfq/7BjzE/suSeJQQ1dXKMWhUiQ+hCCHGVu9x9k61boX9/eOklmDoV\nfvrJCOJvveW43J9RCfSZ2Y/OPoFEt76PPee2m6rvyLkjhM4KpWHNhqyZsMZm8Abo6N2RVeNXMSVi\nCjO3zuRc1jnumH8Hm5M2s3nS5msqeFcE6YELIcQlkpdnrN5OTzeCqNtl6DKtWGH0ur/4Au68syj9\n2DHo2xdeew0eeKBsuYVrd3L30qGM8X2d7559mEWxi3h6+dOsf3C9w/3WCWcSGDhnIJP7TOa5654z\n1cb9Z/Zz85ybybPkMaLDCD4c/CHV3U1OsFchMoQuhBBXofx8mDDBmIfOyjLO6J4yxXk5reGdd2Dg\nQOjZ07U658417p9etAj69Sv7fO9eGDAAZs2CW28tSv/vsvU8vnokk9t+yrRJfytMn7ZpGl9s+4L1\nD66nQc0GZT4vJjmGwd8N5rUBr/Fwj4ddauuRc0fYdmwbIzqOcKlcVSIHuQghxFUmP1/rBx/U+sYb\njXO/jx83DjJZutR52VdeMY4abd5c6+Rk83U++94W7d/confvdpxv40bjMBbr8aVvzAvX6qXG+s35\ny23mfyb8GT3g6wE6KzerRPq2Y9u0z/s++rsd35lvpCiBizzIRebAhRCiAmkNTz5p3JD1yy/g5QU+\nPsZd0w8+aCwus+ett2DJEoiMhLFj4d57jZ68M6Pf+5SP0nvx0Bef0Lmz47yhofD11zB8ONz71nxe\nixrP5/2X8I+7B9vM//6g92nk1YgHljyARRtXfG04uoEh3w3hs2GfcW/Xe503UFwaFxP9K/IH6YEL\nIa4SLlw4Vabc009r3aeP1ufOlX3+2Wdad+midVpa2WfTpxu3ellv58rNNW7Ueu01x3U+/vlc7f53\nf/39hlXa+11vvemoiZtBtNYPfzpPu7/gpxeudX7UaEZOhr7uq+v0y3+8rP888Kf2ftf7shw1WtUh\nt5EJIUTFyc3V+tZbtf6//3OtnMWi9Ysvat2jh9Znz9rPM2GC1vfcU/KXhNmzjSHzAwdK5j9+3Lit\na9ky2583de4yrV5son9av0trrfXi2MU64KMAfTrjtMO2rty/Unu/6613HN9p9uvplPQU3e6Tdrrh\nOw11xMEI0+WEfZclgANDgD3APuAlG8/rA4uBHcAmoFOxZ4cK0qOAzQ7quKR/UEIIYcbkyVrfdJPW\nPj5ab9hgvtzdr/+sO4dc0Kcdx06dkaF19+5af/SR8X7hQqOuuDjb+des0bpJk5J3W2ut9ee/rtfq\nRW89c1nJRj4T/oy+/fvb7V5juf3Ydu39rne5gnDiuUQdkxzjcjlh2yUP4Bh7xROAAKA6EA10KJXn\nXeCfBa8DgZXFnh0AGpio51L+OQkhhFMzZ2odGGj0oBcu1LpdO2MRmjOPfz5X85qbHv/jo6bqOXRI\n66ZNtZ46VWtvb+PGLkfef1/rnj21zipYR7Zw7U6tXmyi35xfdhg7Oy9b9/myj35v/Xtlnh04c0D7\nfeCnF8QsMNVOcWldbAA3s4itNxCvtT6stc4F5gPDS+XpBKwqiMJ7gZZKKe+CZwo5MEYIcZVbvRr+\n+U/jfur69WHUKGMb16uvOi43d9V2Pj/4DLPDVvNn4q+sOrjKaV0BAfDa59v514mezP7xFCEhjvM/\n9xy0aAHPPgurdxzkb0uG8kSbafzj7iFl8nq4e/DD6B94b8N7rD+yvjA9JT2Fwd8N5pV+rzi9VERU\nDmYCazPgaLH3iQVpxe0ARgIopXoDLQDrUTwa+EMptUUpNenimiuEEBXvwAG4+25jH3Xxc7+nT4cF\nC4zgbkvMoWTGh4/gucDPGX9jP2beNpOJv0wkLSfNYX2nM07z3pFRhPasybyzzzptn1Lwv//B8rXJ\n3Dx7EHf5vMqnD4+xmz+gfgCz7pjFmEVjOJVxirScNG79/lbu6nQXT/Z+0ml9onKoqJ7x20ADpdR2\n4AmM+W7r5oe+WuvuwDDgCaWUjeMFhBDi4lgs5St3/jzccYfR+x44sOSzhg1h5kzj5LK0UjE5IyuX\nvtPuIrTmfbz/4CgAhrUbRv+A/ry88mW79eVb8hmzaAyjO47m93HLWX9kPcvilzltZ606eXg/djdD\nWtzF/Ocfd5r/tva3MabLGO776T7+tuBvdG7SmTdvetNpOVF5OD2JTSkVCkzRWg8peP8yxrj9Ow7K\nHASCtNZppdJfBy5orT+0UUa//vrrhe/DwsIICwtz4asIIa5VmZnG/ubJk+Ghh8yXy883jhv194fP\nPrN/0ccDD4Cnp3G9plXXl5/kZPZhkt5fQjX3or7Q2cyzBH0exNyRc0vcxmX16p+vsilxEyvuW0E1\nt2r8eeBPHljyALsf303dGnXttvXllS+z/fh2wu8Nx93N3LViufm53DTnJurWqMvPd/98TR5XejWJ\niIggIiKi8P3UqVPRl/IkNsCdokVsHhiL2DqWylMPqF7wehIwu+C1F1C74HUtYD0wyE49l2KNgBDi\nGvDUU1r372+s1j5zxny5B17apQeEWXROjuN8qanGNq8VK4z34z/+Sns8F6gPn0y1mf+XPb/o1h+3\n1mnZJTd8L45drJt/2FyfTDtZIn3ikon60aX2F8At2bNEN/+wuU5JT3H+pUrJzsvWefl5LpcTlx6X\ncRvZXiAeeLkg7RHg4YLXoQXP44CFQL2C9FYFAT8K2GUta6eOS/1nJYSogsLDtW7RwgjcjzxibAMz\nY+r3yzRT0HO3mDjfVGv9++9GEP/wxw1aveitl23e4zD/uMXj9NPhTxe+j0uJ043fbawjEyPL5D2b\neVY3+6CZza1dCacTtPe73nrj0Y2m2ikqj8sSwC/HjwRwIYSrkpO19vPT+q+/it43bqx1jJOtyvuO\nntJuL/jpMbNe1u0/ba+z87JN1XffY8c1zzXTr3/3q9O8pzNOa9/3ffWaQ2v0+azzusP0DvrLbV/a\nzb9kzxLd9pO2Oj2naN9aRk6G7vZ5N/3Jpk9MtU9ULhcbwOU2MiFEpaS1MX/doYNxe5fVxx/Db7/B\n77/bntO2WDQt/34PjT392P6fjxg2dxiD2gzimdBnnNSnuW3ucOrnBDH3wX+bauPPe37mhT9eoEuT\nLnh7efPF7V84zH/PwntoUa8F797yLgAPLXmIjLwMvh/5PcreBL2otC72NjLZny2EqBDjxsGnn16+\n+r76Co4cgTfeKJn++OOQlGRcJGLLU1/M4yS7WPnyfwD4YNAH/HvtvzmVccphfXN2zCEx7TBfj3/d\nYb7i7uxwJ738epF0PolPhzr/w/lk6Cd8s+MbtiRtYdb2WWxM3MiXt38pwVvYJD
1wIcRFW7oUnngC\nsrON27bq1DFXTmv7K78d2bcP+vaFNWugY8eyz//4Ax59FGJjoUaNovQtexPp87/uzBkUzribexSm\nTw6fTL4lnxm3zrBZX+L5RLrP7M6K+1YQ7BPsUlvzLHnkWfLwrOZpKv/cnXOZsnoKqVmprJmwho7e\nNr6gqBKkBy6EuKLS0+Gpp4wrKgcONN8LT0+H4GDYtMm1+nJzjd7+lCm2gzfALbdAly7w0UdFaXn5\nFob89wFurPVUieAN8PqA11kQu4DdybvLfJbWmom/TOSp3k+5HLwBqrlVMx28AcYGjWVAwAA+v/Vz\nCd7CIemBCyEuyssvw9Gjxilme/dCv35GL7xePeflvv/eCMK//26+vv/7p2b7NsVvvznuve/fD336\nwM6d4OcHd703nfBj33LqnfV4elQrk/+TyE/4dd+v/D7u9xJD1l9u+5KZ22ay8aGNso9aVCjpgQsh\nrpiYGOOIzw8+MN4HBsKwYcZCMkfi4mDWLFi3DvbsgQ0bzNU3d9V23k5vy8f/Pe906L1NG5g0CV55\nBcK37GXR6Sksuvdbm8Eb4LGej3Hk3JESp6IdSj3Eq6te5Zs7v5HgLa460gMXQpSLxQIDBsDYsfDY\nY0XpCQnGqWjx8dCgQdlyWhtD7XfcAU8/DV98AYsWOe+F5+VbaPB8PzwaJ/H8gEd49QYnt4wAFy5A\nYMdczozoyx0B4/nx7084zL8sfhnP/v4sux/bjbubOwPnDGRI2yG82PdFp3UJ4SrpgQshrohvvjEW\nrT38cMn0tm1h+PCS88/F/fgjnDplLHoDmDDB6IVv3Oi4vsf/+y1a5bF6UjjTNk3jQvYFp22sUwdu\n/sc06nvWY/5zzs8PH9p2KK3qt+KzLZ/x2ZbPyMrL4vnrnndaTogrQXrgQgiXnT4NnTpBeDh07172\n+cGDxlWc+/ZBo0ZF6RcuGHPeP/xgrCK3+uILWLwYli+3Xd+R5HO0er8jXw38mQcG9WbMojEENw3m\npX4vOWznibQTdPmsCxse2kD7Ru1NfbfYlFgGzDbOMF//4HrT5YRw1cX2wCWACyFcNnEi1K4N06bZ\nz/PII0bw/s9/itL+/ncj+H/9dcm8OTnGNZ7z58N115X9rJ6vPs/53FT2vTcLgJjkGG6acxMHJh+g\nlkctu214cMmDNKrZiPcGvefK12NqxFT86/rzUHcXbkYRwkUSwIUQl9W6dXDPPcYe67r2L8/iyBEI\nCTGGx729YfduuOkm479NmpTNb68X/sumWO78eQC7Ho2hc8uign9b8Df6NOvD89fbHuLekrSFO+bf\nwZ4n9lDP08mSeCGuAJkDF0JcNnl5xoK1jz5yHLwBWrQwAv177xkL1554wti7bSt4gzEXHhdXci7c\nYtFMmD+ZEQ3/WSJ4A/xf///j/Y3vk5GbUeaztNY8vfxp/n3TvyV4iypLArgQwrS5c42V5aNHm8v/\nyivGdrGPPoK0NGNY3R4PD3j1VZg6tSjtxdmLyHA7ydxnyi5A69q0K9f5X8eX274s285dc8nJz2FC\n8ARzDRWiEpIhdCGEKTk5xsUhs2dD//7myz02OZ3/Loli04/96NPHeR3WufB2nTLwebMj7/f9hmfu\nDLOZP+p4FLfNu439k/cXnnaWlpNGh+kd+PGuH7m++fXmGyrEZSZD6EKIy+Lrr43g6krwBqg9+D1q\nPDSEDt3OOc1bvBc+8sO38cu/zm7wBgjxDaGHbw9mbZ9VmPbW2rcIaxkmwVtUedIDF0I4lZVlBO9F\ni6B3b/PlzmSeod2n7ejs3ZmRHUc6vbITjF54q+4HOH5bLyIf2EGvQH+H+bckbWHkjyNJeCqBpAtJ\n9PqyFzsf3Umzus3MN1SIK0B64EKIS27mTGO/tyvBG+DDjR8yosMI3hn4DjO2zMCiLU7LeHhA24f/\nyXCfZ5wGb4BezXoR1CSIr6O/5u8r/s5zoc9J8BbXBNuHAgshRIH0dHj7bfuHrNhzKuMUn2/9nG0P\nbyOgXgD1atRjecJyhrUb5rDcnlN7iMv+g/2T/2u6rtcGvMbQuUNp4NmA70d971pDhaikpAcuhHDo\n00+Nee9u3Vwr9/6G97mr0120rN8SpRRP9X6KTzc7v2v0jTVv8Gzos9SpYfJScSDUP5ThgcOZPmy6\nS1d3ClGZyRy4EMKuc+eMs83XrLF/97YtyenJdJjegehHo2lRrwUAWXlZBEwLYM2ENQQ2DrRZbs+p\nPfT/uj/7J+93KYALURnJHLgQ4pL56CPjelBXgjfAu+vfZWzQ2MLgDeBZzZOJIROZsWWG3XLl6X0L\nca2SHrgQVdypU8a2rE8+wekd2sWdPm3c7715M7Rubb7cibQTdJrRiV2P7SqzmCzxfCJdP+/K4WcO\nlwnS0vsW1xrpgQshHPrsM5g+HSIjXSv33nvGiWuuBG+Ad9a9w/3d7re5Ety/rj8DWw/kmx3flHn2\nxpo3eCb0GQneQpgkPXAhqrDsbGjZ0rhEpG5d+Pxzc+UOJWYRcv0Zdm3ww9/5Tq5Cxy4co8tnXYh9\nIhaf2j4286w9vJZJSycR+0QsbsroQ+w5tYcbvr6B/ZP3U7eGk0PWhagipAcuhLBr3jzo2hXeegt+\n/NE4kMWMsTP/hcf9w10K3mCcgvZgyIN2gzdAvxb98KzmycoDKwvTrHPfEryFME8CuBBVlNbw4Yfw\n3HPGzWDdu8OSJc7LnTqXwaacr8ipdYDYlFjT9R09d5Tvd3/Pi31fdJjPuqXsk8hPAKP3vWL/Cp7s\n/aTpuoQQEsCFqLJWrYL8fBg0yHg/fjx8U3bquYxnv55Lk5xQHu45kW+iTRQo8M76d5gYMpEmtezc\nF1rM2KCxRCZFsv/Mful9C1FOMgcuRBV1220wfDhMmmS8T08Hf3+IiQE/P9tlLBaN19+78O9+nzL0\nBh9u+fYWjjxzBHc3d4d1pWal0urjVsQ+HotvHV9T7Xvpj5eIOxXHxsSNMvctrkkyBy6EKGPvXtiy\nBcaNK0qrVQtGjjTu9Lbn3UUrQbvz7J030sm7E351/ErMVdvzv6j/MazdMNPBG+DxXo/zW/xv0vsW\nopxMBXCl1BCl1B6l1D6l1Es2ntdXSi1WSu1QSm1SSnUyW1YIUfGmTYNHHoGaNUumT5hg3Odtb7Dr\nww3TGNPqGdzcjE7B/V3vt7nlq7h8Sz6fbv6Up/s87VIbA+oHsOCuBUzuM9mlckIIg9MArpRyA6YD\ng4HOwBilVIdS2V4ForTW3YDxwCculBWiSsvIMIaz09MvT32nT8P8+fD442Wf9etnrETfurXss/At\neznlsZUPHxhbmDYmaAy/xf/GuSz7d3n/uu9XmtZqSu9mLl5VBozsOJLaHrVdLieEMNcD7w3Ea60P\na61zgfnA8FJ5OgGrALTWe4GWSilvk2WFqNIWLYLffnP9Nq/y+uILuPNO8LGxk0sp+4vZXlz4KX09\nH6ZBnaLLQBp7NeamVjexIHaB3fo+2
fyJ9KKFuALMBPBmwNFi7xML0orbAYwEUEr1BloA/ibLClGl\nzZplrARfuNC1cjm5+S7XlZNjnLr2zDP289x/v9FDz84uSjt4/Cwxbt/z8bjHyuQf32283WH0XSd3\nEZcSx+hOo11uqxDi4lTUfeBvAx8rpbYDu4AowOV/faZMmVL4OiwsjLCwsApqnhBXRkICxMXBtm3Q\npYsxfO1p4rbLN+cv5+3IKaR9tMml+n78ETp0cHz1Z8uWEBQEv/4Ko0YZaU/NnkXLnFvp3q7s8vRh\n7YYxaekk9p/ZT5uGbUo8+3TzpzzW8zE83D1caqcQ16KIiAgiIiIq7POcbiNTSoUCU7TWQwrevwxo\nrfU7DsocBIKALmbLyjYyURX94x9G0P7gAwgLMw5VueMO5+X8nh3B8fo/s+K2fdzSo52purSGnj3h\nX/+CW291nPebb4wRgaVLISsnj9r/aMPXQxZz3809bOafHD6ZhjUbMiVsSmHa6YzTtP20LXuf3Gtq\n77cQoqTLsY1sC9BWKRWglPIA7gF+KdWIekqp6gWvJwGrtdZpZsoKUVXl5Rkrvh980Hg/apQxH+5M\nzKFkjnv+ResL9/FBuIkCBZasTOZc/nGGDnWed9QoWLsWTp6Ef3z7M7VyW9gN3gD3d7ufOTvmYNGW\nwrSvtn/F8MDhEryFuEKcBnCtdT7wJLACiAHma63jlFKPKKUeLsjWEditlIrDWHH+tKOyFf81hLj6\nrFgBzZtD587G+5EjjR5vTo7jcq/O/442uXfy+PUTWHvafAB/OvwZ6t79DG4mfi2vXdtY6DZ3Lny1\n62MmBjneAtbDtwc1q9dk3ZF1AORZ8pixZYYsXhPiCjI1B661Xg4ElkqbWez1ptLPHZUV4lowa1ZR\n7xugWTNjfnrVKhgyxHYZi0Xze/L/eGfADB4Z2pcXNh9ifcxh+nYOcFjX4ZOpHKmxjFQgMzeTmtVr\nOswPxmr0UU9tJX3oEf59350O8yqljMVs0d/QP6A/P+/5mYD6AXT37e60HiHEpSEnsQlxCaSkwJ9/\nwj33lEwfPdrxavRv/9xKvsriqdv74+lRjXb5w3l3qfNe+Kvf/4B/9iC6+4WwYv8KU20cMAByQj5h\nSIMn8fRw/rv8uK7jWLxnMRm5GXwS+QmTe0vvW4grSQK4EJfAt98a55DXLXVC6MiR8PPPxvy4Le+u\n/B8D6k0oPAnt3u6j+OuE8wC+5MjXTOw5gVEdR7F4z2JTbbyQcw63jr/w1RMPOs8M+NXxo0+zPrz2\n12scSj2hfjI9AAAgAElEQVTEiI4jTJUTQlwaEsCFqGBaG8PnDz1U9lnLltCqFaxeXfbZmfOZxKkf\n+fdd4wvTnrvzZtJqxrE9/pjd+n6NjCOz+hFeGj2IER1G8Ou+X8nJdzLRDiyIXcAtbW/Gp14jM18L\nMPaEf7DxAx7v9TjV3CpqF6oQojwkgAtRwTZvNhaq3XCD7ef2htH/+f1PNMzqSZ+OzQvTatf0oGXO\nrby95Ce79b2xdDY9Pe7D06Mazeo2o32j9vx18C+n7ZwdPZsJ3SY4zVfcnR3u5MaWNzKp+ySXygkh\nKp4EcCEqmHXxmrKzu3PUKPjpJ+Ou7uLm7/maezuXHc6+u+so/ki0PYyelZPH1pxv+eftE4o+v+Mo\nFsU5HnaPPx1P/Jl4hrS1s5rOjprVa7Jq/CoaeZnvtQshLg0J4ELYYLFAVJTr5dLTYcECY4W3PW3b\nGueUr19flLY+5jBnPaOYOqbsVQEvjBhMqtc24o6klHn2zsIV1MxtwW19Ohamjew4kiV7l5BvsX8Y\n4pwdc7g36F6qu1c398WEEFcdCeBC2PDHHxAa6voNYgsXQt++4Ff2RNISSh/q8s+F39CFe6hfu+w5\nqw3r1sQ/azBv/7SkzLOvts7mjuYTSqS1btAavzp+hXu2S7NoC3N2zmF8Nwe/ZQghrnoSwIWwYcEC\nyM0FV48ttrd4rbTRo40AbrFAXr6FtRdm8+ItD9jNP7LjKJYdKjksvv/YGRJrrOCte+8pk9/RMHrE\noQga1mxINx8HB6YLIa56EsCFKCU319jq9eijEB5uvtzO2ExiE5O47TbneTt2NLaYbd4Mn/yymmr5\ndRh7o/1DUV4aOYxTNddz+GRqYdor38+jRc5QAprWL5N/VMdRLI5bXOLoU6vyLF4TQlx9JIALUUpE\nBLRpA4884tod3pN/fIeaY8dR3eS0snU1+vR1XzPE54HCvd+2+DWqg0/mjby1aGlh2m9JX/NoH9u9\n9o7eHalTow5bkraUSL+QfYFf9v7C2KCx5hophLhqSQAXopQFC4zg2rUrZGZCfLy5cpvOL+SEx3ou\nZF8wlX/0aJi3+BwHPX7hP2PudZr/9rajWBJvDIsvWreL7GoneX7EzXbz2xpGXxi7kLCWYXjX8jbV\nRiHE1UsCuBDF5OUZw+ejRxvbwIYMMdcL/zUyjlz3VPoH3MCfB/80VVdQEOQG/ohf1kA6tnAeUF8Z\neTsnaq7i2OkLvLVsNn1q3o9HdXe7+a0BvPg1vbN3zJbFa0JUERLAhShm9WoICDBOSwMjgJuZB5/2\n+yKC3EcxrN0wlieYG3dXCprcPJcXBt1vKn8r3wY0zuzLmwt+ISp/Lq/d4TgQB/sEY9EWdp7cCcCB\nsweITYnl1vZOLgsXQlQKEsCFKGbBArjrrqL3t9wC69YZQ+mOrD+7kIeuH82QtkMITwgv0eu152Ta\nSZLyd/DoLYNMt29Yy1HMPPAitbLbMrhne4d5lVIlhtHn7JjD2C5j8XD3MF2fEOLqJQFciAL5+cYJ\naaNHF6XVrw/dusGaNfbL/bEtnpzqJ3lk6PV08u6E1po9p/Y4re/nPT8ztO1QPKuV3fttz8sjhmOp\neYIRrSaYym8N4BZt4Zsd3zA+WIbPhagqJIALUWDNGvD3h9atS6Y7G0b/IHwRndRIPKq7o5Qq7IU7\nsyhuEaM6jnKpjR1bePO477e8d/8YU/n7+PchNSuVL7d9SR2POoT4hLhUnxDi6iUBXIgCpYfPrYYO\ndbyQbe3phTwQWtRtH9p2qNN58NMZp4lMimRou6Eut3PGo2Np0qCWqbxuyo0RHUbw3IrnGN9tPMre\nAe1CiEpHArgQGMPnixeXHD63Cg6Gs2fh4MGyz9bsPEimxxEev7Xo6rGbW9/MxsSNpOfYP4d1yd4l\n3NL6Fryqe1VE8x0a1XEU2XnZ3NvV+VY1IUTlIQFcXJWysowFZDnOr7WuEOvWga+vcdFIaW5u9ofR\n3/1tEYF6BJ4eRXdj161Rlx6+PYg4FGG3vkVxixjdycZvC5fAgJYDiJwYiU9tn8tSnxDi8pAALq5K\nq1fDypWwdevlqc/e8LmVvWH0iOSFjO9VNhA7GkZPzUpl7eG13Nru8mznclNu9PDrcVnqEkJcPhLA\nxVUpPBxq1TIC+aWWn29cLGJr+NzqlluMtmRnF6VtjD1CRo0EJt8eVia/o4Vsv+77lRtb3UidGn
Uu\nsuVCiGuZBHBxVVq2DJ5/3rUAfuGC4+1e9mzYAE2aQHsH26obNYJOnYyhdqt3ly6mbf5wvDzLHn7e\ntWlXMnIzSDiTUObZwtiFLq8+F0KI0iSAi6tOQoIRjCdPNoJrbq65cvPmwfhybHN2NnxuVXoe/M/j\nCxnX3Xa3vXA7WXzJXviF7Av8degvbm9/u+sNFUKIYiSAi6tOeLgx59yokXGk6fbt5ssdOgSHD5uv\ny2Ixhs/NBPChQ4sC+NZ9SaR5xvHcnfYvExnadijL95ecB18Wv4zrm19Pg5oNzDdSCCFskAAurjrW\nAA4wYIC5YfScHPjrL/P5rdauz6VBo1wCA53n7dkTkpPhyBF4Z8lPtM67ndo17R9LOrD1QNYeXktW\nXlZh2qK4RYzueHlWnwshqjYJ4OKqkpkJa9cai8bACMgREc7LrV9vzGHfdZdrAfyF8H9Sf+RrpvK6\nucGgQcZq9D+SFjI22HEgblCzAV2bdmXNYWNiPiM3g9/3/87wDsPNN1AIIeyQAC6uKhEREBJinEEO\n0L+/EZzz8hyXs/bazQZ8qx1Zv3Cm/krT+YcOhS/nneRczR08f+ctTvMXnwf/PeF3evn1orFXY/MN\nFEIIOySAi6tK8eFzAG9vaN4coqPNlevUCc6fh8RE53WtjzlMrkcKRzP3cC7rnKn2DR4MW9N+omXO\nrdSrXcNp/uLz4AvjZPW5EKLimArgSqkhSqk9Sql9SqmXbDyvq5T6RSkVrZTapZSaUOzZIaXUDqVU\nlFJqcwW2XVRBy5bBsGEl05zNax89CsePQ69exjB3//7mhtE/W7GcgLzB9G7Wm/VH15tqn7c31L9+\nEfd2NxeIQ3xDOJN5hr2n9rIsfhkjOo4wVU4IIZxxGsCVUm7AdGAw0BkYo5TqUCrbE0CM1joYuBH4\nQCllPVvSAoRprUO01r0rrumiqomPN+bAu3YtmR4W5nhYfPlyY27a3d14b3YYfdWRcIa1HcqAgAEO\njz0tLi0njXyfSF6+y/nwORinoA1uM5jnVzxPUJMgOc5UCFFhzPTAewPxWuvDWutcYD5QehWOBqzH\nStUBTmutrbOWymQ94hq3bJmx17r0hVn9+xsHqOTn2y5Xetg9LMx5DzwtM4cTNf9i8q2DCWsZxurD\n5la+/XXwL3o3601tj9qm8oMxD/5b/G+X7exzIcS1wUxgbQYcLfY+sSCtuOlAJ6XUMWAH8HSxZxr4\nQym1RSk16WIaK6q28PCyw+cATZuCjw/s3Fn2WU4OrFplzE1bdekCp0/DsWP265oZvo5amR0IbN6Y\n3s16E5Mcw4XsC87bmBDOkLZDTHybIoPaDMKzmicjO450qZwQQjhSUT3jwUCU1toPCAFmKKWsXZS+\nWuvuwDDgCaVUvwqqU1QhGRnGavOBA20/tzeMbt0+1qRJUZqZefD528LpWc/otntW86RXs15O58G1\n1oQnhDO0rWt3eDf2asyJ50/gX9ffpXJCCOFINedZSAJaFHvvX5BW3APAWwBa6/1KqYNAB2Cr1vp4\nQXqKUuonjCH5ddgwZcqUwtdhYWGEhYWZ+hKi8vvrL+jRA+rVs/18wACYPx+efbZkeunh8+L5IyJg\nzBjbn7crczkz+n9VlL9gHtxR73rf6X3kWfLo5N3Jybcpq56nnS8mhLhmREREEOHKPlcnlNbacQal\n3IG9wM3AcWAzMEZrHVcszwwgWWs9VSnVFNgKdAOyADetdZpSqhawApiqtV5hox7trC2i6nrySWO7\n2Etl9jgYjh83hsZTUowetlVQEHz5JYSGlswfHQ333AN79pT9rMi4o1z3TQhZb5zEo7qx8i3iUAQv\nr3yZTRM32W3jtE3TiE2J5Yvbv3D16wkhRBlKKbTWynlO25wOoWut84EnMYJvDDBfax2nlHpEKfVw\nQbY3geuVUjuBP4AXtdZngKbAOqVUFLAJWGoreItrm9bGAjZbPWkrX1/jbPRdu4rSim8fKy0oyDj2\n9MSJss9m/L6cFrmDCoM3QJ9mfdidvJu0nDS7bSjP/LcQQlwqZobQ0VovBwJLpc0s9vo4xjx46XIH\ngeCLbKOoJLSGmBijp+yKffuMxWhBQY7zWVeXd+tmvC+9faw4d3fo18/If/fdJZ+tPBzOHe1K7seu\nWb0mPfx6sP7Iega3LfNXmYzcDDYc3cCCuxa48M2EEOLSke1dosJs2GAETYvFtXLW3nfp7WOllT7Q\nxd78t5Wt7WRpmTkc91zFU8PKBukBAQPsbieLOBRBD98e1K1R13EjhRDiMpEALipMRAScOwd797pW\nzt72sdKsAdxisb19zFb+0utFZq3YiFdWOzq3bFImf1jLMLsHuixPWC7D50KIq4oEcFFhVq+Gxo0h\nMtJ8mbQ02LgRbrZ/rXYhf3/jkpPYWKO3365dye1jpQUHG3vBk5OL0uZtCad7XduBONQ/lJ0nd5Ke\nk17mWXm2jwkhxKUkAVxUiNxc2LQJnnjCtQD+11/GPdt1TY5MW3vhzobPoWgefM2aorQd6eHcF2q7\noFd1L0J8Q9hwdEOJ9IQzCaTnpNO1aVeb5YQQ4kqQAC4qxLZt0Lq1cRTqJvs7scpYtaro7m8zrMPi\nZgJ48fwAW/clkV0jkQkD+9jPb2Me3Dp8rpxN0gshxGUkAVxUiIgII1iGhBiryjMyzJVbvdooZ9aA\nAfD775CUBL1NXI1TfCHb9OXL8c+5pcT2sTL5bcyDy/YxIcTVSAK4qBCrVxvBskYNYxvZtm3Oy6Sm\nGjeQ2drHbU9AADRsnMegwRab28dKCwmBI0fg1ClYeWg5g9s47rZf538dUSeiyMg1fgPJysti7eG1\n3NLahWECIYS4DCSAi4uWl2csKrvhBuN9nz7mhtHXrjXyeni4Vl/tceNpcsscU3mrVYO+fWHFyjyO\n1VjJZBvbx4qr5VGLYJ9gNh7dCMCaw2vo2rQrDWo2cK2RQghxiUkAFxctKgpatDBWoINxrKmZhWyu\nDp+DcaFIcp0/ON/Q3PWfYNQxddZGPLNaEdTK+X3cxe8HD4+X4XMhxNVJAri4aNb5b6s+fS5dAI87\nFcf57PNsSjS/Ui4sDPbpcELqmNsGVvx+8OX7l8v2MSHEVUkCuLho1vlvq9atISvLWGhmz/nzEBdn\nbiFaiboOreauzneReD6Rs5lnTZXp3h3c2i/n3t7mAvH1za9n+/HtxKXEcSbzDCG+Ia41UgghLgMJ\n4OKi5OfDunXG/dtWSjnvha9fbyxe8/R0rb7Vh1dzc6ub6enXk81Jm02VuZB3hprNEpg4xP72seJq\ne9QmqGkQr0e8zuA2g3FT8n8TIcTVR/5lEhdlxw7w8yt7IpqzhWylh93N0FoTcSiCAQEDCG0WanoY\nfe3htfQNuA6PatVN1zUgYAALYhfI/LcQ4qolAVxclIiIksPnVs4WspVn/nvf6X14uHvQsn5LQv1D\n2ZRkLoCvPryaAQGuVRbWMgyFYlCbQa41UgghLhMJ4
OKi2AvEvXrB9u3GFrPS0tJg924jyLtU1+HV\nRmBVij7+fYhMjMSinV99FnEogrCWYS7VNSBgADOGzaCxV2PXGimEEJeJBHBRbhaLsZfbVgCvX9+4\nfGT37rLPNmwwFpbVrOlafdbhcwCf2j7U86xH/Ol4h2VSs1KJPxNPT7+eLtVVs3pNHuv1mGsNFEKI\ny0gCuCi3nTvB2xt87GyttjeMXt7579WHVzOgZVHBUH/n8+BrD68l1D8UD3cXT4sRQoirnATwKkRr\nWLbs8tVXevtYafZWopdn/nv/2f0oFG0atClMM7OQrTzz30IIURlIAK9CDh2CW2+FM2cuT33OAnFo\naNmV6BkZEB0N113nWl0RhyIY0HJAiRvBzCxkkwAuhKiqJIBXIdbe7o4dl74ui8W4Z9tRAO/SBY4e\nhXPnitI2boTgYKhVy7X6bAXiYJ9g9p3eR3pOus0y57LOsefUHno3c/G0GCGEqAQkgFchkZHGbWBR\nUZe+rpgYaNAAmjWzn6daNeM2sC1bitLKPf99aHWZleQ1qtWga9OubD221Wa59UfX08uvFzWq1XCt\nQiGEqAQkgFchmzfD6NHGELVZGRnlC/hm57FLD6OXZ/77YOpBci25tGvYruznO5gHL8/2MSGEqCwk\ngFcRublG4J440bUAvmgRPPCA6/WZDcTFF7JlZhp7w/v2dbGuQ8bwefH5bytH8+Ay/y2EqMokgFcR\nu3ZBy5ZGjzchwbhMxIzISGM4PDvbfF1auxbAN20yykRGGvPitWubrwuKDnCxxbqVTGtdIv1C9gVi\nU2Lp42/u/HMhhKhsJIBXEZGRRrD09IS2bY2gbLacUhAba76uuDgjCLdo4Tyvv78xL3/wYPnmv8Fx\nT7pFPaMRR84dKZG+/uh6evj2wLOai7elCCFEJSEBvIrYvNkI4GAsHDMzjJ6VZQT6O+5wbR78u5XR\ndB24y3R+6zB6eea/D6ceJiM3gw6NO9h8rpSyeaCLddhdCCGqKgngVURkZNHd2sHB5gJ4VBR06ADX\nX+9aAJ935H1S288wnb9PHyN4b9kC/fqZrweKet+25r+tbC1kizgsC9iEEFWbBPAq4Nw5OHLEmF8G\n8wHc2msPDnYtgCfpSM54mC8QGgrffQcdO0LduubrAXM96dIL2dJy0th1cheh/i7eliKEEJWIqQCu\nlBqilNqjlNqnlHrJxvO6SqlflFLRSqldSqkJZsuKi7d1qxGEqxdcd92tm3GYi8XJRV3WXntwsHGu\nubP8AAlJp8n1PM7+tN3kWWxcNWZDjx7GcH155r/N9KR7+vVk58mdZOcZK/E2HN1Ad9/u1Kzu4m0p\nQghRiTgN4EopN2A6MBjoDIxRSpWekHwCiNFaBwM3Ah8opaqZLCsuknUBm1XDhsYhKwcOmCvXsKHx\ns3+/87rmrd1M/fTeNKvTjL2n9ppqX61a0Gjs8wRev89UfqvE84mczz5PJ+9Ojj/foxbtG7Un+oQx\n7CDz30KIa4GZHnhvIF5rfVhrnQvMB4aXyqOBOgWv6wCntdZ5JsuKi1R8AZuVs4VsKSlw6pQxBw7m\nh9H/3BtJhzp9CPENIeqEuWH0fEs+GR2+JK/5KlP5rVYfWk3/gP4O57+tis+Dl761TAghqiIzAbwZ\ncLTY+8SCtOKmA52UUseAHcDTLpQVF8G6v7p3qeO+nc2Db94MvXqBW8HfALMr12NSIwlr24cQnxC2\nH99uqo17Tu0hLfcCUSfM5beKOBRBWECYqbzWefCM3AyiT0Rznb+Lt6UIIUQlU1GL2AYDUVprPyAE\nmKGUcvG4DlEeiYnG3HVAQMl0Zz3q0r32kBDnPXCLRXO6xmbu7mcEcLM98MikSNo0aGM64Fu50pMO\n9Q9l49GNbDy6kW4+3ajl4eJtKUIIUclUM5EnCSh+ZId/QVpxDwBvAWit9yulDgIdTJYtNGXKlMLX\nYWFhhDm6bFoARb3v0qPMznrgkZHw2GMl8zsL4H9GJeCWX5vgNr4kp7sTfSIarbXTIe7IxEgmdZ/E\nv9b8i9z8XKq7V3fyreBk2klSMlLo0qSL07wA7Rq143z2eebvnm+61y6EEJdTREQEERERFfZ5ZgL4\nFqCtUioAOA7cA4wplecwMBBYr5RqCrQHDgDnTJQtVDyAC3NKL2CzCggwLipJToYmTUo+09rogc+e\nXZTWvLlxnvrx4+Dra7uuxZsj8c03KmtSqwm1qtfiUOohWjVo5bCNm5I2MbH7RObsnENsSizdfLo5\n/15JkfRp1gc3ZW6QyE250ce/D3N2zmHpmKWmygghxOVUumM6derUi/o8p/86aq3zgSeBFUAMMF9r\nHaeUekQp9XBBtjeB65VSO4E/gBe11mfslb2oFosSbC1gA6NHHhxs+27w+HioUwd8fErmdzYPvv5w\nJMHeRZWZWciWlpNGwpkEuvl0c2nePDLRCOCuCG0WSr4ln+ubX+9SOSGEqIxMdW+01su11oFa63Za\n67cL0mZqrb8oeH1caz1Ya9214Geeo7KiYuTlGbd79epl+7m9YXF7Qd/ZPPiB7EiGBhUL4CYC8tZj\nW+natCse7h509+1uOoBvStrk8kEsYS3D6NeiH7U9ZPmFEKLqk5PYKrHYWGjWDOrXt/3c3jy4vWF3\nR/PgqWlZpHvF8LcbuhemmVnIFpkYSWgzIxB39+1uauFbviWfLUlb6N2st9O8xQ1oOYC/xv/lUhkh\nhKisJIBXYra2jxXnKIDbKudoCH3humhqZgTSuJ5XUX7fEKKOOwngSZGFV3oG+wSz4+QO8i35DsvE\nnYrDp7YPjbwaOcxni5k940IIURVIAK/E7PWkrTp2hEOHjMVsVtYbyHr0KJs/MBCOHYPz58s+W7Yz\nklYeJSsLqBdAVl4WJ9NO2qxfa82mxE2Fc9n1PevTpFYT4s/EO/xemxJdHz4XQohrjQTwSszeXLaV\nh4dx0tquYjd/RkdD+/bg5VU2v7u7cSGKrYVv209Gcl3zkpUppRwuZEs8n0i+zqdl/ZaFad19uzvt\ntUsAF0II5ySAV1JpacbZ5V27Os5XehjdWdC3t5AtSUVyZ6+yBR0tZLP2vosPa3f3cb6QzbqFTAgh\nhH0SwCupbduM4O3h4Thf6QDubNjd1jx43JEU8qqfZkjPwLL5HSxksxWIQ3xD2O7gSNXz2ec5ePYg\nXZs6+c1ECCGucRLAKylnC9isbAVwZwvfSvfAf1i7mYaZvajmXvavi6OFbJFJkWWGwkN8jPxaa5tl\ntiRtIcQ3xNRpbUIIcS2TAF5JORsKt+rWzZgDz883bh9LSSm6gcyWoCDYuxdycorSVu2LpGNd25UF\nNgrkRNoJzmWdK5Gem59L1PEoejUruUm9ae2meFX34vC5wzY/T4bPhRDCHAnglZSzoXCrevWgaVPj\n9LXNm6FnT2Oxmj1eXtCqlbFS3Sr2/CZubGe7Mnc3d4KaBrHjZMmVb7uSd9Gyfkvq1qhbpkyIr+N5\nc1nAJoQQ
zkkAr4SOHYPMTGjd2lx+6zC62V578XnwvHwLZzy3cM8N9gvaWshWfPtYafYWslm3nUkA\nF0II5ySAV0KbN9u+gcweawA322svPg/+x/Z9uOc2oHPLJnbz21rIVvwAl9Lsnch2KPUQ1d2r41/X\n33kjhRDiGicBvBLavt32QSz2WAOyNfA7U3wr2eLNkfhZHEd9W3u7IxPLLmArnt9WD1x630IIYZ4E\n8EooOtoIymaFhEBEBNSqZf+q0OKst5hZLLDxSCQhTRwH8C5NupBwJoGsvCwAzmaeJelCEp29O9vM\n71/Xn9z8XI5fOF4ifVPipsJz04UQQjgmAbwScjWAN2tmXB9qZvgcoFEj44KUAwfgQE4kw7o6Llij\nWg3aNWrH7uTdAGxO2kwP3x64u9leLaeUsjmMvilpk91hdyGEECVJAK9kzpyB1FRjpbhZSoH/Tb/S\npleC6TIhIRCxLpNMrz3cdUOI8/zFFrKZGQovPYyenZfN7uTd9PB1YW5ACCGuYRLAK5kdO4y93W4u\n/i+nbnibGsGLTecPCYEZP23HK6MTDep4Os/vU3Sgi5m93KUDeNSJKAIbBVLLo5bpNgohxLVMAngl\nEx1tBHBXWLSFhLQdHEzfbbpMSAhEp0TSpoa5IW3rkLjW2uEK9MLPL7X1zNG2MyGEEGVJAK9kXJ3/\nBth/Zj8ZuRmFc9RmBAcDzSK5voW5oNrNpxu7k3ez9/RealWvhV8dP4f52zRsw9mss5zJPAPYPnZV\nCCGEfRLAK5nyBPDoE9Hc2PJG9pzaQ74l31SZFi3AvcVmRl9nLoDXrVEX3zq+zNkxx9RCNDflRrBP\ncOGwu2whE0II10gAr0RycowjUTvb3p1lV/SJaPq16IdPbR8OnD1gqszZrDN4NT7NTcFtTdcT4hPC\nrKhZpreCWYfRT6adJDUrlXaN2pmuSwghrnUSwCuR2Fhj9XnNmq6VizoRRYhPCJ2bdDY9jL7jxA66\n+XTDTZn/KxLiE0JyerLprWDWeXProjdX6hJCiGud/ItZiZRn+ByMHniwTzBdvLuYDuDWoO+K7r7d\ncVfudPftbjr/9uPbZfhcCCHKQQJ4JVKeAH4y7SSZeZm0qNeCLk26sDvl0gXw65pfxxs3voFXdS9T\n+Ts07sDR80dZeWClrEAXQggXSQCvRHbscD2A7zi5g2CfYJRSRgA32wM/HkWwj2uV1a1Rl1dueMV0\n/mpu1ejSpAtbjm2hdzMTh7QLIYQoVO1KN0CYo3X59oBHn4gu7EkHNg7kwNkD5OTn4OHuYbdMZm4m\nB84eoHMTF1fLlUN3n+6kZqXSyKvRJa9LCCGqEumBVxJHjhiL15rYv9XTpqgTRT1pz2qetKzfkn2n\n9zksszt5N4GNAx0G+YpyQ8ANDGw18JLXI4QQVY0E8EriYhewWXX2dr4SvXjQv9TGBo1lxq0zLktd\nQghRlUgAryTKM/+dnpPO4dTDdGjcoTDNzDx41HHXF7AJIYS4vEwFcKXUEKXUHqXUPqXUSzae/10p\nFaWU2q6U2qWUylNK1S94dkgptaPg+eaK/gLXivLMf+9O3k2Hxh1KDIWbCeDRJ6MlgAshxFXOaQBX\nSrkB04HBQGdgjFKqQ/E8Wuv3tdYhWuvuwCtAhNY6teCxBQgreC5LjcupPEPotraCOQvg+ZZ8dp3c\nRTcfF39bEEIIcVmZ6YH3BuK11oe11rnAfGC4g/xjgHnF3iuT9Qg7zp2D5GRoa/5UU6Ds/DdA24Zt\nSbqQREZuhs0y+07vw6e2D3Vr1C1vc4UQQlwGZgJrM+BosfeJBWllKKVqAkOARcWSNfCHUmqLUmpS\neXBH7XcAACAASURBVBt6Ldu5E4KCwN3dtXK2Ang1t2oENgokLiXObpkQXxk+F0KIq11F94xvB9YV\nGz4H6FswtD4MeEIp1a+C66zyyjP/nW/JZ3fybptD4Y7ORC/PCWxCCCEuPzMHuSQBLYq99y9Is+Ue\nSg6fo7U+XvDfFKXUTxhD8utsFZ4yZUrh67CwMMLCwkw0r+qLjoZevVwr42go3NGZ6FEnong29Nny\nNFMIIYQDERERREREVNjnKa214wxKuQN7gZuB48BmYIzWOq5UvnrAAcBfa51ZkOYFuGmt05RStYAV\nwFSt9Qob9WhnbblW9egBM2ZAqAv3fczbNY9FcYtY+LeFZZ4t3buUz7Z+Rvi94SXStdZ4v+fNrsd2\n4VvH92KbLYQQwgGlFFprVd7yTnvgWut8pdSTGMHXDZiltY5TSj1iPNZfFGS9E/jdGrwLNAV+Ukrp\ngrrm2grewr7cXIiLM+bAXWFr/tvK3kr0pAtJVHOrJsFbCCEqAVNnoWutlwOBpdJmlnr/DfBNqbSD\nwOU50usyysmBkyehefNLX9fevUY9tWq5Vi76ZDRP9X7K5rOA+gGczTxLalYq9T3rF6aX5wITIYQQ\nV4Zs7yqHJUtg3LjLU1d59n9rrR2epuam3OjcpDMxyTEl0mUBmxBCVB4SwMshJsboGV8O5Qngx9OO\no9H41fGzm6ezd2diUmwEcNlCJoQQlYIE8HKIizOG0M+du/R1lecMdOv8t1L210bYmgd3NG8uhBDi\n6iIBvBzi4sDTE+LjzZc5cz6T1LQsl+q5mDvAg5s6DsSlA/jZzLOczjhN24YuHvcmhBDiipAA7qK8\nPCNw33wz7HN8rXYJg97+P0Z/+L5LdR07BkqBr4uLws2cplY6gEefiKZr0664KfkrIYQQlYH8a+2i\ngwfBxwdCQlwL4AczdhJ/LsZ5xmKs898ORsJtMnOft29tX/IseSSnJxeWkQVsQghReUgAd1FcHHTs\nCO3buxbAUz1iSbG4tvKtPPPfF7IvcOzCMdo3au8wn1KqRC9c5r+FEKJykQDuovIE8MMnU7HUOE2m\n1z4sFvOnza2Oi6Fz11yX2rfz5E46e3emmpvzLf6dvYu2kskKdCGEqFwkgLsoNhY6dYJ27YwAbub0\n19+3x+GVFoTKq8X2hGOm64rwHk1G0z9dal/0iWjTQ+HWHnhmbiYJZxLo7N3ZpbqEEEJcORLAXWTt\ngTdsCDVqGNvJnNkQH4uPeyfqZAeyere5YfTz6dnk1Ikno6Zrw+6uDIV3adKF3Sm7iUmJoX2j9tSo\nVsOluoQQQlw5EsBdoDXs2WMEcDA/jL77RCztG3TCp3p7th0yN+7+R9Q+cMsnIXWPS200s4DNynqt\n6Pbj22UBmxBCVDISwF2QlAReXtCggfHebAA/khlHzxadaNsgkD0p5nrUa+JiqZ7XgL2nzffA8yx5\nxKbEEtTU3M0njb0a41Xdi6X7lkoAF0KISkYCuAus899WZgP4GfdYbgzqRHDzQBKzzAXkqMQYOrrd\nwZ5T5nvgCWcS8KvjR22P2qbLdGnSheUJy2UFuhBCVDISwF1gnf+2at/e+WlsJ86kke+ZTL/OLekb\n2J6zbuaG0A9ciKV/s8Gcyz7H+ezzpsrsTt5N5yauLUTr7N2ZPEueB
HAhhKhkJIC7oHQAt65Ed2TF\n9j3UTA/Eo7o7/YNak+eVyPn0bKd1pRBDWKcutGvYjr2nzPXaY5JjXF5J3qVJF1o3aE09z3oulRNC\nCHFlSQB3QWxsyQDeti0cOAD5+fbLrNsbSxM3Y9y9dk0Pqme0IGLXfof1XMjIIcfrELeEtKdD4w6m\n58FjUmLo0qSLqbxWg9sM5uW+L7tURgghxJUnAdwFcXEl58C9vMDbG44csV9m5/FY2tUvKtTAEsj6\nPY4D8sqofXhkBlC3Vg0CGwWangePSXG9B968XnMm9ZjkUhkhhBBXngRwk06dgtxc4xz04pwtZDuU\nHkv35kUBvHnN9uxMcjzuvjo2lsbaCMRme+A5+TkcOHuAwMaBTvMKIYSo/CSAm2Sd/y59sYizAH7a\n7f/bu/foOKvz3uPfZ3SXrLtkSbalGUn2yDZgY5eYSzDIkFCbcApZTcBOV9LSU8pKQ+Bk0TR2FzSw\nmrJoD6c97UlzIG1IXU4hhxAC5kADhCAgCcQOtjG2LGuk0R1fJOtm3WVpnz9mRh6N5vJKlqUZzfNZ\ny8t633n3zLslL/+097svtVT7NdvXLq+isTd8IB/qOEZFpqdMVUGVpWfgrrMuyrLLSE1MjXitUkqp\n2KcBblHgFDKfcAHe3T/M+bQOqjdWTp37VHkVp8bDB3Jjfy2bVnpa4M58J65uFxOTYR604x2Brkuh\nKqVU3NAAtyhwBLpPuAB/89AJUoYqSUu5sLHIjZc7GUgN34XeyTFuWOcd+Ja8jIL0Alr7wjxoZ24D\n2JRSSsUuDXCL5hLg7x6vpZDpzfYN5cUY2yiNn3QHLTMwPMZYehO3bL7wLNvKQLa5DGBTSikVuzTA\nLQoV4A4HnDwJIyMzX/vok1oqs6YHuM0mpA9X8dZHwbvR3zrkImnIMwLdx8pAtmNnjs16ERellFKx\nSwPcgnPn4OxZsNtnvpaY6AnxxiBTu5sGatm0auaD80KbkwPu4M32mtpjFJjpZSK1wEfPj9LS14Iz\n3xm2HkoppZYODXAL6uo8XeUJCcFfD9WN3sXxqWfZ/iqyqjh2KniL+lB7LZVZ01vSkVrgJ86eoDyn\nnOSE5NCVUEoptaRogFsQqvvcJ9ia6J5n2c3cfOWaGddfsaKKlsHggdzQf4wrVwS0wAvCt8Dnsga6\nUkqp2KYBbkGoKWQ+wdZEf+uQa2o1tUDXrHFyluBd6J3UcuP66WG8KmsV/aP9ITc1OXbmGJcX6gh0\npZSKJxrgFlhpgQcG+DvHa8mfDJ76N290MprewNj49Lndnla7m89smv4s2yY2nPnOkAu6HOvUAWxK\nKRVvLAW4iGwXkToRqReRbwV5/c9F5JCIHBSRj0XkvIjkWCkbC+YS4Ifaa6dWUwu0PDeDhNECPjg+\nfW73Lw43kDRcRs6ymauprS1YG7IbXaeQKaVU/IkY4CJiA74L/C5wGbBLRNb6X2OMecIYs8kYsxnY\nA9QYY3qtlI12o6OezUpWrw59zYoVMDAAfX0XzjX21bJxReh+96zxKt6tnd6irqk9RkGIVntVflXQ\ngWzD48O097ezOi/MDSqllFpyrLTAtwAuY0yLMWYc+BFwe5jrdwHPzbFs1Kmvh/JySA4zwFvE8xzc\nfyDbGWq5fm3oZvuKFCeHWqc32w+2HaMiM3hLOtRUsuNdx1mdt5qkhKTwFVFKKbWkWAnwlUCb33G7\n99wMIpIGbAd+Mtuy0SpS97mPfzf6yNh5RtMb+Oym0DuDOfOrqA9oUTf213LlyuABHmoq2bEzuoSq\nUkrFo/kexPZfgF8aY3rn+X0XzVwC/O2PGkkcWUFBdnrI6zeXVdExOj2Q/ddAn/H++U4auhtmbGqi\nz7+VUio+JUa+hA6gzO94lfdcMDu50H0+27I88sgjU19XV1dTXV1t4fYurePH4bbbIl/ndMJ//qfn\n65pjteRNhJl3Bly/zskjH17oQh8aGWc03T1tDXR/GckZFKYX0tLXQkVuxdT5Y53H+OMr/zjyDSql\nlFpUNTU11NTUzNv7WQnwA8BqEbEDJ/GE9K7Ai0QkG7gR+IPZlvXxD/BoUVsLf/EXka9zOuEf/9Hz\n9cG2WhwZ4QP8uvV2JlI6OdMzyPLcDH5+2EXScGnQEeg+vgVdpgW4roGulFIxIbBh+uijj17U+0Xs\nQjfGTAD3AW8Ax4AfGWOOi8i9IvKnfpfeAbxujBmOVPai7niOmpvh4MHZlZmY8AxMqwr9KHuKbzEX\nY6Cht5YNJeEDPDkpgZShSt4+4hn59s6xWgomwwfx2vy10+aCD44NcmrgFJW5lWFKKaWUWoqstMAx\nxvwMqAo491TA8V5gr5Wyi+Hf/92zpvmzz1ov09QERUWQkRH52rw8SEmB06fh9GQt1zv/W+QyxskH\nrnruuvFKDrYfozzEvHGfqoIqPj798dRxbWctVQVVJNhCLNKulFJqyYqbldgaGsDtnl2ZSEuoBlqz\nBo4dn2A4vZ7Pbo483d2+rIojn3ha1I19tVy5IkILPGAkug5gU0qp+BVXAR5sy89wrI5A93E64Sc/\nbyFhrIAV+ZkRr7+sqAp3nyeQz4QZge4TuJjLsTMa4EopFa/iJsBdLujuhv7g+4EE9XLbv5K5+uPI\nF3o5nfD/flNL7nlrzfZPVTjpnKz3jkBvDDtvHGBl1krOjZ6jb8Sz5Juuga6UUvErLgK8txeGhz3d\n4U1N1st9nPg0Z7Pfsny90wltI7WUpVkL8Js2VDGYdsI7An0VeVlpYa+f2tTE2wrXLnSllIpfcRHg\njY2etcwrK2f3HHww1cVQsvV+d6cTKKzl8iJrAb5mVT4ymcT/+eXb5EcYge7jm0rWP9pP11AX5bnl\nlu9PKaXU0hEXAe5yeQaYVVRYD/Cmkz2YtC4+GWmw/DmrVwOFtVy3xvqD84wRJz9vfzHkGuiBfFPJ\najtrWVewDpvExY9QKaVUgLj437+hwROuFRXWB7LVfOzCNp5JY4/1FnhamiGppI5bt1gP8OLEKnqy\n3wm7c5m/qoIq6s7W6QIuSikV5+IuwK22wA+4XRSPbKO1r3XG+uOhnBo4Rc6yVEoLci3fW2VOFdgm\nuHGdxRZ4gacFrs+/lVIqvsVFgM+lC732lIvy9CsozCikrb8tcgGg/mw9znznrO5t46oqmLRFHIHu\nsyZvDQ3dDRw5fUQDXCml4lhcBLivBe5wQGurZ4nUSJrPuVhftIbK3Eoau611o88lwHds2khG/+9E\nHIHuk5GcQdGyIt5rfU+3EVVKqTi25AO8vx8GBqCkBFJTobAQ2tsjl+uadHFVhdMT4Bafg7u6XazJ\nWzOr+6veWMHAP+yfVZmq/CqSE5Ipyy6LfLFSSqklackHuK/1LeI5ttKNPjlpGEp1cePla6jMu7Qt\n8LlYW7CW9YXrEV+llFJKxZ24CXAfKwFe334WgDUr82fVAl+oAN9UvImrSq665J+jlFIqelnajSyW\n+Qaw+VgJ8HeO1pM+sgab
TTwtcAsBPjE5gbvHzeq81RGvvVh3b7qbP7ryjy755yillIpecdcCt7Ia\n22+bXBTYPKnvG8RmjAlbpqWvhaJlRaQlWRuMdrG0+1wppeJb3AW4lcVcjp924cj0BHhuWi6JtkS6\nhrrCllmo7nOllFIK4iDA59KF3jrgYn3xhUJWutHrz9bjzNMAV0optTCWdICfO+eZRlZScuFcYSGM\njEBfX+hyXcbFlkq/AM+tpKE7/Jro2gJXSim1kJZ0gDc2ep552/xqKeJphYfaVnRy0jCc7qL6igsB\nvjpvdcSpZBrgSimlFtKSDvDA7nOfysrQz8E/bjqNTKTiKL6wnrmVqWQa4EoppRbSkg7wwAFsPuGe\ng797rJ6MkempH+kZ+PD4MKcGTmHPsV/M7SqllFKWaYAH+LDZxfLEgACPsB56Y08j5bnlJNqW/LR6\npZRSUWJJB3ioLvRwAV53xkV51vRCJZkl9I/2MzA2EPxzzs5+DXSllFLqYizpAJ9LC7xtyMVlJdPD\n2CY2ynPLcfcEL6TPv5VSSi20JRvgAwPQ2wsrV858zeGAtjY4f37ma924uHr1zNZ0uG50DXCllFIL\nbckGeGOjp6VtC1LDlBRYvnzmtqLnJyYZSWukekOIAA8xkK2+WwNcKaXUwlqyAR6q+9wnWDf6Qdcn\n2MazWJGfOeP6cNuKagtcKaXUQluyAe5yzT7A36utZ9lY8MFooVrgvSO9DI4NUrKsJEgppZRS6tJY\nsgHe0BB8BLpP0BZ4i4uipBABHmIuuOusC2e+U3cHU0optaAsBbiIbBeROhGpF5FvhbimWkQOichR\nEXnb73yziHzkfW3/fN14JJG60IOtxnaiy0VFdvAAd+Q4aO9vZ3xifNp57T5XSim1GCKuPCIiNuC7\nwM3AJ8ABEXnZGFPnd0028M/ALcaYDhEp8HuLSaDaGNMzv7ce3ly60DuGXGyrvDbo9ckJyZQsK6Gl\nr4XVeRfeWANcKaXUYrDSAt8CuIwxLcaYceBHwO0B13wJ+IkxpgPAGOO/ebZY/Jx5MzgI3d1QWhr6\nmmAB3i0urnGG7ncPNpBNR6ArpZRaDFaCdSXQ5nfc7j3nzwnkicjbInJARL7s95oB3vSev+fibtca\ntxvKy4NPIfMpKICxMc9ccYCx8QnG0pu48YrQzfZgA9m0Ba6UUmoxzNfi3YnAZuAmIAN4X0TeN8Y0\nAJ82xpwUkUI8QX7cGPPLYG/yyCOPTH1dXV1NdXX1nG4m1BKq/nzbirrdsHkz/KaujYTRAgqy00OW\nCdxW1BhD/dl6XUZVKaVURDU1NdTU1Mzb+1kJ8A6gzO94lfecv3agyxgzAoyIyLvARqDBGHMSwBjT\nKSI/xdMlHzHAL0akAWw+lZUXAvyXx+vJHA8fxJW5lfy67ddTx6cHT5OamEpuWm6YUkoppdTMhumj\njz56Ue9npQv9ALBaROwikgzsBPYFXPMycL2IJIhIOnA1cFxE0kVkGYCIZAC3AEcv6o4tsBrg/s/B\nD7W6KE6OEOABU8m0+1wppdRiidgCN8ZMiMh9wBt4Av8HxpjjInKv52XzfWNMnYi8DhwBJoDvG2Nq\nRaQc+KmIGO9n/Ycx5o1LVx0PlwvuvDPydRUVcOSIt8xZF5W5kVvg7h43xhhERANcKaXUorH0DNwY\n8zOgKuDcUwHHTwBPBJxrAq68yHuctdm0wF96yfN1x4iLHeu2hb0+MyWTjKQMTg2coiSzxBPgeRrg\nSimlFt6SW4ltaAg6O8NPIfOpqLiwmEuvzcW1YaaQ+fh3o2sLXCml1GJZcgHum0KWkBD5WofDsyNZ\n/8A44+kt3HBFRcQy/tuKaoArpZRaLFEd4I//+E1+8PpvZlXGavc5QHIyFBfDT95qJnF4BdkZqRHL\n+OaCT0xO4O5xT1uVTSmllFooUR3g//KbZ/j+ey/OqkykJVQDVVTAvl+7yDpvbS63rwu9pa+FomVF\npCWlzer+lFJKqfkQ1QHedb6Jk0MtsyoTaReyQBUV8MEJFytSLAZ4biUN3Q3afa6UUmpRRXWADyY1\n02NmF+C/7HuWEkef5esrKuDUuIvVFldT862HriPQlVJKLaaoDfD+wVEmMjoYSmqeVbkTq/6SnmXv\nW76+shLIc7FxlbUAL8ooYuT8CAc+OaAtcKWUUosmagP8QH0bCYOlTKZ00zswYqnM0Mg4ExltDKc0\nW/6cigog38W1VdYCXESoyK3g9YbXWZOva6ArpZRaHFEb4B82NpM5Xkni8CoOnGiLXADPhiTYJmnt\nb7L8OaWOMcjsYOvl5ZbLVOZV0jnUqS1wpZRSiyZqA/xoexOFSQ6WnbfzobvZUpkDDZ6FzZv7rF0P\ncC6hmbLcVaSnJlkuU5lbSaItEUeOw3IZpZRSaj7N13ai866xu5nSzHI4B8c/sTaQ7Wh7E9nja2nu\nbbb8Oe4eN87CyAu4+Fudt3oqxJVSSqnFELUJ1DHYxPbKHUyaSdzd1gK84WwTVUk30dz7guXPcfe4\nqciZXYBfV3odHf2BO6oqpZRSCydqu9DPTjRzRamDynw7HQPNlsq0D7rZXHQt/aP9DI4NWirj7nFT\nkTu7AN9QtIG/vumvZ1VGKaWUmk9RG+BDyc1scZZz+SoHZyestcC7TRNXllVgz7bT0metTGNPI5V5\nlRdzq0oppdSCi8oA7+4fZjKlm40VJWyusDOYZC2Mh5KbuKaqAkeOw/Jz8Lm0wJVSSqnFFpUB/psT\nrSQNlZKclMBVzlVMpJ1kaGQ8bJlT3QOYpAGuKC/CkeOgqSfyVDJjjAa4UkqpmBSVAf5hYxOZEw4A\n0lOTSBgu5mBD+EFjv6ptInnIgc0mlOeUW2qBdw11kWRLIic1Zx7uWimllFo4URngtZ80U5jsmDrO\nGLfzYWP4bvSDTU1kG09L2pHjsDQX3N3j1uffSimlYlJUBri7u5myzAsro+Ul2Dna3hy2TO0nbopT\nPGWsdqE39jRq97lSSqmYFJUB/slQE2sKHVPHKzMcNHaFb4E39TZRnnMhwK10oc9lDrhSSikVDaIy\nwLtNMxvtF1rgFXl22iPMBT810sS6Yk8YL89YztD4EOdGz4UtowPYlFJKxaqoDPCh5Ca2OB1Tx+tW\n2OkaD98C7xU3mys8oS8illrhGuBKKaViVdQF+JmeQUzSOa4oL5o6t7nCwbmE0AE+OWkYTWvmunUX\nWu1WA1wHsSmlFDgcDkRE/1yCPw6H45L8zKJuLfQP6lpIHraTYLvwu8XVa8s4n97G+YlJEhNm/s5x\nvLUTmUhlVWHW1LlIU8lGz49yevA0q7JWzev9K6VULGppacEYs9i3sSSJyCV536hrgR90N5E16Zh2\nLmdZKraxXA43ngxa5v06N2mj0/fzjtQCb+5tpjSrVHcUU0opFZOiLsBrTzazPLl8xvm0MQcfNgTv\nRv+opYk8mRngTb2hp5Lp82+llFKxLOoC3N3TRFmWY8b5PLFzpLU5aJkTZ5pYkT49w
Mtzw3ehu3vc\nVObq82+llFKxKeoC/NRwM2uLZrbAS9IdNISYC97S72Z1/vTWdKQudF3ERSmllpa2tjaysrIsPctv\naWnBZrMxOTkJwK233sozzzwDwN69e9m6deslvdf5YCnARWS7iNSJSL2IfCvENdUickhEjorI27Mp\n66/HNLOhzDHjfHmunbb+4AHeOd7E5Sunh35+Wj5jE2P0jfQFLaNd6EoptbSUlpbS399vedCY/3Wv\nvfYaX/7yl4O+Fq0iBriI2IDvAr8LXAbsEpG1AddkA/8M3GaMuRz4otWygYZTmri6yjHj/NpiO2fG\nm4OWOZfYxFWV0wM80lxwDXCllFKxzEoLfAvgMsa0GGPGgR8Btwdc8yXgJ8aYDgBjTNcsyk5p7+zH\nJIywtrRwxmubyh2ck5kt8JGx85xP6+Da9fYZr4V6Dq7biCqlVOwoLy/niSeeYOPGjWRmZnLPPfdw\n5swZbr31VrKysrjlllvo6+ub0S2+bds2/uqv/orrr7+erKwstm/fTnd3d9DP2LZtG08//XTQ1775\nzW9yww03cO6cZ3XPp59+mvXr15Ofn8+OHTtobW29NBWPwEqArwTa/I7bvef8OYE8EXlbRA6IyJdn\nUXbKB3XNpAx7tgQNdPVaO2PpLUxOTn+2sb+ujYSRIpalJc8o48gOPhK9c6iT1MRUslOzQ92KUkqp\nKPLiiy/y1ltvUV9fz759+7j11lt5/PHH6erqYmJign/6p38CZnZ9P/fcc+zdu5fOzk5GR0d54okn\nLH+mMYZ77rmHo0eP8uabb5KZmcnLL7/M448/zksvvURnZydbt25l165d81pXq+ZrEFsisBnYAWwH\nHhaR1bN9k0PNzWSbmQPYAIrzliHn0zje2jnt/IGGJpaNBy8Tqgu9sVsHsCmlVCz5+te/TkFBASUl\nJWzdupWrr76aDRs2kJyczOc//3kOHToUtNzdd99NZWUlKSkp3HnnnRw+fNjS542NjbFr1y56e3t5\n5ZVXSElJAeCpp55iz549OJ1ObDYbu3fv5vDhw7S1tUV4x/lnZRWTDqDM73iV95y/dqDLGDMCjIjI\nu8BGi2WnvPjv/xuGz/LII49QXV1NdXX1tNdTRx381tXCZY7lU+eOtLkpTAwexuW55bzX+t6M89p9\nrpRSszNfY7rmuthbUdGF5bXT0tJmHA8MDHjff/oHFBcXT32dnp4+dV0kDQ0NHDlyhP3795OYeCEq\nW1paeOCBB3jwwQenPk9E6OjooLS0NOx71tTUUFNTY+nzrbAS4AeA1SJiB04CO4HA/oKXgf8lIglA\nCnA18PfACQtlp6RdtZYtWSt5ZM+fB309BztHWluAT02dazjbRGnm7FrgGuBKKTU7sbLK6nyNHl+/\nfj1f+9rX2L59O7/4xS9wOp0AlJWV8dBDD82p2zywYfroo49e1D1G7EI3xkwA9wFvAMeAHxljjovI\nvSLyp95r6oDXgSPAB8D3jTG1ocqG+qyTI8HngPsUp9o5caZ52rmOwSachaEDvKm3acZvZO5eXcRF\nKaWWotms5x7p2rvuuovHHnuMz3zmM7jdbgDuvfdeHnvsMWprawHo6+vjhRdemPsNXwRLC4EbY34G\nVAWceyrg+AlgxuiAYGVD6aWJK8Ps2uLIddDQ3TDtXNekm4324K3p3NRcjDH0jvSSm5Y7dd7d4+Yr\nG75i5ZaUUkotssBWdbhWtv9rkVrjVq79yle+wtjYGDfffDPvvPMOd9xxB4ODg+zcuZPW1lays7P5\n7Gc/yxe+8AUrVZlXEi27z4iIYXc2DV93U7kiL+g1e/a+xA8PP82pf9g3dc72rSJ++yeH2LxmRdAy\nG5/cyL/d/m9sKtk0dW7V36/iV3/8K+w5M6eeKaVUPBIR3Y3sEgn1vfWen3Off3QtpSqTlBfnhnx5\ng91OnzRPHXv2Du9nQ0VxyDKBm5qMnB+hc6hTtxFVSikV06IqwFNHgs8B97mmysFI6oW54L+qbSJ5\nyBF0j3AfR/b0gWzNvc2UZZeRYEuYt/tWSimlFlpUBXhOiDngPvaiHMDQcroX8OwdHmreuE/gamy6\nC5lSSqmlIKoCvDjVEfZ1m01IGbGzv96zpGrtySaKguwd7i+wC12nkCmllFoKoirAHTnhwxgg2zj4\nqMUT4O4eN46c8GEcOBdcV2FTSim1FERVgK8tdkS8pijFTt2pZgBOjTSxrjhyC7y5t3lqBKC7V1vg\nSimlYl9UBXi4OeA+pVl2Wno9LfBeaWJTefgAz0nNIdGWSPewZwcafQaulFJqKYiqAL9mrSPiNVVF\nDk6NeEaij6Q18en1kVvT/iuyuXvclOdG7qpXSimlollUBbhnlHl4V5Ta6THNnGjvQiaTKFseI8J4\nRAAAClxJREFUeUtQXzf6mcEzpCelk5WSNR+3q5RSKo4F7j++0KIqwK341Bo7IyktfFDXROqItZZ0\neY5nKlljjw5gU0opBXv37mXr1q0X/T7ztXnKXFhaCz2arLcvxyQO8U7dx+SJtTB25Dg40XWC4mXF\nGuBKKaWmtgENZ3JyEpstetu50XtnIdhsQvJwGe+21LAizVoL3JHjoLmvWQewKaVUjGpvb+f3f//3\nWb58OYWFhdx///0APP3006xfv578/Hx27NhBa2vrVBmbzcZTTz2F0+kkLy+P++67D4C6ujq++tWv\n8v7775OZmUlenmf/jbvvvps/+7M/43Of+xyZmZnU1NTw2muvsXnzZrKzs7Hb7Re9Beh8irkAB8ic\nsNNie5vKvNl1oesiLkopFXsmJye57bbbKC8vp6WlhY6ODnbu3Mm+fft4/PHHeemll+js7GTr1q0z\n9ul+9dVX+fDDD/noo494/vnneeONN1i7di1PPvkk1157LefOnaO7u3vq+ueee46HH36Yc+fOcf31\n17Ns2TKeeeYZ+vr6ePXVV3nyySfZt29f4C0uipgM8MJkB5PLOrh8lbUwtufY9Rm4UkrFqP3793Py\n5En+7u/+jrS0NJKTk7nuuut48skn2bNnD06nE5vNxu7duzl8+DBtbW1TZffs2UNmZialpaVs27aN\nw4cPh/2s22+/nWuuuQaA5ORkbrjhBi677DIALr/8cnbu3Mk777xz6So7CzH3DBygNNNO3SRcVWmt\nBZ6VkkVqYioHTx7UAFdKqTmQR+dnsJb59uy3LG1ra8Nut894Ht3S0sIDDzzAgw8+6Hlv73Ptjo4O\nSktLASgqKpq6Pj09nYGBgbCf5Svns3//fnbv3s3Ro0cZGxtjbGyML37xi7Ouw6UQkwG+utDOm6eE\na9dZ38/bkePg6JmjrMxceQnvTCmllqa5BO98KS0tpbW1dcagsrKyMh566KEZ3eZWhBrAFnj+S1/6\nEvfffz+vv/46SUlJfOMb3+Ds2bOz/rxLISa70K8odWAbXEFWRorlMuU55ThyHLqNqFJKxZgtW7ZQ\nUlLC7t27GRoaYnR0lF//+tfce++9PPbY
Y9TW1gLQ19fHCy+8YOk9i4qKaG9vZ3x8POx1AwMD5Obm\nkpSUxP79+3n22Wenve5bpnsxxGSA/+HNW/jv1z0zqzKOHId2nyulVAyy2Wy88soruFwuysrKKC0t\n5fnnn+eOO+5g9+7d7Ny5k5ycHDZs2MDPfvazqXKBrWn/45tuuonLLruM4uJili9fHvKzv/e97/Hw\nww+TnZ3Nd77zHe66666Q77nQZDF/e/AnIuZS3stzHz9H/dl6vl397Uv2GUopFatEZFFbk0tZqO+t\n9/ycfwOImwBXSikVmgb4pXOpAjwmu9CVUkqpeKcBrpRSSsUgDXCllFIqBmmAK6WUUjFIA1wppZSK\nQRrgSimlVAyKyaVUlVJKzS+73b6oi5IsZXa79WW/Z8PSPHAR2Q78Tzwt9h8YY/424PUbgZcBt/fU\ni8aY73hfawb6gElg3BizJcRn6DxwpZRSceOSzwMXERvwXeB3gcuAXSKyNsil7xpjNnv/fMfv/CRQ\nbYzZFCq8411NTc1i38Ki0vrXLPYtLKp4rn881x20/hfLyjPwLYDLGNNijBkHfgTcHuS6UL9FiMXP\niVvx/o9Y61+z2LewqOK5/vFcd9D6XywrwboSaPM7bveeC3StiBwWkVdFZL3feQO8KSIHROSei7hX\npZRSSnnN1yC2D4EyY8yQiOwAXgKc3tc+bYw5KSKFeIL8uDHml/P0uUoppVRcijiITUSuAR4xxmz3\nHu8GTOBAtoAyTcDvGGO6A85/GzhnjPn7IGV0BJtSSqm4cjGD2Ky0wA8Aq0XEDpwEdgK7/C8QkSJj\nzGnv11vw/GLQLSLpgM0YMyAiGcAtwKPzXQmllFIq3kQMcGPMhIjcB7zBhWlkx0XkXs/L5vvAF0Tk\nq8A4MAz4djwvAn7qbV0nAv9hjHnjUlREKaWUiidRsx+4Ukoppaxb8OldIpIiIr8RkUMi8rH3uTgi\nkisib4jICRF5XUSyF/reFoqI2ETkoIjs8x7HTd3Bs7iPiHzk/Tew33suLr4HIpItIj8WkeMickxE\nro6juju9P/OD3r/7ROT+eKk/gIh8Q0SOisgREfkPEUmOl/qLyAPe//M/FpH7veeWdN1F5AciclpE\njvidC1lnEdkjIi7v/w+3RHr/BQ9wY8wosM0Yswm4EtjhfW6+G/i5MaYK+AWwZ6HvbQE9ANT6HcdT\n3SH44j7x8j34R+A1Y8w6YCNQR5zU3RhT7/2ZbwZ+BxgEfkqc1F9EVgBfBzYbYzbgeay4iziov4hc\nBvxX4Co8/+/fJiKVLP26/xDPImj+gtbZO/36TmAdsAP4nkRa29YYs2h/gHTgt8Cn8PxHVuQ9XwzU\nLea9XcI6rwLeBKqBfd5zcVF3v+9BE5AfcG7Jfw+ALKAxyPklX/cgdb4FeC+e6g+sAFqAXDzhvQ/4\nTDzUH/gC8C9+xw8B3wSOx0Hd7cARv+OgP29vsH/L77r/BK4O996LskKatwv5EHAKeNMYc8BbodMA\nxphTwPLFuLcF8A94/uH6Dz6Il7r7+C/u8yfec/HwPSgHukTkh95u5O97Z2rEQ90D3QU86/06Lupv\njPkE+B9AK9AB9Bljfk581P8osNXbfZwO3AqUEh91D7Q8RJ0DF03rIPiiaVMWJcCNMZPG04W+Ctji\n7V4JHE235EbXicjngNPGmMOEXnoWlmDdA3zaeLpRbwW+JiJbiYOfP55W12bgn731H8TzW3c81H2K\niCQBvwf82HsqLuovIjl4lqG242mNZ4jIHxAH9TfG1AF/i6f38TXgEDAR7NKFvK8oMec6L+oa5caY\nfqAG2A6cFpEiABEpBs4s4q1dKp8Gfk9E3MBzwE0i8gxwKg7qPsUYc9L7dyeeVfu2EB8//3agzRjz\nW+/xT/AEejzU3d8O4ENjTJf3OF7q/xnAbYzpNsZM4Hn+fx1xUn9jzA+NMVcZY6qBXuAEcVL3AKHq\n3IGnV8JnlfdcSIsxCr3AN+pORNKAz+J5DrIP+CPvZX+IZ3vSJcUY85fGmDJjTAWeBXF+YYz5MvAK\nS7zuPiKSLiLLvF/7Fvf5mPj4+Z8G2kTEt8zwzcAx4qDuAXbh+QXWJ17q3wpcIyKp3sFJN+MZzBoX\n9RfPctqISBnweTyPUOKh7sL0HtdQdd4H7PTOTCgHVgP7w76x92H5ghGRK4C9eH55sAH/1xjzNyKS\nBzyP5zeQFuBOY0zvgt7cAhLPHuoPGmN+L57q7v2H+VM83Ua+xX0ej5fvgYhsBP4VSALcwN1AAnFQ\nd/D8AoenjhXGmHPec3Hxs4ep5aR34ln06hDwJ0AmcVB/EXkXyMNT928YY2qW+s9eRJ7FM2A5HzgN\nfBtPr+OPCVJnEdmDZ7T+OPCAibDwmS7kopRSSsUg3adbKaWUikEa4EoppVQM0gBXSimlYpAGuFJK\nKRWDNMCVUkqpGKQBrpRSSsUgDXCllFIqBmmAK6WUUjHo/wPEe4vrSp5PsAAAAABJRU5ErkJggg==\n", + "text/plain": [ + "" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "plt.figure(figsize=(8, 6))\n", + "plt.plot(nobs_arr, pbminlike_nobs_arr, label='minlike')\n", + "plt.plot(nobs_arr, pbcentral_nobs_arr, label='central')\n", + "plt.legend(loc='lower right')" + ] + }, + { + "cell_type": "code", + "execution_count": 57, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "[ True True True True False False False True True True]\n" + ] + }, + { + "data": { + "text/plain": [ + "array([3, 6], dtype=int64)" + ] + }, + "execution_count": 57, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "xx = (np.arange(10)<4) | (np.arange(10) > 6)\n", + "print(xx)\n", + "np.nonzero(np.diff(xx))[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 58, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.3, 30)" + ] + }, + 
"execution_count": 58, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p_null, nobs\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Power and tests based on normal distribution\n", + "\n", + "\n", + "The following is still messy. The formulas look simple but are a bit confusing. There are also several different version for normal distribution based hypothesis tests and power calculations. The examples try to match up some examples from various references but that is not completely successful yet, either because of bugs in my code or because different versions are used." + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Lachine summarizes sample size calculations for proportions based on the normal distribution if we only consider the power in one tail. In this case we have an explicit formula for the required sample size. This is a good approximation to two sided tests if the probability to be in the small tail is negligible and useful for quick calculations. However, solving the sample size that correctly takes both tails into account can be done numerically without much computational effort." + ] + }, + { + "cell_type": "code", + "execution_count": 59, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# from Lachine 1981 equ (3) and (4)\n", + "\n", + "from scipy import stats\n", + "def sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9):\n", + " crit_alpha, crit_pow = stats.norm.isf(alpha), stats.norm.isf(1 - power)\n", + " return ((crit_alpha * std_null + crit_pow * std_alt) / np.abs(diff))**2\n", + "\n", + "def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05):\n", + " crit_alpha = stats.norm.isf(alpha)\n", + " crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt\n", + " return stats.norm.cdf(crit_pow)" + ] + }, + { + "cell_type": "code", + "execution_count": 60, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.75280935132909166" + ] + }, + "execution_count": 60, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pa = 0.5\n", + "power_normal_greater(pa - p_null, np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa)), 30, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 61, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "29.999189245636149" + ] + }, + "execution_count": 61, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "std_null, std_alt = np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa))\n", + "sample_size_normal_greater(pa - p_null, std_null, std_alt, alpha=0.05, power=0.7528)" + ] + }, + { + "cell_type": "code", + "execution_count": 62, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.27039432496290688" + ] + }, + "execution_count": 62, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.6\n", + "pa = 0.5\n", + "power_normal_greater(pa - p0, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 25, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 63, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.40695068348471369" + ] + }, + "execution_count": 63, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.5\n", + "pa = 0.4\n", + "power_normal_greater(pa - p0, np.sqrt(p0 * 
(1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 64, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.90673162370556504" + ] + }, + "execution_count": 64, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.3\n", + "pa = 0.5\n", + "diff = pa - p0\n", + "power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05)" + ] + }, + { + "cell_type": "code", + "execution_count": 65, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.80742957881382105" + ] + }, + "execution_count": 65, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.5\n", + "pa = 0.5\n", + "diff = pa - p0\n", + "diff = 0.2\n", + "power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.025)\n", + "# 0.80743 PASS manual example Chow, Shao, and Wang (2008) 2-sided S(Phat)" + ] + }, + { + "cell_type": "code", + "execution_count": 66, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.80125278346378259" + ] + }, + "execution_count": 66, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.5\n", + "pa = 0.6\n", + "diff = pa - p0\n", + "power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 153, alpha=0.05)\n", + "# 0.80125 PASS doc example from Ryan (2013) for one-sided alternative" + ] + }, + { + "cell_type": "code", + "execution_count": 67, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "# copied and adjusted from statsmodels.stats.power\n", + "def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, std_alt=1):\n", + " '''Calculate power of a normal distributed test statistic\n", + "\n", + " '''\n", + " d = effect_size\n", + "\n", + " if alternative in ['two-sided', '2s']:\n", + " alpha_ = alpha / 2. 
#no inplace changes, doesn't work\n", + " elif alternative in ['smaller', 'larger']:\n", + " alpha_ = alpha\n", + " else:\n", + " raise ValueError(\"alternative has to be 'two-sided', 'larger' \" +\n", + " \"or 'smaller'\")\n", + "\n", + " pow_ = 0\n", + " if alternative in ['two-sided', '2s', 'larger']:\n", + " crit = stats.norm.isf(alpha_)\n", + " pow_ = stats.norm.sf((crit* std_null - d*np.sqrt(nobs))/std_alt)\n", + " crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit * std_null) / std_alt\n", + " if alternative in ['two-sided', '2s', 'smaller']:\n", + " crit = stats.norm.ppf(alpha_)\n", + " pow_ += stats.norm.cdf((crit* std_null - d*np.sqrt(nobs))/std_alt)\n", + " return pow_ #, (crit* std_null - d*np.sqrt(nobs))/std_alt, (crit* std_null - d*np.sqrt(nobs))/std_alt, crit_pow\n" + ] + }, + { + "cell_type": "code", + "execution_count": 68, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.807430419433 0.192569580567\n" + ] + } + ], + "source": [ + "p0 = 0.5\n", + "pa = 0.5\n", + "alpha = 0.05\n", + "nobs_ = 50\n", + "effect_size = diff = 0.2\n", + "std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa))\n", + "po = normal_power(effect_size, nobs_, alpha, alternative='two-sided', std_null=std_null, std_alt=std_null)\n", + "print(po, 1-po)\n", + "# close to above 0.80742957881382105, closer to pass 0.80743" + ] + }, + { + "cell_type": "code", + "execution_count": 69, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.801252783464\n" + ] + }, + { + "data": { + "text/plain": [ + "0.80125278346378259" + ] + }, + "execution_count": 69, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0 = 0.5\n", + "pa = 0.6\n", + "diff = pa - p0\n", + "effect_size = diff\n", + "nobs_ = 153\n", + "alpha = 0.05\n", + "std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa))\n", + "po = normal_power(effect_size, nobs_, alpha, alternative='larger', std_null=std_null, std_alt=std_alt)\n", + "# 0.80125 PASS doc example from Ryan (2013) for one-sided alternative\n", + "print(power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 153, alpha=0.05))\n", + "po" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "check size (power at null)" ] }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 70, "metadata": { "collapsed": false }, @@ -1570,21 +2435,81 @@ { "data": { "text/plain": [ - "(8.293564511085938e-17, 0.99999999999999989)" + "0.049999999999999975" ] }, - "execution_count": 79, + "execution_count": 70, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null)" + "p0 = 0.6\n", + "pa = 0.6\n", + "diff = pa - p0\n", + "effect_size = diff\n", + "nobs_ = 153\n", + "alpha = 0.05\n", + "std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa))\n", + "po = normal_power(effect_size, nobs_, alpha, alternative='larger', std_null=std_null, std_alt=std_alt)\n", + "po" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + 
"source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## trying more\n", + "\n", + "The rest below is just some unsorted experiments to try a few more things." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "TODO: The following is not correct because when we change the sample size, then the rejection region also changes." ] }, { "cell_type": "code", - "execution_count": 80, + "execution_count": 71, "metadata": { "collapsed": false }, @@ -1592,16 +2517,83 @@ { "data": { "text/plain": [ - "(5.864435705996961e-17, 1.0)" + "[0.047092254594638852,\n", + " 0.047775312714239675,\n", + " 0.051602724514375933,\n", + " 0.05859877650596669,\n", + " 0.068789625299809865,\n", + " 0.082186755965548378,\n", + " 0.098772671226915978,\n", + " 0.118489659792336,\n", + " 0.1412321565494524,\n", + " 0.16684288140347131,\n", + " 0.19511265080608356,\n", + " 0.22578351411938377,\n", + " 0.25855468655666075,\n", + " 0.29309063531712998,\n", + " 0.32903062340607336,\n", + " 0.36599901949754499,\n", + " 0.4036157318615764,\n", + " 0.44150620799796952,\n", + " 0.47931054705529291,\n", + " 0.51669138801043579]" ] }, - "execution_count": 80, + "execution_count": 71, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null)" + "[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can also calculate this in vectorized form for the set of sample sizes and all three tests:" + ] + }, + { + "cell_type": "code", + "execution_count": 72, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.04709225, 0.02625388, 0.07020749],\n", + " [ 0.04777531, 0.03102743, 0.07728123],\n", + " [ 0.05160272, 0.03820442, 0.0883212 ],\n", + " [ 0.05859878, 0.04792633, 0.10324071],\n", + " [ 0.06878963, 0.06032282, 0.12191353],\n", + " [ 0.08218676, 0.07549525, 0.14416465],\n", + " [ 0.09877267, 0.09350311, 0.16976553],\n", + " [ 0.11848966, 0.11435385, 0.19843379],\n", + " [ 0.14123216, 0.1379965 , 0.2298369 ],\n", + " [ 0.16684288, 0.16431907, 0.26359926],\n", + " [ 0.19511265, 0.19314968, 0.29931183],\n", + " [ 0.22578351, 0.22426089, 0.33654338],\n", + " [ 0.25855469, 0.25737665, 0.37485255],\n", + " [ 0.29309064, 0.29218144, 0.41379979],\n", + " [ 0.32903062, 0.32833054, 0.45295869],\n", + " [ 0.36599902, 0.36546115, 0.49192593],\n", + " [ 0.40361573, 0.40320337, 0.53032969],\n", + " [ 0.44150621, 0.4411907 , 0.56783618],\n", + " [ 0.47931055, 0.47906961, 0.60415428],\n", + " [ 0.51669139, 0.51650774, 0.63903825]])" + ] + }, + "execution_count": 72, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])" ] }, { @@ -1615,29 +2607,45 @@ }, { "cell_type": "code", - "execution_count": 4, + "execution_count": null, "metadata": { "collapsed": true }, "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, "source": [ - "?smprop.proportion_confint()" + "## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count." 
] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 73, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.39840953644479782, 0.69032832946419354)" + ] + }, + "execution_count": 73, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "smprop.proportion_confint()" + "smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null)" ] }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 74, "metadata": { "collapsed": false }, @@ -1645,14 +2653,122 @@ { "data": { "text/plain": [ - "0.34470140912721514" + "(-0.56343616981901101, 0.57313791338407638)" ] }, - "execution_count": 11, + "execution_count": 74, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 75, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.79681907288959564, 0.42555611641912894)" + ] + }, + "execution_count": 75, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 76, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(8.293564511085938e-17, 0.99999999999999989)" + ] + }, + "execution_count": 76, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 77, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(5.864435705996961e-17, 1.0)" + ] + }, + "execution_count": 77, "metadata": {}, "output_type": "execute_result" } ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "?smprop.proportion_confint()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], + "source": [ + "smprop.proportion_confint()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": false + }, + "outputs": [], "source": [ "from statsmodels.stats.proportion import proportion_effectsize\n", "es = proportion_effectsize(0.4, 0.5)\n", @@ -1662,22 +2778,11 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "259.154426739506" - ] - }, - "execution_count": 14, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9)" ] @@ -1711,26 +2816,11 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - 
"text/plain": [ - "(0.94849873047984967,\n", - " ((0.75834970530451862,\n", - " 0.86051080550098236,\n", - " -2.5599758686988578,\n", - " 1.6821766224528543),))" - ] - }, - "execution_count": 25, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", @@ -1741,26 +2831,11 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.95000875677852759,\n", - " ((0.75207687201030093,\n", - " 0.86590768457946699,\n", - " -1.7341121433755891,\n", - " 2.3848884863189261),))" - ] - }, - "execution_count": 39, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", @@ -1770,26 +2845,11 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.95811685170327532,\n", - " ((0.75220160721176865,\n", - " 0.86582602622195959,\n", - " -1.812033626524528,\n", - " 2.4628099694678625),))" - ] - }, - "execution_count": 41, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", @@ -1799,23 +2859,11 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.95447058338704227,\n", - " ((158.0, 181.0, 5695.1110612524499, 6528.3652241583422),))" - ] - }, - "execution_count": 49, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", @@ -1825,26 +2873,11 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.91140841850002685,\n", - " ((0.76242701448039996,\n", - " 0.85913192581906617,\n", - " -1.4383338187061427,\n", - " 2.2137816599366142),))" - ] - }, - "execution_count": 55, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", @@ -1872,22 +2905,11 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(49.0, 51.0)" - ] - }, - "execution_count": 71, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs = 0.4, 0.6, 100\n", "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" @@ -1895,22 +2917,11 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(12.0, 28.0)" - ] - }, - "execution_count": 59, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "value, nobs = 0.4, 50\n", "smprop.binom_test_reject_interval(value, nobs, alpha=0.05)" @@ -1918,44 
+2929,22 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.39832112950330101, 0.6016788704966991)" - ] - }, - "execution_count": 70, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "smprop.proportion_confint(50, 100, method='beta')" ] }, { "cell_type": "code", - "execution_count": 72, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(78.0, 84.0)" - ] - }, - "execution_count": 72, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs = 0.7, 0.9, 100\n", "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" @@ -1963,22 +2952,11 @@ }, { "cell_type": "code", - "execution_count": 76, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(0.65848903119285485, ((78.0, 85.0, 1930.0, 2105.0),))" - ] - }, - "execution_count": 76, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", @@ -1988,22 +2966,11 @@ }, { "cell_type": "code", - "execution_count": 78, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "0.61042723749210825" - ] - }, - "execution_count": 78, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" @@ -2011,22 +2978,11 @@ }, { "cell_type": "code", - "execution_count": 79, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "0.71661671146632" - ] - }, - "execution_count": 79, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8\n", "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" @@ -2061,7 +3017,7 @@ }, { "cell_type": "code", - "execution_count": 132, + "execution_count": null, "metadata": { "collapsed": true }, @@ -2082,22 +3038,11 @@ }, { "cell_type": "code", - "execution_count": 140, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "856.38473506679793" - ] - }, - "execution_count": 140, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "# Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes\n", "n_frac1 = 0.5\n", @@ -2118,22 +3063,11 @@ }, { "cell_type": "code", - "execution_count": 134, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "0.89999999999999991" - ] - }, - "execution_count": 134, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "#nobs = 858\n", "power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05)" @@ -2141,22 +3075,11 @@ }, { "cell_type": "code", - "execution_count": 135, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(1.6448536269514729, 1.2815515655446004)" - ] - }, - "execution_count": 135, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": 
[ "alpha=0.05; power=0.9\n", "stats.norm.isf(alpha), stats.norm.isf(1 - power)\n" @@ -2164,22 +3087,11 @@ }, { "cell_type": "code", - "execution_count": 136, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "1.2815515655446004" - ] - }, - "execution_count": 136, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "crit_alpha = stats.norm.isf(alpha)\n", "(np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt" @@ -2187,44 +3099,22 @@ }, { "cell_type": "code", - "execution_count": 137, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "0.89999999999999991" - ] - }, - "execution_count": 137, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "stats.norm.cdf(_)" ] }, { "cell_type": "code", - "execution_count": 138, + "execution_count": null, "metadata": { "collapsed": false }, - "outputs": [ - { - "data": { - "text/plain": [ - "(array([ 30., 49.]), array([ 51., 70.]))" - ] - }, - "execution_count": 138, - "metadata": {}, - "output_type": "execute_result" - } - ], + "outputs": [], "source": [ "smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05)" ] diff --git a/notebooks/proportion_one_power.py b/notebooks/proportion_one_power.py index d37b005..d4f3f83 100644 --- a/notebooks/proportion_one_power.py +++ b/notebooks/proportion_one_power.py @@ -21,7 +21,7 @@ # - size and power of hypothesis tests # -# In[12]: +# In[1]: from __future__ import division # for py2 compatibility, I'm using Python 3.4 import numpy as np @@ -32,9 +32,14 @@ import pandas as pd # to store results with labels +# In[2]: + +get_ipython().magic('matplotlib inline') + + # ## Sample -# In[13]: +# In[3]: p_true = 0.3 nobs = 30 @@ -46,7 +51,7 @@ # Assume we have observed 7 events in a sample of size 30. What are our estimates, confidence interval, and test whether the true proportion = 0.3. -# In[15]: +# In[4]: count = y # alias prop_mle = count / nobs @@ -59,41 +64,41 @@ # **Two sided hypothesis** -# In[16]: +# In[5]: smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided') -# In[19]: +# In[6]: smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided') -# In[20]: +# In[7]: smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided', prop_var=p_null) # **Equivalence** -# In[28]: +# In[8]: low, upp = ci_df.loc['beta', :] smprop.binom_tost(count, nobs, low, upp) -# In[36]: +# In[9]: print('score', smprop.binom_tost(count, nobs, *ci_df.loc['wilson', :])) print('wald ', smprop.binom_tost(count, nobs, *ci_df.loc['normal', :])) -# In[32]: +# In[10]: smprop.proportions_ztost(count, nobs, *ci_df.loc['wilson', :]) -# In[33]: +# In[11]: smprop.proportions_ztost(count, nobs, *ci_df.loc['beta', :]) @@ -108,7 +113,7 @@ # # where p0 = 0.3 -# In[50]: +# In[12]: te = smprop.binom_test(count, nobs, prop=p_null, alternative='larger') tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger') @@ -125,7 +130,7 @@ # # where p0 = 0.3 -# In[51]: +# In[13]: te = smprop.binom_test(count, nobs, prop=p_null, alternative='smaller') tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller') @@ -137,7 +142,7 @@ # We can look at null hypothesis that are further away from the observed proportion to see which hypothesis are rejected. The observed proportion is 0.23, our new null hypothesis value is 0.6. 
-# In[67]: +# In[14]: p_null_ = 0.6 te = smprop.binom_test(count, nobs, prop=p_null_, alternative='smaller') @@ -148,7 +153,7 @@ print('score: ', ts[1]) -# In[65]: +# In[15]: p_null_ = 0.6 te = smprop.binom_test(count, nobs, prop=p_null_, alternative='larger') @@ -161,14 +166,13 @@ # The `smaller` hypothesis is strongly rejected, which means that we reject the null hypothesis that the true proportion is 0.6 or larger in favor of the alternative hypothesis that the true proportion is smaller than 0.6. # -# In the case `larger` alternative, the p-value is very large and we cannot reject the Null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6. +# In the case of the `larger` alternative, the p-value is very large and we cannot reject the null hypothesis that the true proportion is 0.6 (or smaller) in favor of the hypothesis that the true proportion is larger than 0.6. # -# Non-inferiority and superiority tests are special cases of these one-sided tests where the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount 5% below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test. +# Non-inferiority and superiority tests are special cases of these one-sided tests. Often, the specific case is defined in terms of deviations from a benchmark value. The null hypothesis for a non-inferiority test can be defined, for example, by being less than a specified amount, say 5%, below a benchmark proportion. If we reject the test, then we conclude that the proportion is not worse than 5% below the benchmark, at the given confidence level of the test. # **Aside: Inequality Null hypothesis** # -# For most methods the p-values for the hypothesis tests are the same for the case when the null hypothesis is and inequality -# +# In the above definition of the null hypothesis we used an equality. For most methods the p-values for the hypothesis tests are the same for the case when the null hypothesis is an inequality # # The null nypothesis and alternative hypothesis for alternative `'larger'` specify that the true proportion is smaller than or equal to the hypothesized value versus the alternative that it is larger. # @@ -185,6 +189,8 @@ # The score test is an exception to this. If the null hypothesis is a inequality, then the constrained maximum likelihood estimate will depend on whether the constraint of the null hypothesis is binding or not. If it is binding, then the score test is the same as for the test with an equality in the null hypothesis. If the constrained is not binding then the null parameter estimate is the same as the estimate used for the Wald test. # Because the equality is the worst case in these hypothesis test, it does not affect the validity of the tests. However, in the asymptotic tests it would add another option to define the variance used in the calculations, and the standard score test does not take the inequality into account in calculating the variance. This is not implemented, so we restrict ourselves to equality null hypothesis, even though the interpretation is mostly the same as for the inequality null hypothesis. # +# Reference for a score analysis with inequality null hypothesis for the case of comparing two proportions, see ... 
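+# As a quick sketch to check the worst-case claim above: for the exact test with
+# alternative='larger' the p-value P(X >= count | p0) is increasing in the
+# hypothesized proportion, so over an inequality null p <= p0 it is largest at
+# the boundary value.
+
+# In[ ]:
+
+p0_grid = np.linspace(0.1, 0.3, 5)
+[smprop.binom_test(count, nobs, prop=p0_, alternative='larger') for p0_ in p0_grid]
+# the p-values increase with p0_grid and are largest at the boundary p0 = 0.3
+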
+# # In[ ]: @@ -195,7 +201,7 @@ # # We can also use the standard t-test in large samples if we encode the data with 0 for no event and 1 for the success event. The t-test estimates the variance from the data and does not take the relationship between mean and variance explicitly into account. However, by the law of large numbers the mean, i.e. the proportion in the current case, will be asymptotically distributed as normal which can be approximated by the t-distribution. -# In[61]: +# In[16]: import statsmodels.stats.weightstats as smsw yy = np.repeat([0, 1], [nobs - count, count]) @@ -203,31 +209,39 @@ ds.ttest_mean(0.3) -# In[62]: +# In[17]: vars(ds) -# In[63]: +# In[18]: ds.ttest_mean(0.3, alternative='larger') -# In[64]: +# In[19]: ds.ttest_mean(0.3, alternative='smaller') -# In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, at the latter confidence interval. +# In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, which is 0.025 when we use the latter's confidence interval for the equivalence margins. -# In[68]: +# In[20]: ds.ttost_mean(*ci_df.loc['beta', :]) -# In[ ]: +# We used a full sample with individual observations in the above. However, `DescrStatsW` allows us to use weights and we can specify the sample by the frequency of each level of the observation. The results are the same as before. + +# In[21]: + +ds2 = smsw.DescrStatsW([0, 1], weights=[nobs - count, count]) +ds2.ttest_mean(0.3, alternative='smaller') +# In[22]: + +ds2.ttost_mean(*ci_df.loc['beta', :]) # In[ ]: @@ -250,7 +264,7 @@ # **Rejection region** -# In[103]: +# In[23]: rej = np.array([smprop.proportions_ztest(count_, nobs, value=p_null, alternative='two-sided', prop_var=p_null)[1] for count_ in range(nobs + 1)]) @@ -259,7 +273,7 @@ rej_indicator_score = rej_indicator # keep for later use -# In[104]: +# In[24]: rej = np.array([smprop.binom_test(count_, nobs, prop=p_null, alternative='two-sided') for count_ in range(nobs + 1)]) rej_indicator = (rej < 0.05) #.astype(int) @@ -272,14 +286,14 @@ # # We can use the set of values for which the null hypothesis is rejected instead of using a boolean indicator. -# In[105]: +# In[25]: x = np.arange(nobs + 1) x_rej = x[rej_indicator] x_rej_score = x[rej_indicator_score] -# In[113]: +# In[26]: print('binom', x_rej) print('score', x_rej_score) @@ -289,7 +303,7 @@ # For the current case we use the exact binomial distribution to calculate the power. The null hypothesis in this example is a two-sided test for p = 0.3. Use p1 for the proportion at which the power or rejection probability is calculated. First we check the size of the test, i.e. p1 = p_null = 0.3 -# In[114]: +# In[27]: p1 = 0.3 stats.binom.pmf(x_rej, nobs, p1).sum() @@ -297,7 +311,7 @@ # Because we are using the exact test, the probability of rejection under the null is smaller than the required alpha = 0.05. In this example the exact probability is close to the 0.05 threshold. 
In contrast to this, the score test is liberal in this example and rejects with probability 0.07 instead of the required 0.05. -# In[112]: +# In[28]: stats.binom.pmf(x_rej_score, nobs, p1).sum() @@ -306,7 +320,7 @@ # # In the case of the binomial distribution with probability p_null under the null hypothesis has tail probabilities at most alpha / 2 in each tail (for equal tailed hypothesis tests). -# In[129]: +# In[29]: lowi, uppi = stats.binom.interval(0.95, nobs, p_null) lowi, uppi @@ -324,26 +338,26 @@ # Because of the discreteness of the sample space having tail probabilities equal to alpha / 2 is in general not possible. # -# In[130]: +# In[30]: low, upp = lowi, uppi -# In[132]: +# In[31]: stats.binom.ppf(0.025, nobs, p_null), stats.binom.isf(0.025, nobs, p_null) # If we reject at 4 and smaller and reject at 14 and larger, then the probability of rejection is larger than 0.025 in each tail: -# In[133]: +# In[32]: stats.binom.cdf(low, nobs, p_null), stats.binom.sf(upp - 1, nobs, p_null) # If we shrink the rejection region in each tail by one, so we reject at 3 and smaller and reject at 15 and larger, then the probability of rejection is smaller than 0.025 in each tail. The total rejection probability is at 0.026 smaller than 0.05 and shows the typical case that exact tests are conservative, i.e. reject less often than alpha, often considerably less: -# In[137]: +# In[33]: prob_low = stats.binom.cdf(low - 1, nobs, p_null) prob_upp = stats.binom.sf(upp, nobs, p_null) @@ -352,21 +366,21 @@ # In this case we can increase the lower rejection threshold by one and still stay below the total rejection probability of 0.05, although in this case the rejection probability in the lower tail is larger than 0.025. In this example the same also works on the other side by expanding only the rejection region in the upper tail. -# In[138]: +# In[34]: prob_low = stats.binom.cdf(low, nobs, p_null) prob_upp = stats.binom.sf(upp, nobs, p_null) prob_low, prob_upp, prob_low + prob_upp -# In[139]: +# In[35]: prob_low = stats.binom.cdf(low - 1, nobs, p_null) prob_upp = stats.binom.sf(upp - 1, nobs, p_null) prob_low, prob_upp, prob_low + prob_upp -# In[124]: +# In[36]: stats.binom.cdf(upp, nobs, p_null) - stats.binom.cdf(low, nobs, p_null) @@ -374,12 +388,12 @@ # TODO: why does binom_test reject at 4? # binom_test is used from scipy.stats for the two-sided alternative. -# In[142]: +# In[37]: smprop.binom_test(3, nobs, prop=p_null, alternative='smaller'), smprop.binom_test(4, nobs, prop=p_null, alternative='smaller') -# In[144]: +# In[38]: smprop.binom_test(4, nobs, prop=p_null, alternative='two-sided') # we get the same answer as in R @@ -391,7 +405,7 @@ # # The pvalue for the centered test is based on doubling the probability of the smaller tail. Given that it does not exist, we can implement it quickly, and check against R's exactci package, which matches our results. 
-# In[151]: +# In[39]: def binom_test_centered(count, nobs, prop=0.5): """two-sided centered binomial test""" @@ -400,12 +414,12 @@ def binom_test_centered(count, nobs, prop=0.5): return 2 * min(prob_low, prob_upp) -# In[152]: +# In[40]: binom_test_centered(3, nobs, prop=p_null), binom_test_centered(4, nobs, prop=p_null) -# In[153]: +# In[41]: binom_test_centered(13, nobs, prop=p_null), binom_test_centered(14, nobs, prop=p_null) @@ -423,11 +437,11 @@ def binom_test_centered(count, nobs, prop=0.5): > be = binom.exact(14, 30, p = 0.3) > be$p.value [1] 0.0801051 -# ## Back to power +# ## Exact Power # After this more extended detour we go back to our power calculations. So assuming we know the critical values of our rejection region, we can calculate the power using the cdf and sf function of the binomial distribution. -# In[155]: +# In[42]: def power_binom_reject(low, upp, prop, nobs): """ calculate the power of a test given the rejection intervals @@ -466,7 +480,7 @@ def power_binom_reject(low, upp, prop, nobs): return prob_low + prob_upp -# In[162]: +# In[43]: for test, l, u in [('binom ', 4, 15), ('binom_central', 3, 15), ('score ', 4, 14)]: print(test, l, u, power_binom_reject(l, u, p_null, nobs)) @@ -477,6 +491,329 @@ def power_binom_reject(low, upp, prop, nobs): +# In[44]: + +def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), item=None, use_idx=False): + """calculate power for proportion test by explicit numeration of sample space + + + argument `item` is currently to avoid having to figure out the return of test_func + None if return is pvalue, integer for index of pvalue if tuple is returned + + """ + sample_space = np.arange(nobs + 1) + try: + # TODO: how do we vectorize, if res were a instance with pvalue attribute, then it would be easier. + res = test_func(sample_space, nobs, *args) + #if len(res) > 1 and not res.shape == sample_space.shape: + # assume p-value is the second term + if item is not None: + res = res[item] + except Exception: + # assume test_func is not vectorized + if item is None: + res = [test_func(x, nobs, p_null, *args) for x in sample_space] + else: + res = [test_func(x, nobs, p_null, *args)[item] for x in sample_space] + + pvalues = np.asarray(res) + rej_indicator = (pvalues <= alpha) + if use_idx: + # This evaluates the pmf at all points, useful for non-interval rejection regions + x_rej = sample_space[rej_indicator] + power = stats.binom.pmf(x_rej, nobs, prop).sum() + return power, x_rej + else: + # use critical values, assumes standard two tails, two-sided only for now + c = np.nonzero(np.diff(rej_indicator))[0] + if len(c) == 2: + low = c[0] + upp = c[1] + 1 + else: + raise NotImplementedError('currently only two sided hypothesis tests') + + power = power_binom_reject(low, upp, prop, nobs) + + return power, (low, upp) + + + +# We can use this function to check the size of the two binomial tests. Both results are what we already had before and agree with the results of R packages. 
+ +# In[45]: + +print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs)) +print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs, use_idx=True)) +# 0.04709225 R library MESS: power.binom.test(n = 30, p0 = 0.3, pa = 0.3) + + +# In[ ]: + + + + +# In[46]: + +print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs)) +print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs, use_idx=True)) +# 0.02625388 from exactci: powerBinom(n = 30, p0 = 0.3, p1 = 0.3, strict=TRUE) + + +# We obtain the power of the test at a proportion that is different from the proportion of the null hypothesis. Using the minlike binomial test the power if the true proportion is 0.5 is 0.57, the power for the central binomial test differs only in the 5th decimal from this. + +# In[47]: + +print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs)) +print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs, use_idx=True)) +# 0.572262 R library MESS: power.binom.test(n = 30, p0 = 0.3, pa = 0.5) + + +# In[48]: + +print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs)) +print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs, use_idx=True)) +# 0.5722364 from exactci: powerBinom(n = 30, p0 = 0.3, p1 = 0.5, strict=TRUE) + + +# surprisingly this also works in vectorized for to calculate the power for a set of alternatives. + +# In[49]: + +p1 = np.linspace(0.1, 0.8, 15) +pbminlike = power_binom_proptest(smprop.binom_test, p_null, p1, nobs) +pbcentral = power_binom_proptest(binom_test_centered, p_null, p1, nobs) +pow_bt = np.column_stack((p1, pbminlike[0], pbcentral[0])) +pow_bt + + +# to check this let's use a list comprehension and explicitly loop over all alternative proportions + +# In[50]: + +[power_binom_proptest(smprop.binom_test, p_null, p1_, nobs) for p1_ in p1] + + +# And finally a plot. + +# In[51]: + +import matplotlib.pyplot as plt +plt.figure(figsize=(8, 6)) +plt.plot(pow_bt[:, 0], pow_bt[:, 1], label='minlike') +plt.plot(pow_bt[:, 0], pow_bt[:, 2], label='central') +plt.legend(loc='lower right') +#plt.show() + + +# From the plot we can see that both binomial test have the same power for large true proportions, but the standard minlike binomial test is more powerful than the central binomial test for small true proportions. For example, if the true proportion is 0.15, then the probability of rejecting the null hypothesis are 0.52 versus 0.32. 
We can verify that the two R packages produce the same result + +# In[52]: + +# 0.5244758 power.binom.test(n = 30, p0 = 0.3, pa = 0.15) +# 0.321667 powerBinom(n = 30, p0 = 0.3, p1 = 0.15, strict=TRUE) +print(pow_bt[1,:]) + + +# ### Power as a function of nobs + +# In[53]: + +nobs_arr = np.arange(30, 100) +#this doesn't work vectorized in nobs +pbcentral_nobs = [power_binom_proptest(binom_test_centered, p_null, 0.5, nobs_) for nobs_ in nobs_arr] +pbcentral_nobs + + +# In[54]: + +pbminlike_nobs = [power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs_) for nobs_ in nobs_arr] +pbminlike_nobs + + +# In[55]: + +pbcentral_nobs_arr, rej_minlike = list(zip(*pbcentral_nobs)) +pbcentral_nobs_arr +pbminlike_nobs_arr, rej_minlike = list(zip(*pbminlike_nobs)) +np.column_stack((nobs_arr, pbminlike_nobs_arr, pbcentral_nobs_arr)) + + +# In[56]: + +plt.figure(figsize=(8, 6)) +plt.plot(nobs_arr, pbminlike_nobs_arr, label='minlike') +plt.plot(nobs_arr, pbcentral_nobs_arr, label='central') +plt.legend(loc='lower right') + + +# In[57]: + +xx = (np.arange(10)<4) | (np.arange(10) > 6) +print(xx) +np.nonzero(np.diff(xx))[0] + + +# In[58]: + +p_null, nobs + + +# ## Power and tests based on normal distribution +# +# +# The following is still messy. The formulas look simple but are a bit confusing. There are also several different version for normal distribution based hypothesis tests and power calculations. The examples try to match up some examples from various references but that is not completely successful yet, either because of bugs in my code or because different versions are used. + +# Lachine summarizes sample size calculations for proportions based on the normal distribution if we only consider the power in one tail. In this case we have an explicit formula for the required sample size. This is a good approximation to two sided tests if the probability to be in the small tail is negligible and useful for quick calculations. However, solving the sample size that correctly takes both tails into account can be done numerically without much computational effort. 
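+# As a minimal sketch of that numerical approach (the helper below is ad hoc,
+# not a statsmodels function): solve for the sample size at which the two-sided
+# normal-approximation power, with both tails included, reaches the requested
+# power, using a scalar root finder.
+
+# In[ ]:
+
+from scipy import optimize
+
+def sample_size_normal_twosided(diff, std_null, std_alt, alpha=0.05, power=0.9):
+    # find nobs such that the two-sided power (upper plus lower tail) equals `power`
+    crit = stats.norm.isf(alpha / 2)
+    def power_gap(nobs):
+        pow_upp = stats.norm.sf((crit * std_null - diff * np.sqrt(nobs)) / std_alt)
+        pow_low = stats.norm.cdf((-crit * std_null - diff * np.sqrt(nobs)) / std_alt)
+        return pow_upp + pow_low - power
+    # the bracket assumes the target power is reachable for the given nonzero diff
+    return optimize.brentq(power_gap, 2, 1e6)
+
+# For diff = 0.2 and std_null = std_alt = 0.5 this gives roughly nobs = 50 at
+# power 0.807, consistent with the two-sided power checked further below.
+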
+ +# In[59]: + +# from Lachine 1981 equ (3) and (4) + +from scipy import stats +def sample_size_normal_greater(diff, std_null, std_alt, alpha=0.05, power=0.9): + crit_alpha, crit_pow = stats.norm.isf(alpha), stats.norm.isf(1 - power) + return ((crit_alpha * std_null + crit_pow * std_alt) / np.abs(diff))**2 + +def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): + crit_alpha = stats.norm.isf(alpha) + crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt + return stats.norm.cdf(crit_pow) + + +# In[60]: + +pa = 0.5 +power_normal_greater(pa - p_null, np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa)), 30, alpha=0.05) + + +# In[61]: + +std_null, std_alt = np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa)) +sample_size_normal_greater(pa - p_null, std_null, std_alt, alpha=0.05, power=0.7528) + + +# In[62]: + +p0 = 0.6 +pa = 0.5 +power_normal_greater(pa - p0, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 25, alpha=0.05) + + +# In[63]: + +p0 = 0.5 +pa = 0.4 +power_normal_greater(pa - p0, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05) + + +# In[64]: + +p0 = 0.3 +pa = 0.5 +diff = pa - p0 +power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05) + + +# In[65]: + +p0 = 0.5 +pa = 0.5 +diff = pa - p0 +diff = 0.2 +power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.025) +# 0.80743 PASS manual example Chow, Shao, and Wang (2008) 2-sided S(Phat) + + +# In[66]: + +p0 = 0.5 +pa = 0.6 +diff = pa - p0 +power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 153, alpha=0.05) +# 0.80125 PASS doc example from Ryan (2013) for one-sided alternative + + +# In[67]: + +# copied and adjusted from statsmodels.stats.power +def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, std_alt=1): + '''Calculate power of a normal distributed test statistic + + ''' + d = effect_size + + if alternative in ['two-sided', '2s']: + alpha_ = alpha / 2. 
#no inplace changes, doesn't work + elif alternative in ['smaller', 'larger']: + alpha_ = alpha + else: + raise ValueError("alternative has to be 'two-sided', 'larger' " + + "or 'smaller'") + + pow_ = 0 + if alternative in ['two-sided', '2s', 'larger']: + crit = stats.norm.isf(alpha_) + pow_ = stats.norm.sf((crit* std_null - d*np.sqrt(nobs))/std_alt) + crit_pow = (np.sqrt(nobs) * np.abs(diff) - crit * std_null) / std_alt + if alternative in ['two-sided', '2s', 'smaller']: + crit = stats.norm.ppf(alpha_) + pow_ += stats.norm.cdf((crit* std_null - d*np.sqrt(nobs))/std_alt) + return pow_ #, (crit* std_null - d*np.sqrt(nobs))/std_alt, (crit* std_null - d*np.sqrt(nobs))/std_alt, crit_pow + + +# In[68]: + +p0 = 0.5 +pa = 0.5 +alpha = 0.05 +nobs_ = 50 +effect_size = diff = 0.2 +std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)) +po = normal_power(effect_size, nobs_, alpha, alternative='two-sided', std_null=std_null, std_alt=std_null) +print(po, 1-po) +# close to above 0.80742957881382105, closer to pass 0.80743 + + +# In[69]: + +p0 = 0.5 +pa = 0.6 +diff = pa - p0 +effect_size = diff +nobs_ = 153 +alpha = 0.05 +std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)) +po = normal_power(effect_size, nobs_, alpha, alternative='larger', std_null=std_null, std_alt=std_alt) +# 0.80125 PASS doc example from Ryan (2013) for one-sided alternative +print(power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 153, alpha=0.05)) +po + + +# check size (power at null) + +# In[70]: + +p0 = 0.6 +pa = 0.6 +diff = pa - p0 +effect_size = diff +nobs_ = 153 +alpha = 0.05 +std_null, std_alt = np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)) +po = normal_power(effect_size, nobs_, alpha, alternative='larger', std_null=std_null, std_alt=std_alt) +po + + +# In[ ]: + + + + +# In[ ]: + + + + # In[ ]: @@ -493,14 +830,14 @@ def power_binom_reject(low, upp, prop, nobs): # TODO: The following is not correct because when we change the sample size, then the rejection region also changes. -# In[164]: +# In[71]: [power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)] # We can also calculate this in vectorized form for the set of sample sizes and all three tests: -# In[166]: +# In[72]: power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None]) @@ -517,27 +854,27 @@ def power_binom_reject(low, upp, prop, nobs): # ## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count. 
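+# As a minimal sketch of the intended two-sample usage (two groups of nobs = 30
+# observations each), count and nobs are passed as arrays of the same length:
+
+# In[ ]:
+
+count2 = np.array([6, 7])
+nobs2 = np.array([nobs, nobs])
+smprop.proportions_ztest(count2, nobs2, value=0, alternative='two-sided')
+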
-# In[81]: +# In[73]: smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null) -# In[77]: +# In[74]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null) -# In[78]: +# In[75]: smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null) -# In[79]: +# In[76]: smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null) -# In[80]: +# In[77]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null) @@ -547,7 +884,7 @@ def power_binom_reject(low, upp, prop, nobs): -# In[4]: +# In[ ]: get_ipython().magic('pinfo smprop.proportion_confint') @@ -557,7 +894,7 @@ def power_binom_reject(low, upp, prop, nobs): smprop.proportion_confint() -# In[11]: +# In[ ]: from statsmodels.stats.proportion import proportion_effectsize es = proportion_effectsize(0.4, 0.5) @@ -565,7 +902,7 @@ def power_binom_reject(low, upp, prop, nobs): # R pwr 0.3447014091272153 -# In[14]: +# In[ ]: smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9) @@ -585,7 +922,7 @@ def power_binom_reject(low, upp, prop, nobs): -# In[25]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -594,7 +931,7 @@ def power_binom_reject(low, upp, prop, nobs): -# In[39]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -602,7 +939,7 @@ def power_binom_reject(low, upp, prop, nobs): critval_continuity=0) -# In[41]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -610,7 +947,7 @@ def power_binom_reject(low, upp, prop, nobs): critval_continuity=0) -# In[49]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -618,7 +955,7 @@ def power_binom_reject(low, upp, prop, nobs): critval_continuity=0) -# In[55]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -636,30 +973,30 @@ def power_binom_reject(low, upp, prop, nobs): -# In[71]: +# In[ ]: low, upp, nobs = 0.4, 0.6, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[59]: +# In[ ]: value, nobs = 0.4, 50 smprop.binom_test_reject_interval(value, nobs, alpha=0.05) -# In[70]: +# In[ ]: smprop.proportion_confint(50, 100, method='beta') -# In[72]: +# In[ ]: low, upp, nobs = 0.7, 0.9, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[76]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -667,13 +1004,13 @@ def power_binom_reject(low, upp, prop, nobs): critval_continuity=0) -# In[78]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) -# In[79]: +# In[ ]: low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8 smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) @@ -694,7 +1031,7 @@ def power_binom_reject(low, upp, prop, nobs): -# In[132]: +# In[ ]: # from Lachine 1981 equ (3) and (4) @@ -709,7 +1046,7 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): return stats.norm.cdf(crit_pow) -# In[140]: +# In[ ]: # Note for two sample 
comparison we have to adjust the standard deviation for unequal sample sizes n_frac1 = 0.5 @@ -728,30 +1065,30 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): nobs -# In[134]: +# In[ ]: #nobs = 858 power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05) -# In[135]: +# In[ ]: alpha=0.05; power=0.9 stats.norm.isf(alpha), stats.norm.isf(1 - power) -# In[136]: +# In[ ]: crit_alpha = stats.norm.isf(alpha) (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt -# In[137]: +# In[ ]: stats.norm.cdf(_) -# In[138]: +# In[ ]: smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05) From e44e07690e0813c9d3fe16245c7e32d1d2aaf5b3 Mon Sep 17 00:00:00 2001 From: Josef Date: Tue, 22 Dec 2015 09:00:25 -0500 Subject: [PATCH 3/4] ENH: one proportion, notebook has almost all pieces, not clean --- notebooks/proportion_one_power.ipynb | 1021 +++++++++++++++++++++----- notebooks/proportion_one_power.py | 183 ++++- 2 files changed, 974 insertions(+), 230 deletions(-) diff --git a/notebooks/proportion_one_power.ipynb b/notebooks/proportion_one_power.ipynb index f1e4f08..58044b9 100644 --- a/notebooks/proportion_one_power.ipynb +++ b/notebooks/proportion_one_power.ipynb @@ -1411,7 +1411,7 @@ }, "outputs": [], "source": [ - "def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), item=None, use_idx=False):\n", + "def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwds=None, item=None, use_idx=False):\n", " \"\"\"calculate power for proportion test by explicit numeration of sample space\n", " \n", " \n", @@ -1419,6 +1419,9 @@ " None if return is pvalue, integer for index of pvalue if tuple is returned\n", " \n", " \"\"\"\n", + " if kwds is None:\n", + " kwds = {}\n", + " \n", " sample_space = np.arange(nobs + 1)\n", " try:\n", " # TODO: how do we vectorize, if res were a instance with pvalue attribute, then it would be easier.\n", @@ -1430,9 +1433,9 @@ " except Exception:\n", " # assume test_func is not vectorized\n", " if item is None:\n", - " res = [test_func(x, nobs, p_null, *args) for x in sample_space]\n", + " res = [test_func(x, nobs, p_null, *args, **kwds) for x in sample_space]\n", " else:\n", - " res = [test_func(x, nobs, p_null, *args)[item] for x in sample_space]\n", + " res = [test_func(x, nobs, p_null, *args, **kwds)[item] for x in sample_space]\n", " \n", " pvalues = np.asarray(res)\n", " rej_indicator = (pvalues <= alpha)\n", @@ -1678,7 +1681,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 51, @@ -1689,7 +1692,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAegAAAFwCAYAAABzZegiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VFXixvHvSSUBAgEpAoIRRAQFRZqydKU3KQIqIAqi\ngL3iWlgLltWfDUEsIKICikhRqkIEEQxLU6RLB0Gk94TM+f0xwc0iZZK5kzvl/TxPHpNwc+a9huTl\nnnOLsdYiIiIiwSXK7QAiIiLydypoERGRIKSCFhERCUIqaBERkSCkghYREQlCKmgREZEgdN6CNsZ8\naIzZZYz5+RzbvGWMWWeMWWaMucrZiCIiIpHHlyPokUCzs/2hMaYFUN5aeynQF3jXoWwiIiIR67wF\nba39Adh3jk3aAR9nbfsTUMgYU8KZeCIiIpHJiTXo0sDWbB9vz/qciIiI5JJOEhMREQlCMQ6MsR24\nKNvHZbI+9zfGGN34W0REIo611uT0a3wtaJP1diaTgf7AOGNMHWC/tXbX2QYK54dzDBo0iEGDBrkd\nI2C0f6ErnPcNwnf/jh7P4P0ZP/LB0H9Tql5d9h3bx8H0fRzO3MdRzz6Om31kRO/jZOw+iDuESU8i\nOiOZ2Mxk8tlkEqOSKRiTTFJcMsn5kimaP5niBZMpUSiZUsnJlC1WhHLFkyl9QRJxsdGu7We4fv9O\nMSbH3Qz4UNDGmM+AhkBRY8wW4BkgDrDW2vestVONMS2NMeuBI0CvXCUREREWrdnG29Om8e3mafye\nbzaJxyuQcNiSeHw/RRKKcOkF5YOyZMV55y1oa+3NPmwzwJk4IiKR5fCxdIZP+4Gxi6fxy7HppMf9\nTtmMprS99EbuafkuVS4uHvZHmHJmTqxBS5aGDRu6HSGgtH+hK5z3DUJv/xas3MKQGdOYvWUaOxPm\nkP9YJWoUasGQeu/To0nNvx0Jh9r+5VS4719umbxcEzbG2HBegxYROZODR07w7rR5fL5kGr8cn0ZG\n3G7KZTSjxaUtuKdlUy4vW8ztiBJAxphcnSSmghYRCYAfVmzinRnTmLNtGrsSvqfAscrULNyCnte1\n4JbG1xATratcI4UKWkTERfsPH2fY1Ll8sXQav6ZPIyNmHymZzWl5aXPubdWUS8sUdTuiuEQFLSKS\nx2Yv+41hs6Yxd8d0/kiYS8GjV1KrSAtuq9uCrg2v1lGyACpoEZE8MWvxOh4ZP4SV6dPIjD7EJZ7m\ntLysOfe2uoHypYq4HU+CUG4LWmdxi4j46OlPvub5X27nH/nuYlSzcXSuV01HyRIwKmgRkfM4memh\n+QuDmXPoXd5tPIk7W1zrdiSJACpoEZFz2LHnEDVfuI2DdgeL+qVR/dJSbkeSCKG5GRGRs/hu6Xou\neeFaCkYXYfvzqSpnyVMqaBGRM3h+7HRuGFuXDmUGsPLl90jKH+92JIkwmuIWEcnG47G0evEVZh54\nk7fqjWdAm3puR5IIpYIWEcnyx74j1HjudvbYDSy8K42al5VxO5JEME1xi4gAqcs3UO7Z64iLSmD7\nc/NUzuI6FbSIRLxXxn9Lk8+uo1Wp3qx9ZSSFC+RzO5KIprhFJHJ5PJZ2L/8fU/e9ymvXjeX+9g3d\njiTyFxW0iESkPw8cpcazffjDs4q5vRdSt0o5tyOJ/A9NcYtIxJn/62bKDvoHBsOWQT+onCUoqaBF\nJKK8MTGV+qPqcEOJW/nt36O5oFCi25FEzkhT3CISETweS+dX32binsG8WOsTHu10vduRRM5JBS0i\nYW//4ePUGHQX2zOXMqfXAupXTXE7ksh5qaBFJKwtWrONRu92oIhJYfPTP1I8Ob/bkUR8ojVoEQlb\nQ7/+gTof1qLeBR3Z9OpYlbOEFB1Bi0jY8Xgst7z+LuP+GMSz1UfxZNfmbkcSyTEVtIiElYNHTlBr\n0AA2ZS5gVvf5NLm6gtuRRHJFBS0iYWPJuh00GNqRJFOKDf9cQKmiBd2OJJJrWoMWkbDw3rQF1Hyv\nFnWKtGbzv79QOUvI0xG0iIS8nm9+wOgdT/BktRE8e2trt+OIOEIFLSIhrddbIxiz5RW+6TqPFjUv\nczuOiGOMtTbvXswYm5evJyLhbcXGXVR990o+b/0tnepVdTuOyBkZY7DWmhx/nQpaREJVykO3Uixf\nKdJeeMXtKCJnlduC1hS3iISkV8Z/y1bzAz89/KvbUUQCQmdxi0jI2X/4OE8uuJsnrhqiu4NJ2NIU\nt4iEnPrPPM2GQyvZ9n/j3Y4icl6a4haRiDA1bTU/nBhG2l3L3I4iElCa4haRkOHxWG7+7C46FH2a\nGhVLux1HJKBU0CISMu4cOooMc4TP7u/ndhSRgNMUt4iEhDVb/2Tk1sf5uM1U4mKj3Y4jEnA6SUxE\nQsKlD/eiYGxhlrz4uttRRHJEJ4mJSNh6Y2IqG813bNE1zxJBtAYtIkHt4JETPDbvLh654i09oUoi\niqa4RSSoNXn2OVbu+w+/vz7J7SgiuaIpbhEJO7MWr2PO0TeZ32eJ21FE8pymuEUkKHk8li6j76ZN\n4Se4tnJZt+OI5DkVtIgEpQHvfcZxs4dxD97rdhQRV2iKW0SCzm879jJ8w8N80GIS+eL0a0oik04S\nE5Ggc/mjdxIbFcfPLw1xO4qI33SSmIiEhWHfzGct37DxwZVuRxFxldagRSRoHD6WzgPf9eW+y96g\nbPFCbscRcZUKWkSCRqfX/o8kW5ZXe3VyO4qI6zTFLSJBIXX5BmYefpXUXouIisrxcp1I2NERtIi4\nzuOxdP6oP80KPkL9qiluxxEJCipoEXHdQyO+4JDZxpcPPeh2FJGgoSluEXHV5l37eWvtAwy9/gsS\n88W6HUckaOg6aBFx1ZWP9eekPcmqV4a7HUUkIELmOmiPB6I0sS4iwIczfmKlncD6B3TNs8jp8rwq\nl+ihNCICHE8/yYDpfbnrktdIuTDZ7TgiQSfPC3rGjLx+RREJRp1fe5NEW4y37+zmdhSRoORTQRtj\nmhtjVhtj1hpjHjvDnycZYyYbY5YZY34xxtx2trFU0CIy/9fNfHPgRT7vOUzXPIucxXlPEjPGRAFr\ngSbADmAR0NVauzrbNgOBJGvtQGPMBcAaoIS19uRpY9kCBSzbt0NSksN7IiIhweOxlHqoHVck1+Lb\np590O45IwOX2JDFfjqBrAeustZuttRnAWKDdadtYoGDW+wWBPaeX8ynXXguzZ+c0poiEi4EfT2Sf\nWceEhx5xO4pIUPOloEsDW7N9vC3rc9kNASobY3YAy4H7zjZYs2aa5haJVDv2HOK1X+/l1YbDScof\n73YckaDm1ElizYCl1tpSwNXAO8aYAmfcMKugdTm0SORp9epTXMIN3NO2vttRRIKeL9dBbwfKZvu4\nTNbnsusFvAhgrf3NGLMRqAT85/TBvvhiEH/+CffeCx07NqRhw4a5Ci4ioeWT7xaz3DOGVff+6nYU\nkYBKTU0lNTXV73F8OUksGu9JX02A34E0oJu1dlW2bd4B
/rDW/ssYUwJvMVez1u49bSxrreX22+Hq\nq+Gee/zOLyIhID0jk+RHa3NzhXt4v39Pt+OI5KmAnSRmrc0EBgAzgV+BsdbaVcaYvsaYO7M2ex64\nzhjzMzALePT0cs5O69AikaXb6+8QZwsy/O4ebkcRCRmu3It7zx5ISYHduyFe54mIhLVFa7ZRe+RV\nfN3hB1rWquR2HJE8F8jLrBxXtChUrgzz57vx6iKSl9oPv496+fqrnEVyyLXHVmiaWyT8PTV6CrvN\nL0x6eKDbUURCjgpaRALij31HePHnATx/7VAKF8jndhyRkOPa86BPnoTixWHlSihZMs8iiEgeqfnP\nR9hzfCcbXhvtdhQRV4XUGjRATAw0bgwzZ7qVQEQC5fO5y1l8chRT7nnN7SgiIcu1ggZNc4uEo/SM\nTG7/qi/dSw2mysXF3Y4jErJcL+hZs8DjcTOFiDhp4MdfAR4+HHC721FEQpqrBV22rPeSqyVL3Ewh\nIk768Jch3FH5IWKiXf31IhLyXP8Jat5c09wi4eLLH37hUNw6Xuzewe0oIiHP9YLWOrRI+Hhy8hAa\n5O9LYr5Yt6OIhDzXLrM65ehRKFECtm+HpKQ8iyIiDtv4+z7Kv3UJy/qsouolunZS5JSQu8zqlMRE\nuPZamD3b7SQi4o/7R31E2fSWKmcRh7he0KBpbpFQdzLTw9Td7/DE9QPcjiISNoKqoPNwtl1EHPTC\nuOnEeQrTu1kdt6OIhI2gKOgqVSA9HdavdzuJiOTGkEVv0638AKKicrzMJiJnERQFbQw0bappbpFQ\nNGvxOvbELebfPbu4HUUkrARFQYPWoUVC1WPjh1I77g6SCya4HUUkrLh+mdUpe/ZASgrs3g3x8XkW\nSUT8sHPvYUq9Uo553ZdQt0o5t+OIBKWQvczqlKJFoXJlmD/f7SQi4qsHP/qEkicaqJxFAiBoCho0\nzS0SSjwey4RtQ3ioni6tEgkEFbSI5Mobk1IBywPtG7kdRSQsBVVB16oFW7bAzp1uJxGR83lt3hBu\nLK1Lq0QCJagKOiYGGjeGmTPdTiIi57Jg5RZ+j5/D6726ux1FJGwFVUGDprlFQsFDY96lGt0pWaSA\n21FEwlZQFvSsWeDxuJ1ERM5k/+HjLEz/gJc69nc7ikhYC7qCLlvWe8nV0qVuJxGRM3lk1DiKpFen\nWY2KbkcRCWtBV9CgaW6RYOXxWD5b/zYDatzjdhSRsBeUBd28OUyf7nYKETndyJlppEfv44mbmrsd\nRSTsBWVB16/vneI+eNDtJCKS3fOz3qbFBf2Ii412O4pI2AvKgk5MhGuvhdmz3U4iIqes2LiLzXHf\n8GbP292OIhIRgrKgQevQIsHmvtHvc1lmZ1IuTHY7ikhECPqCzsOHbYnIWRw9nkHqoXd5ro3uuy2S\nV4K2oKtUgfR0WL/e7SQi8s9PJlIwozyd6lV1O4pIxAjagjYGmjbVNLdIMBix4m3uuFJHzyJ5KWgL\nGrQOLRIMPp+7nMOxG3julvZuRxGJKEFd0NdfD3Pneqe6RcQdT095h0YF7yIxX6zbUUQiSlAXdNGi\nUKkSzJ/vdhKRyPTbjr2sjfmCN3r0cTuKSMQJ6oIG3VVMxE33jxrJxemtueLiEm5HEYk4QV/QWocW\ncUd6RibT9wzlyWY6OUzEDUFf0LVqwZYtsHOn20lEIsvz46YRl1mEXjfUcjuKSEQK+oKOiYHGjWHm\nTLeTiESWof8Zwi0V7sEY43YUkYgU9AUNmuYWyWvTF61lb/xSXul5k9tRRCJWyBT0rFng8bidRCQy\nPD7hHa6N703hAvncjiISsWLcDuCLsmW9l1wtXQrXXON2GpHwtmPPIX5mNAu6LXc7ikhEC4kjaNA0\nt0heefCj0Vx4ohG1L7/I7SgiEU0FLSJ/8XgsX20fwiP173E7ikjEC5mCbtAAliyBgwfdTiISvv7v\nqzkYori3bQO3o4hEvJAp6MREuPZamD3b7SQi4ev/fnibjhcNICpKl1aJuC3PC3rX4V25/lpNc4sE\nzvxfN7Mzfi6v9bzV7SgiggsFPWfTnFx/7amCttbBQCICwMNj3uWqqB6ULFLA7SgighsFvTH3BV2l\nivfRk+vXOxhIRNh78Bg/ZXzIvzv3dzuKiGTJ84KevSn3i8jGQNOmmuYWcdojo8ZxQXoNmlxdwe0o\nIpIlzwt6//H9bD2wNddfr3VoEWd5PJYxG95mQC09tUokmOR5QTe6uJFf69DXXw9z53qnukXEfx9M\nX8jJ6AM8cVNzt6OISDYhV9BFi0KlSjB/voOhRCLY4O+G0LJYf2KiQ+aqS5GIkOc/kY1TGjN742ys\nH6dia5pbxBnLf9vJlripvHlbL7ejiMhp8rygKxatSEZmBhv3b8z1GM2bw/TpDoYSiVD3f/Iel3u6\nUK5EYbejiMhp8rygjTE0SmnE7I25P5u7Vi3YsgV27nQwmEiEOXIsg7lHhvNcW11aJRKMfCpoY0xz\nY8xqY8xaY8xjZ9mmoTFmqTFmhTHmnIvMjS9u7Nc6dEwMNG4MM2fmegiRiPfEJxNISq9Ih7pXuh1F\nRM7gvAVtjIkChgDNgCpAN2NMpdO2KQS8A7S21l4BdD7XmKeOoLUOLeKej34dQu9qurRKJFj5cgRd\nC1hnrd1src0AxgLtTtvmZuBLa+12AGvtn+caMKVwCvHR8azZsyY3mQFvQc+aBR5ProcQiVhjU5dx\nJHYTz91y+o+yiAQLXwq6NJD9ziLbsj6XXUWgiDFmjjFmkTGm+7kGdGIdumxZ7yVXS5fmegiRiPXM\nN0NonHQ3+eJi3I4iImfh1EliMUB1oAXQHHjKGHPOewb6uw4NmuYWyY312/eyLuZL3ujR2+0oInIO\nvvzzeTtQNtvHZbI+l9024E9r7XHguDFmLlAN+NtjLQYNGgTAwRMHmbl3Jp5OHqJM7v6d0KwZvPQS\nPPFErr5cJCLdN+pDUjLaUrlccbejiISl1NRUUlNT/R7HnO9ELWNMNLAGaAL8DqQB3ay1q7JtUwl4\nG+/RczzwE9DFWrvytLFs9ter+HZFxt80nqolquYq/NGjUKIEbN8OSUm5GkIkoqRnZJJ/YAU+aP45\nPa+v6XYckYhgjMFaa3L6dec9dLXWZgIDgJnAr8BYa+0qY0xfY8ydWdusBmYAPwMLgfdOL+czaXSx\nf+vQiYlQpw7M8W+mXCRiPDt2KvGZxVXOIiHAp7lla+10a+1l1tpLrbUvZX1uuLX2vWzbvGqtrWKt\nrWqtfduXcRun+L8OrbuKifhu2OK36V7xHrdjiIgPXL07fsOLGzJ381wyPZm5HuPUiWJ+XFItEhGm\npq1mf/xyXu5xztsUiEiQcLWgSxQoQemCpVm6M/fXSlWp4n305Pq/nY4mItkNnDCUa/P1ISl/vNtR\nRMQHrj9fzt91aGOgaVNdbiVyLtv/PMQv5hNev+Uut6OIiI9cL2gn1qF1PbTIuT3w0ceUPtGEmhXL\nuB1FRHzkekE
3uLgB87fMJyMzI9djXH89zJ3rneoWkf/l8Vgm7RjCow11cphIKHG9oIskFKFCkQos\n2rEo12MULQqVKsH8+Q4GEwkTb02eiyGa/q3ruR1FRHLA9YIG/9ehQdPcImfz9g8jaF7iDqKicnyf\nBBFxUVAUtNahRQJjyx8H2BA7iZduvtXtKCKSQ0FR0PXK1eOnbT9x/OTxXI9RuzZs3gw7dzoYTCTE\nPfHpOEqfuJ5KFxVzO4qI5FBQFHRSfBJXFL+ChdsW5nqMmBho3BhmznQwmEiIm7h5BHfWvN3tGCKS\nC0FR0KB1aBGnTfzxV47FbeWxTk3djiIiuRA0Be3UOvSsWeDxOBRKJIQ99/UI6uS7jfhYX54qKyLB\nJmgKum7Zuiz9fSlH0o/keoyyZb2XXC3N/Z1DRcLC4WPpLM38hGc79HI7iojkUtAUdGJsItUvrM78\nrf5dzKxpbhF4duw3JKVXoslVFdyOIiK5FDQFDd516DkbdbmViL8++vlDulx6h9sxRMQPQVXQjVMa\nM3uTfyeKNWgAS5bAwYMOhRIJMYvX7uDPfD/y/M0d3Y4iIn4IqoKuU6YOK3ev5MDxA7keIzER6tSB\nOf4diIuErCfGfcxlnk4UK5zf7Sgi4oegKuj4mHhql67NvC3z/BpH09wSqTwey5z9I3i4sa59Fgl1\nQVXQ4Mw6dPPmMH06WOtQKJEQ8c6UH4iysfS6obbbUUTET0FX0E6sQ1ep4n305Pr1DoUSCRFvzhtB\ns+K368EYImEg6Aq6RqkabNi3gT1H9+R6DGOgaVNNc0tk2frHQX6LnciL3bq7HUVEHBB0BR0bHUvd\ni+ry/ebv/RqnRQv45huHQomEgH9+9jmlTjSicrnibkcREQcEXUGDc9dDz58Phw45FEokyH21aQR9\naujkMJFwEZQF7cQ6dFIS1K3rPVlMJNxNXrCKo3GbeLxTc7ejiIhDgrKgryp5FTsO7WDX4V1+jdOu\nHUya5FAokSD27JQR1M7Xg3xxejCGSLgIyoKOjoqmQbkGfj/dqm1bmDoVMjIcCiYShI4cy2BJ5mj+\n1V7T2yLhJCgLGpxZhy5VCi69FObOdSiUSBB6btxUCqZfyg3VK7odRUQcFLQF7cQ6NGiaW8LfyGUf\n0rmCjp5Fwk3QFnSV4lXYf3w/Ww9s9WucUwWtu4pJOFqy7nd2J8zjhZs7ux1FRBwWtAUdZaK809x+\nrkNXrgxxcbBsmUPBRILIP8eNpmJmR0okF3A7iog4LGgLGnCkoI3RNLeEJ4/H8t2+D3lID8YQCUtB\nXdCNUxoze+NsrJ/z0ypoCUfDvv4RQxR3NL3W7SgiEgBBXdAVi1YkIzODjfs3+jXOddfB9u2waZMz\nuUSCwRtzR9C0mB6MIRKugrqgjTF/HUX7IzoaWreGyZMdCibish1/HmZ97ARe7KoHY4iEq6AuaHBm\nHRo0zS3hZeCnn1PyRH2uuLik21FEJECCv6BTGjmyDn3DDbBoEezd61AwERd9tXEEfa65w+0YIhJA\nQV/QKYVTiI+OZ82eNX6Nk5gIjRt7b/0pEsq++WkNR+LXM7BTC7ejiEgABX1BG2P+Oor2l6a5JRz8\na9IIasb1ICE+1u0oIhJAQV/QAI0vbuzIOnTr1jBzJhw/7kAoERccPZ7B4pMfM6idrn0WCXchUdCN\nUrwPzvBYj1/jFCsG1arBbP8PxkVc8fy46RTIuITmNSq5HUVEAiwkCrpMUhmKJBRhxR8r/B5L09wS\nykYs/ZCO5XX0LBIJQqKgwXu5lVPr0JMng8e/g3GRPLds/U52JaYy+Jab3I4iInkgZAq6cYoz69AV\nKkDRopCW5kAokTz0xNhPqHiyAyWTC7odRUTyQMgUdMOLGzJ381wyPZl+j6Vpbgk1Ho/lu70jeLCR\nprdFIkXIFHSJAiUoXbA0S3cu9XssFbSEmuFTF4LJpE+zum5HEZE8EjIFDc6tQ9eoAQcOwNq1DoQS\nyQOvp47g+gv0YAyRSBJSBe3UOnRUFLRtq6NoCQ079x5hXex4XuzSw+0oIpKHQqqgG1zcgPlb5pOR\nmeH3WJrmllAxcPQXlDjxD6pecqHbUUQkD4VUQRdJKEKFIhVYtGOR32M1agQrVsCuXQ4EEwmgLzeM\n4I6rdXKYSKQJqYIG59ah4+OhWTP4+msHQokEyLS0tRzJt4Ynb2rtdhQRyWMhV9BOrUODprkl+A2a\nNJJrYrvrwRgiESjkCrpeuXr8tO0njp/0/4kXLVtCaiocOeJ/LhGnHTtxkv9kjNKDMUQiVMgVdFJ8\nElcUv4KF2xb6PVbhwlC7tvcJVyLB5oVxM8ifUY6WNSu7HUVEXBByBQ3OrUODprkleH24ZAQdLtHR\ns0ikCsmCdnIdum1b74liJ086MpyII37Z8Ac7E79j8M1d3I4iIi4JyYKuW7YuS39fypF0/xePy5aF\ncuVg/nwHgok4ZOCYT7j0ZHtKFU1yO4qIuCQkCzoxNpHqF1Zn/lZnWlXT3BJMPB7LrD8/5P4Gmt4W\niWQhWdDgXYees9HZy62sdWQ4Eb+8Py0NG53OXS3quR1FRFzkU0EbY5obY1YbY9YaYx47x3Y1jTEZ\nxpgOzkU8s8YpjZm9yZkTxapWBY/He2cxEbf935wRNCnSSw/GEIlw5y1oY0wUMARoBlQBuhljKp1l\nu5eAGU6HPJM6ZeqwcvdKDhw/4PdYxmiaW4LDrr1HWRf7BS926el2FBFxmS9H0LWAddbazdbaDGAs\n0O4M290DjAf+cDDfWcXHxFO7dG3mbZnnyHgqaAkGT3wynuInruWq8qXdjiIiLvOloEsDW7N9vC3r\nc38xxpQC2ltrhwF5Ni/n5Dp0vXqwYQNs2+bIcCK5Mv63Edx2lU4OExHnThJ7A8i+Np0nJe3kOnRM\nDLRqBZMnOzKcSI7NWLSewwkrebpLG7ejiEgQiPFhm+1A2Wwfl8n6XHY1gLHGGANcALQwxmRYa/9W\nd4MGDfrr/YYNG9KwYcMcRs72oqVqsGHfBvYc3UPRxKK5HueUdu3gvfegXz+/hxLJsUETP6J6zK0k\nxse5HUVE/JCamkpqaqrf4xh7nmuLjDHRwBqgCfA7kAZ0s9auOsv2I4Ep1toJZ/gze77Xy6mWn7ak\nd/XedLjc/xPHDx+GUqVg61YoVMiBcCI+On4ik/xPlmNip+m0qX2F23FExEHGGKy1OZ5ZPu8Ut7U2\nExgAzAR+BcZaa1cZY/oaY+4805fkNIQ/nFyHLlAA6teHadMcGU7EZ4M/n0liZimVs4j8xZcpbqy1\n04HLTvvc8LNsm6dnuDROaUyPiT0cG+/U2dxduzo2pMh5fbB4BDdecofbMUQkiITsncROuarkVew4\ntINdh3c5Ml6bNjB9OqSnOzKcyHmt2Lib3xNn8eIt+lehiPxXyBd0dFQ0Dco1cOzpViVLwuWXgwPr\n+yI+GTjmU8qfbEPpojrxQUT+K+QLGpxdhwbdtETyjsdjmbl7BPfV0/S2
iPyvsChoJ6+HBj08Q/LO\niOn/wUYfoV+r+m5HEZEgExYFXaV4FfYf38/WA1vPv7EPKlWC/Plh8WJHhhM5q9dmj6BRci+io8Li\nR1FEHBQWvxWiTJR3mtuhdWiA9u01zS2B9ce+o6yJHcdgPRhDRM4gLAoacLygtQ4tgfbkJ19R7ERt\nrqlwkdtRRCQIhU1BN05pzOyNs3HqTmW1a8OuXd4HaIgEwufrP6RnNT0YQ0TOLGwKumLRimRkZrBx\n/0ZHxouOhrZtdRQtgfHt4g0cTPiFp29q63YUEQlSYVPQxpi/jqKdomluCZSnv/qI6jG3UCAh3u0o\nIhKkwqagwfl16CZNYOlS+PNPx4YU4ejxk/x0/COebNXL7SgiEsTCqqCdXodOSIDrr4dvvnFkOBEA\nHvpwPEmecrS/tprbUUQkiIVVQackpxAfHc+aPWscG1PT3OKkzEzLyHUv8/C1j7kdRUSCXFgVNECj\nlEaOrkO3agXffQfHjjk2pESw5z6bhYlJ5/GOLd2OIiJBLuwKuvHFjR1dhy5aFKpXh2+/dWxIiVDW\nwutpr3BHpUd15zAROa+w+y3RKMX74AyP9Tg2pqa5xQkfTF3M0YS1/LtHN7ejiEgICLuCLpNUhiIJ\nRVjxxwrwaBkPAAAgAElEQVTHxmzXDqZMgcxMx4aUCPT0jJe58cIHSIiLczuKiISAsCto8F5u5eQ6\ndEoKlCgBCxc6NqREmK9/XM8fiXMYekcft6OISIgIy4JunOLsOjTo4Rninwe+eJWGBe/igqQCbkcR\nkRARlgXd8OKGzN08l0yPc3PS7drBxIl6RrTkXNrKnfyWbxzDb7/H7SgiEkLCsqBLFChB6YKlWbpz\nqWNjVq/uvdRq9WrHhpQI0e+jt7kqphsVLizudhQRCSFhWdDg/Dq0MTqbW3Juw7ZDLIkazrDuD7sd\nRURCTNgWdJvL2jBmxRjHbvsJKmjJub7vvcclXE/tipe4HUVEQkzYFvT1l1zP4fTDLNi2wLExGzTw\nTnH//rtjQ0oY27M/ndnHXue1Drqtp4jkXNgWdJSJol+Nfryz6B3HxoyLgxYtvNdEi5xP/+GfUozK\ntKt1tdtRRCQEhW1BA9x21W1MXTeVXYd3OTamprnFF8dPePhy5ys83eRRt6OISIgK64JOTkimc+XO\nvL/kfcfGbNEC5s2DQ4ccG1LC0KMfTCEhJpG7mzVxO4qIhKiwLmiA/jX7M3zxcE56TjoyXlISXHst\nzJjhyHAShjwe+GDVK9x3zWMYY9yOIyIhKuwLulrJalxc+GImr5ns2Ji6q5icy0uf/YAncRfPdO7o\ndhQRCWFhX9DgPYoekjbEsfHatoVvvoGMDMeGlDBhLfx7wcv0qPAwMdHRbscRkRAWEQXd4fIOrPpz\nFSt3r3RkvNKloXx571q0SHYfTV3BoYKL+L8ePd2OIiIhLiIKOi46jjur38k7ac5dcqVpbjmTp6b9\nm9bF76FAvgS3o4hIiDNO3mnrvC9mjM3L18tu+8HtXDnsSjbdv4mk+CS/x1uxAlq1gk2bvLcBFZm+\nYAstJ1/F9sd+48LCyW7HEZEgYYzBWpvjpoiII2iA0kmlaXJJE0YvH+3IeFWqQEwMLF/uyHASBu4f\n9wb/KNBL5SwijoiYggYYUHMA7yx6x5H7cxujaW75r6Wr97I24SPe7fmA21FEJExEVEHXL1efKBPF\nnE1zHBlPdxWTU+4eMZQrYtpRuUwZt6OISJiIqII2xtC/Zn/H7s993XWwdSts3uzIcBKiNu84Rhpv\n8/bNj7gdRUTCSEQVNED3at1J3ZTK1gNb/R4rJgZat4bJzt0DRULQXe+OpGx0bRpcXtntKCISRiKu\noAvEFeCWK29h+OLhjoynae7Itu/ASWYefpWX2+iRkiLirIgraIB+NfvxwZIPOHHyhN9j3XADpKXB\nvn0OBJOQc++7X1IkthRdrqvrdhQRCTMRWdCVLqjElSWuZPzK8X6PlT8/NGwIU6f6n0tCy4kTlnHb\nXuafDXT0LCLOi8iCBhw9WUyXW0Wmf374LXGJJ7i3RSu3o4hIGIrYgm5dsTXbD21nye9L/B+rNcyc\nCSf8nzGXEOHxwLBfXqZ/tUeJMhH7YyQiARSxv1liomK465q7HLk/d/HicMUVMHu2A8EkJLw+djEZ\nhdbw7E3d3I4iImEqYgsaoHf13kxYPYG9x/b6PVbXrvDuuw6EkqBnLQye+zLdLn6A+Jg4t+OISJiK\n6IIulr8YbSq2YeTSkX6P1bs3LFsG8+c7EEyC2mfT17M/eTZv9ujjdhQRCWMRXdDgPVls6H+G4rEe\nv8bJlw+efRYee8x7hCXha+CU12h+wV0UTizodhQRCWMRX9C1StciOV8y09dP93usW2+FAwdgyhQH\ngklQmv3TLrYVGse7ve51O4qIhLmIL2hjDANqDXDkkqvoaHjxRRg4EE6edCCcBJ17Pn2L2vm7clGR\n4m5HEZEwF/EFDdClShfStqfx297f/B6rVSsoWhQ+/tiBYBJUfl5ziFWJwxnW4yG3o4hIBFBBAwmx\nCfS6qhfD/jPM77GMgZdfhmeegWPHHAgnQePu99/jsrgmXFW2vNtRRCQCqKCz3F3jbkYtH8XRjKN+\nj3XttVCzJgwZ4kAwCQrbfk9nAa/zZmfd1lNE8oYKOktKcgp1ytRhzC9jHBlv8GB45RU9RCNc3D30\nM0rFXU7TK6u7HUVEIoQKOpv+NfszZNEQrAPXSVWq5L1H90svORBMXHXgoIdpB1/hhRY6ehaRvKOC\nzqZp+aYcTj/Mgm0LHBlv0CD44APYts2R4cQlDwz7mqSEBHr8o4nbUUQkgqigs4kyUfSr0c+xp1yV\nLg133uktaglNJ07Ap5te5tG6j2KMcTuOiEQQ48R0rs8vZozNy9fLjX3H9nHJW5ewuv9qShQo4fd4\n+/dDxYqQmgqVK/ufT/LWwGE/8ObmnhwcvIaYqBi344hICDLGYK3N8b/wdQR9muSEZDpX7sz7S953\nZLzCheHRR+GJJxwZTvKQxwNvL32ZO694WOUsInnOp4I2xjQ3xqw2xqw1xvztTBljzM3GmOVZbz8Y\nY650Pmre6V+zP8MXD+ekx5nbgQ0YAEuWwI8/OjKc5JG3x/3KiaKLGHzTbW5HEZEIdN6CNsZEAUOA\nZkAVoJsxptJpm20A6ltrqwHPA84cfrqkWslqlCtUjslrJjsynh6kEXqshedn/5vOZe8hMS7B7Tgi\nEoF8OYKuBayz1m621mYAY4F22Tew1i601h7I+nAhUNrZmHlvQK0BDElz7k4j3bt7r4n++mvHhpQA\n+mLGVvYWm8xb3fu5HUVEIpQvBV0a2Jrt422cu4B7A9P8CRUMOlzegVV/rmLl7pWOjBcd7b0meuBA\nyMx0ZEgJoEcnvk7jIrdxQYFkt6OISIRy9CQxY0wjoBcQ8nd0iIuOo0/1PgxdNNSxMVu1guRkGD3a\nsSElAL5P28uW5I8Y1vMBt6O
ISATz5dTU7UDZbB+Xyfrc/zDGVAXeA5pba896g8tB2S4KbtiwIQ0b\nNvQxat7re01frhx2JYObDCYpPsnv8U49SKNrV+jSBRK0tBmU7vl4KNVLtqVCsYvcjiIiISg1NZXU\n1FS/xznvddDGmGhgDdAE+B1IA7pZa1dl26Ys8B3Q3Vq78BxjBf110Kfr/EVnGpZrSP9a/R0b88Yb\noW5dePhhx4YUh6xce4wrPkhh4d2zqZWiC9dFxH+5vQ7apxuVGGOaA2/inRL/0Fr7kjGmL2Ctte8Z\nY94HOgCbAQNkWGtrnWGckCvo7zd9z93f3M2v/X517E5Sq1ZBgwawdq33OmkJHg0eHsaOxGmse9aZ\nM/hFRAJa0E4JxYK21nLlsCt5q8VbNE5p7Ni4vXtDsWLw4ouODSl+2rHzJBe9fBkTe35Mm6vquh1H\nRMKE7iQWIMYY+tfs79j9uU8ZNAjeew+2/201X9wy4J0vKZZYUuUsIkFBBe2D7tW6k7opla0Htp5/\nYx+VKQN9+uhBGsHi4EHL5L0v868bQv4CBBEJEypoHxSIK8AtV97C8MXDHR33scdg4kTvmrS465Fh\n35K/0An6NGjtdhQREUAF7bN+NfvxwZIPOHHyhGNjJid7S1oP0nDXzp2Wj357kQdrP0KU0Y+EiAQH\n/TbyUaULKnFliSv5ctWXjo47YAAsXgwLFjg6rPgoPR3qPvg2RUodYGDrm92OIyLyFxV0DvSv2d/R\n+3OD90Ea//qXHqThlpsfXcC2S17gh3vGExcd53YcEZG/qKBzoHXF1mw7uI2lvy91dNwePWDvXvjm\nG0eHlfN484PdTIrrwqiOH1C+aIrbcURE/ocKOgdiomK4u8bdjl9yFR3tvR768cf1II28suCnTB5Z\ncAu3XXMLXa9u43YcEZG/UUHnUO/qvfly1ZfsPbbX0XFbt/beVeyTTxwdVs5g1y5oNvg5Lrs8g2Gd\nn3M7jojIGamgc6hY/mK0rtiakUtHOjruqQdpPPUUHD/u6NCSTXo6NOk7Ha5+n1l3jSEmypfnxYiI\n5D0VdC4MqDmAof8Zisd6HB23bl2oXh3ecXYGXbLp8/AW1l9xG5N7jqFkgZJuxxEROSsVdC7UKl2L\n5HzJTF8/3fGxBw/2Hknv3+/40BHv/RHpfG5v4olGD9Iwpb7bcUREzkkFnQuBuj83QOXK0LYtvPKK\n40NHtLQ0uPfrh7muakmeavyI23FERM5LT7PKpWMZxyj7RlkW3rGQ8kXKOzr2tm1QrRr8/DOULu3o\n0BFp1y6ofNM4Yps/weoHFlM4n57xKSJ5R0+zymMJsQn0uqoXw/4zzPGxy5TxPo7yX/9yfOiIk54O\nrXqu5nijAUy7bbzKWURCho6g/bBx30Zqvl+TLQ9sITE20dGx9+2DihVh3jyoVMnRoSNK3wFH+KxA\nLV7r/AB3XtPb7Tgirrv44ovZvHmz2zHCUrly5di0adPfPp/bI2gVtJ/ajGlD+8vac0f1Oxwf+9//\nhoUL4Utnb/8dMUaMsNw/tzttWsXySacRGJPjnw+RsJNVFm7HCEtn+3+rKW6XnDpZLBB/4QcM8J7c\ntHCh40OHvbQ0uG/0cC686mfeb/+OyllEQo4K2k9NyzflUPohFm5zvkUTErzr0I8+qgdp5MSuXdCm\n73+Iuv5ppnQf7/jyg4hIXlBB+ynKRNGvRj+GLHL2KVen9OgBe/bA1KkBGT7spKdD+257SW/XmRE3\nDqNi0YpuRxIRyRUVtANuu+o2pq6byq7DuxwfOybG+yCNgQP1IA1fPPCghw3VunNbnRvpWLmj23FE\nJAC2bt1KUlKST0uLmzdvJioqCo/He+fHli1bMnr0aABGjRpFvXr1AprVHypoByQnJHNT5Zt4as5T\nAVmLbtMGkpLg008dHzqsjBwJ47a/RMrl+3nlhpfdjiMiAXLRRRdx8OBBn88tyb7d1KlT6d69+xn/\nLNiooB3yyg2vsHzXcu6ddq/jJa0HaZxfWho88NZsTJ23+bLr58RGx7odSUTELypohxTKV4iZt85k\n0Y5F3DPtHsdLum5duOoqGDrU0WHDwq5d0L77dqI63crYzp9QOkm3XxMJRSkpKbz66qtUq1aNggUL\n0qdPH/744w9atmxJUlISTZs25cCBA3+btm7UqBFPP/00//jHP0hKSqJ58+bs3XvmRwI3atSIESNG\nnPHPHnnkEerXr8+hQ4cAGDFiBJUrV6Zo0aK0aNGCLVu2BGbHz0IF7aBC+Qoxs/tMlvy+hP5T+zv+\ntKvBg+Gll+DAAUeHDWnp6dDxpgxiunXlwXr9aHJJE7cjiYgfJkyYwHfffcfatWuZPHkyLVu25KWX\nXuLPP/8kMzOTt956C/j71PSYMWMYNWoUu3fv5sSJE7z66qs+v6a1lj59+rBixQpmzZpFwYIFmTRp\nEi+99BITJ05k9+7d1KtXj27dujm6r+ejgnZYUnwS02+dzvJdy+n/jbMlXaWKdz1aD9L4rwcfhN8r\nD6TKpQV5ot4TbscRET/dc889XHDBBVx44YXUq1eP2rVrU7VqVeLi4rjxxhtZunTpGb+uV69elC9f\nnvj4eG666SaWLVvm0+ulp6fTrVs39u/fz5QpU4iPjwdg+PDhDBw4kIoVKxIVFcXjjz/OsmXL2Lp1\nq2P7ej4q6ABIik9i+i3T+eWPX7j767sdLelBg+Ddd2HHDseGDFkjR8KEVRPIuHQ8n9w4miijv84i\n/jLGmbfcKlGixF/vJyQk/O3jw4cPA/xtGbFkyf8+3z0xMfGv7c5n/fr1TJ48mWeeeYaYmJi/Pr95\n82buu+8+ihQpQpEiRShatCjGGLZv356r/coN/UYLkILxBZl2yzRW/rmSvlP6OlbSF10Ed9yhB2mk\npcFDg9dx4oa7GN/lc4omFnU7kkhYsNaZt0Bz6uzrypUrM3LkSJo3b87atWv/+nzZsmUZPnw4e/fu\nZe/evezbt4/Dhw9Tp04dR17XFyroADpV0mv2rKHP5D6OlfTjj8OECbB8uSPDhZxdu6BDl2MU6t2J\n55oMolbpWm5HEpE8lpMTcc+3bZcuXRg8eDDXX389GzZsAKBv374MHjyYlStXAnDgwAHGjx+f+8C5\noIIOsAJxBZh6y1R+2/cbd0y+g0yP/3cbKVIE3nwTmjSBV1+FkycdCBoi0tOhc2co1rM/11aowt01\n7nY7kog45PSj4nMdJWf/s/MdTfuybY8ePXj66adp0qQJW7ZsoX379jz++ON07dqVwoULU7VqVaZP\nn+7LbjhGT7PKI0fSj9B6TGvKFSrHh20/JDoq2u8xf/sN7rwT9u+HDz/0XoYV7gYMgHmHR5BR81XS\n+qRRIK6A25FEQoqeZhU4eppViMofl59vbv6GrQe3cvvk2x05ki5fHr791ltaTZt6bwd67JgDYYPU\nyJEwZdEytld+jC9v+lLlLCJhTQWdhxJjE5nSbQrbD27ntkm3OVLSxkCvXvDzz94j6mrV
4PvvHQgb\nZNLS4JGn9mNv6sTbLd7i8mKXux1JRCSgNMXtgqMZR2k3th3F8xdnVPtRxETFnP+LfDRpkveIukUL\n7/XShQs7NrRrdu2CGjUtpR/sQI2KpRnSMjBPDhOJBJriDhxNcYeBxNhEJnedzO4ju+nxVQ9Oepw7\ny6tdO1ixAqKj4Yor4KuvHBvaFadOCqvU6zVsgR281vQ1tyOJiOQJHUG76FjGMW4cdyOF8xXmkw6f\nOHokDTBvHvTu7S3qIUPgwgsdHT5PDBgAS/bM47drOpHWO41yhcu5HUkkpOkIOnB0BB1GEmITmNh1\nIgdOHOCWCbc4eiQNUK+e91rpyy/3rk1/+GHe3EDAKSNHwrR5O9lcoxsftftI5SwiEUVH0EHg+Mnj\ndPy8I/lj8/Nph08D8qjE5cu9R9MFC8J770GFCo6/hKPS0qBVm5OUH9SUppX+wbONnnU7kkhY0BF0\n4OgIOgzli8nHhJsmcDTjKN2+7EZGZobjr1GtGixc6H3YRp063hPIgu0GJzt3wvvvQ+vWcMMN0GDQ\n0xTIH8UzDZ5xO5qISJ7TEXQQOXHyBJ2+6ERsVCxjO40lLjouIK+zcSP07Qt//gkffADVqwfkZc7L\nWli1ynvm+aRJsGYNNGvmPdHNXPY1j3x/N4vvXEzx/MXdCSgShnQEnXObN28mJSWFkydPEhV19uNa\nHUGHsfiYeMZ3Hk+mzaTL+C6kZ6YH5HVSUmDGDLjvPu/lWI89BkePBuSl/ubkSZg7Fx56CCpWhObN\nYft2eO457+VUg4asZlq+Hgz49jbGdRqnchYRv4waNYp69er5PY5TD+fICRV0kImPieeLzl9graXz\nF50DVtLGQM+e3hucbNninQKfMycgL8Xhw96He/TsCSVLwv33e9fCP/8cNm/OOsO82gp6TulG/ZH1\nqVi0Ir/d+xvXXXRdYAKJSMSw1p63XD0e5x4J7CQVdBCKi47j886fE22i6fR5J06cPBGw1ypRAsaM\ngddf9xZo796wb5//4/7+u/dktNatoVQp7zOsa9aEJUu8b4MGwdVXw/Jdy+j0eSeu//h6ripxFb/d\n+xtP1n+SQvkK+R9CRELOtm3b6NixI8WLF6dYsWLce++9AIwYMYLKlStTtGhRWrRowZYtW/76mqio\nKIYPH07FihUpUqQIAwYMAGD16tXcfffdLFiwgIIFC1KkSBEAevXqRb9+/WjVqhUFCxYkNTWVqVOn\nUr16dQoVKkS5cuX4VzA809dam2dv3pcTX6WfTLcdxnWwrT5tZY9nHA/46x04YG3//taWKmXt+PE5\n+1qPx9oVK6x94QVra9e2tnBha7t2tXbMGGv37//79mnb0mybz9rYC1+90L7242v28InDzuyEiJxT\nMP8ezszMtNWqVbMPPfSQPXr0qD1x4oSdP3++nTRpkr300kvtmjVrbGZmpn3hhRfsdddd99fXGWNs\nmzZt7MGDB+2WLVtssWLF7IwZM6y11n700Ue2Xr16//M6t912my1cuLBdsGCBtdbaEydO2O+//96u\nWLHCWmvtL7/8YkuWLGknTZpkrbV206ZNNioqymZmZp4z/9n+32Z9PuedmZsvyu1bMP/FCFbpJ9Nt\nx3EdbctPW9pjGcfy5DV/+MHaSpWsvfFGa7dvP/t2GRnWpqZa++CD1pYvb23ZstYOGGDtrFnWnjhx\n5q/5ccuPtvknzW2Z/ytj31r4lj2afjQwOyEiZxTMv4cXLFhgixcv/rcibNGihR0xYsRfH2dmZtrE\nxES7ZcsWa623oH/88ce//vymm26yL7/8srX27AXds2fPc2a5//777YMPPmitda+gnb11lTguNjqW\nMR3HcMuEW+gwrgMTukwgX0y+gL5m3bqwbBm88IL3EZYvvAB33AFRUd715BkzYPJk+OYbKFcO2raF\n8eO969hnW+r5ftP3PDf3OdbvXc/AfwxkYpeJxMfEB3Q/RCTnzL+cORnKPpPzM8W3bt1KuXLl/nam\n9ObNm7nvvvt46KGHvGNnrStv376diy66CIASJUr8tX1iYiKHDx8+52ud+rpT0tLSePzxx1mxYgXp\n6emkp6fTuXPnHO+Dk1TQISA2OpbPOn7GrRNupf3Y9kzsOjHgJR0fD88+670Pdp8+MHq098SuefO8\n11G3awfPPw+n/R3/H9ZaZm+czbNzn2XbwW38s94/6V61e0BuxCIizshNsTrloosuYsuWLXg8nv8p\n6bJly/Lkk0/SrVu3HI95thPETv/8zTffzL333suMGTOIjY3lgQceYM+ePTl+PSfpJLEQERMVwycd\nPiE5IZl2Y9txLCNvHvx85ZUwf773kZY9esDWrTBzJvTvf/ZyttYyff106o6oS7+p/eh9dW/WDFjD\n7VffrnIWkbOqVasWF154IY8//jhHjx7lxIkT/Pjjj/Tt25fBgwezcuVKAA4cOMD48eN9GrNEiRJs\n27aNjIxz3wDq8OHDJCcnExsbS1paGp999tn//Ll14dpxFXQIiYmKYfSNoymaUJS2Y9uy52je/Osu\nOtpb0F26QKFznFxtrWXymsnU+qAWD818iHtr38vKfivpXq274w8CEZHwExUVxZQpU1i3bh1ly5bl\noosu4vPPP6d9+/Y8/vjjdO3alcKFC1O1alWmT5/+19edfjSc/ePGjRtTpUoVSpYsSfHiZ7+vwtCh\nQ3nqqacoVKgQzz//PF26dDnrmHlFdxILQSc9J+n/TX/GrBhD/rj8VClWxftW/L//LZwv7x4E7bEe\nvlr1Fc/Pex5rLU/Vf4obL7+RKKN//4kEG91JLHCcvpOYCjqEWWvZenArv/7xK7/u9r6t3L2SlbtX\nkhSfROVilf9W3k5eX5zpyeSLlV/w/NznSYhN4Kn6T9GmYhtX/qUpIr5RQQeOClrOy2M9bD2w1Vva\nf/xvcRfOV5gqxatQ+YLKf5V25WKVc1TcJz0nGfPLGF6Y9wJFEorwVP2naF6huYpZJASooANHBS25\n5rEethzY8rcj7lW7V5GckPxXWZ864q5crDJJ8Ul/fX1GZgajfx7N4HmDKZ1UmqfrP03jlMYqZpEQ\nooIOHBW0OM5jPWzev/lvR9yr/lxF0YSiVClehQrJFfh63deUTy7PU/WfosHFDdyOLSK5oIIOHBW0\n5BmP9bBp/yZ+/eNXVv+5mrpl6+oBFiIhTgUdOCpoERHJNRV04Oh50CIiIhFAd48QEYkg5cqV04md\nAVKuXDlHx/NpitsY0xx4A+8R94fW2pfPsM1bQAvgCHCbtXbZGbbRFLeIiESUgE1xG2OigCFAM6AK\n0M0YU+m0bVoA5a21lwJ9gXdzGiQcpKamuh0hoLR/oSuc9w20f6Eu3Pcvt3xZg64FrLPWbrbWZgBj\ngXanbdMO+BjAWvsTUMgYU4IIE+5/ybR/oSuc9w20f6Eu3Pcvt3wp6NLA1mwfb8v63Lm22X6GbURE\nRMRHOotbREQkCJ33JDFjTB1gkLW2edbHjwM
2+4lixph3gTnW2nFZH68GGlhrd502ls4QExGRiJOb\nk8R8ucxqEVDBGFMO+B3oCnQ7bZvJQH9gXFah7z+9nHMbUEREJBKdt6CttZnGmAHATP57mdUqY0xf\n7x/b96y1U40xLY0x6/FeZtUrsLFFRETCW57e6lNERER8E5CTxIwxzY0xq40xa40xj53hzy8zxvxo\njDlujHkwEBkCyYf9u9kYszzr7QdjzJVu5MwNH/atbdZ+LTXGpBlj6rqRM7fOt3/ZtqtpjMkwxnTI\ny3z+8uH718AYs98YsyTr7Uk3cuaWL98/Y0zDrL+fK4wxc/I6oz98+P49nLVvS4wxvxhjThpjCruR\nNad82LckY8xkY8yyrH27zYWYuebD/hU2xkzI+v250BhT+byDWmsdfcNb+uuBckAssAyodNo2FwDX\nAM8BDzqdIZBvPu5fHaBQ1vvNgYVu53Zw3xKzvX8lsMrt3E7uX7btvgO+Bjq4ndvh718DYLLbWQO4\nf4WAX4HSWR9f4HZuJ/fvtO1bA9+6ndvB791A4MVT3zdgDxDjdnYH9+8V4Kms9y/z5XsXiCPo897Y\nxFr7p7V2MXAyAK8faL7s30Jr7YGsDxcSOteE+7JvR7N9WADw5GE+f/ly0x2Ae4DxwB95Gc4Bvu5f\nqJ6s6cv+3Qx8aa3dDt7fNXmc0R++fv9O6QaMyZNk/vNl3yxQMOv9gsAea22odIQv+1cZmA1grV0D\nXGyMKXauQQNR0L7c2CSU5XT/egPTAprIOT7tmzGmvTFmFTAFuD2PsjnhvPtnjCkFtLfWDiP0iszX\nv5vXZk0jfuPTNFvw8GX/KgJFjDFzjDGLjDHd8yyd/3z+3WKMScA7O/dlHuRygi/7NgSobIzZASwH\n7sujbE7wZf+WAx0AjDG1gLJAmXMNqqdZBZAxphHeM9r/4XYWJ1lrJwITjTH/AJ4HbnA5kpPeALKv\nH4VaSZ/PYqCstfZo1j30J+IttXARA1QHGgP5gQXGmAXW2vXuxnJcG+AHa+1+t4M4qBmw1Frb2BhT\nHphljKlqrT3sdjCHvAS8aYxZAvwCLAUyz/UFgSjo7Xj/ZXBKmazPhQuf9s8YUxV4D2hurd2XR9n8\nlaPvnbX2B2PMJcaYItbavQFP5z9f9q8GMNZ4n8d3AdDCGJNhrZ2cRxn9cd79y/7Lzlo7zRgzNMy+\nf4hDrSwAAAGaSURBVNuAP621x4Hjxpi5QDW864PBLic/f10Jnelt8G3fegEvAlhrfzPGbAQqAf/J\nk4T+8eVn7xDZZhyz9m/DOUcNwGJ5NP9dLI/Du1h++Vm2fQZ4yO0Ffqf3L+sbtQ6o43beAOxb+Wzv\nVwe2up3byf07bfuRhNZJYr58/0pke78WsMnt3A7vXyVgVta2iXiPVCq7nd2p/cvarhDeE6gS3M7s\n8PfuHeCZrPdL4J0yLuJ2dgf3rxAQm/V+H+Cj843r+BG09eHGJllPuvoP3hMBPMaY+7J+iIJ+KsOX\n/QOeAooAQ7OOxDKstbXcS+0bH/etozGmB5AOHANuci9xzvi4f//zJXke0g8+7l8nY8zdQAbe718X\n9xLnjC/7Z61dbYyZAfyMd/rwPWvtShdj+ywHfz/bAzOstcfcyppTPu7b88BHxpifs77sURsaMzu+\n7t/lwChjjAfvlQZ3nG9c3ahEREQkCOlpViIiIkFIBS0iIhKEVNAiIiJBSAUtIiIShFTQIiIiQUgF\nLSIiEoRU0CIiIkFIBS0iIhKE/h+XXcHSF61cxAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -2030,7 +2033,7 @@ { "data": { "text/plain": [ - "" + "" ] }, "execution_count": 56, @@ -2041,7 +2044,7 @@ "data": { "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfAAAAFwCAYAAABHHCk+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1fX+wPHXBxQR90ABUdy4UHBSmlKZq2GObmmWVtrO\n1m3e3y29dW+7rLSulTezTMtRZomZGW5xgQNQwQ0OcKCyx/n8/vhymGd8D+IA38/Hg0fnfL6fz/l8\njtfrm89WWmuEEEIIUbm4XekGCCGEEMJ1EsCFEEKISkgCuBBCCFEJSQAXQgghKiEJ4EIIIUQlJAFc\nCCGEqIScBnCl1Cyl1Eml1E4HeT5RSsUrpaKVUsHF0ocopfYopfYppV6qqEYLIYQQ1zozPfCvgcH2\nHiqlhgJttNbtgEeA/xakuwHTC8p2BsYopTpcdIuFEEII4TyAa63XAWcdZBkOzCnIGwnUU0o1BXoD\n8Vrrw1rrXGB+QV4hhBBCXKSKmANvBhwt9j6xIM1euhBCCCEu0qVYxKYuwWcKIYQQophqFfAZSUDz\nYu/9C9I8gBY20m1SSsmh7EIIIa4pWutyd3rN9sAV9nvWvwD3AyilQoFUrfVJYAvQVikVoJTyAO4p\nyGuX1vqa/Hn99deveBvk+8v3l+8v312+/+X9uVhOe+BKqe+BMKCRUuoI8DpG71prrb/QWi9TSg1T\nSiUA6cADBcE4Xyn1JLAC4xeFWVrruItusRBCCCGcB3Ct9VgTeZ60k74cCCxHu4QQQgjhgJzEdhUI\nCwu70k24ouT7h13pJlxR1/L3v5a/O8j3v1iqIsbhK4JSSl8tbRFCCCEuNaUU+jIsYhNCCCHEVUQC\nuBBCCFEJSQAXQgghKiEJ4EIIIUQF0xrWrYOHHoK0tEtTR0WcxCaEEEII4MwZmDMHvvjCCOKTJl26\nuqQHLoQQQgC5ufDtt3DXXbB2rflyFotmVUQe48ZB69awbRvMnAmxsfDcc1C79qVprwRwIYQQ17SM\nDPj0U2jbFmbPhuuvh7FjYdw4OHbMfrl8i4VXvvmZOs/34pYVTcnt+iUJ+y18+y3ccAOoS3y1lwRw\nIYQQlVpMDHz2GWRnu1bu7Fl4801o1Qr++gt+/BH+/BOefRbi4qBFC+jaFd5/3+idW+Xm5fPsVz9S\n+/kQPo7+F092+webn1jJkUb/446f+7HjxI6K/YJ2yEEuQgghKqXjx+H11+HnnyEoCE6dgu++M147\nknw2nbumvcfmqAxGN57Cqy940bGj7bz79sHTT8Phw/DBR3n8cnA+/4v/N9Ut9fh7z3/y2phhuLkZ\nXW2LtvDV9q/4v1X/x7iu45gaNpU6NerYbYcc5CKEEOKK+vVXGDXK6LVeDunp8K9/GYG6fn0jyK5c\nafScb7oJPvgALJay5fLyLTz2+Xf4/acDh9P2MWh0Itt79iK34U67dbVvD8uWwT2vrmT48q58v3cm\nU0M/4fwHG5ly762FwRvATbnxcI+HiXk8hrNZZ+n0WScWxi6skJvHbLrS16kVu1ZNCyGEqDyOHdN6\n9Git27TR+rXXtPb21nrxYufl0tO1fu45rYcM0TopyXx9ubkW/eWXFu3np/WYMVofPFg2z4EDWvfr\np3VYmNaHDhWlf7V8k671TB/t9UxP/d/f1muttbZYLHpO9Bzd+N3GetrGadpisZT5vMOph/WoH0bp\nltNa6p/jfraZx541h9bo3l/21scvHLf5vCDulT9uXkzhivyRAC6EEJVDfr7Wn32mdePGWr/6qtYZ\nGUb6li1at2ih9SuvaJ2XZ7vsqlVat26t9dixWr/+utbNmmm9aZPzOpdsjNG1nu6tGz5xh161/rzD\nvHl5Wr/9ttG+t6Yn6lbPjdNuL/jpidNn69y8/DL5E04n6N5f9tZDvxuqT1w4obXWOjM3U7+5+k3d\n8J2GespfU3RGTobzRtrgKOBfbACXOXAhhBCm7d4Njzxi7HH+4gvo0qXk85QUuPtuqF4dvv8eGjUy\n0s+dgxdfNIajP/8cbrvNSF+61Djs5L33YPz4svXl5OZz57sfsPzCe4z1eQOvNlFsStzI0jFLCagf\nYLedWmveWPoNb0a+QC/3h1n0zCv4NLS/nys3P5epq6fyv6j/8dx1z/Hfrf8lqGkQHw76kFYNWrn6\nx2TKxc6BX/Get/UH6YELIcRVKz9f6//8x+jVfv658d6e3Fyt//53rVu10nr7dq1//VVrf3+tJ03S\nOjW1bP6YGK3bttX62WeNslbLt+zVtZ8J1fWfvlGv3XVQa230aKdtnKZ93/fV64+st1l/SnqKHvnD\nSB30WZDecWKHS98z4mCEvu3723R4fLhL5coD6YELIYS4lM6cgfvvN7Zd/fAD+PubK/ftvEwm/vAy\nDc7ezPev38FNNzmu4557jNdzv7fw8KxPWHL2TUY1msK85x6nmnvJNdfh8eGM/3k8Hw7+kHFdxxWm\nL4tfxqSlkxjbZSxv3PQGntU8Xf26l83F9sAlgAshhLBr61bjZLKRI+Htt42hcTMOpR5ixA8j8K3Z\nkm0nNzLj1umM7jTaYZm8PHjipZN8mXo3dermsfj+2dwc0tZu/pjkGG6fdztjuozhlRte4YUVLxCe\nEM7sO2cT1jLMhW95Zcg2MiGEEHbl5xvz1a7S2pirHjrUOMjkgw/MB+8/9v9B6FehjO82nt/uW8yK\n+37nqfCnmLdrnsNyO5K3Ee7fi8eGDiDl3dUOgzdA5yadiZwYyZoja/D7wI/03HR2PLqjUgTviiA9\ncCGEqKJOnoRhw4zzuefPB3d3c+XS0uDRR2HnTli0CNq1M1dOa82769/l48iPmTdqHgNaDih8tjt5\nN4O+HcTbA9/m/m73lyk7b9c8Ji+fzMzbZjKy40hzFRbIzssm6kQUof6hLpW70mQIXQghRBkJCTB4\nsHGe99q10KmTcd63s/O5t8cfo/+0e7nO8hJLPhiCl5e5+i5kX+CBJQ9w9PxRFv1tEf51y06Ux6XE\nccu3t/CvG//FgyEPApBvyecfq/7BjzE/suSeJQQ1dXKMWhUiQ+hCCHGVu9x9k61boX9/eOklmDoV\nfvrJCOJvveW43J9RCfSZ2Y/OPoFEt76PPee2m6rvyLkjhM4KpWHNhqyZsMZm8Abo6N2RVeNXMSVi\nCjO3zuRc1jnumH8Hm5M2s3nS5msqeFcE6YELIcQlkpdnrN5OTzeCqNtl6DKtWGH0ur/4Au68syj9\n2DHo2xdeew0eeKBsuYVrd3L30qGM8X2d7559mEWxi3h6+dOsf3C9w/3WCWcSGDhnIJP7TOa5654z\n1cb9Z/Zz85ybybPkMaLDCD4c/CHV3U1OsFchMoQuhBBXofx8mDDBmIfOyjLO6J4yxXk5reGdd2Dg\nQOjZ07U658417p9etAj69Sv7fO9eGDAAZs2CW28tSv/vsvU8vnokk9t+yrRJfytMn7ZpGl9s+4L1\nD66nQc0GZT4vJjmGwd8N5rUBr/Fwj4ddauuRc0fYdmwb
IzqOcKlcVSIHuQghxFUmP1/rBx/U+sYb\njXO/jx83DjJZutR52VdeMY4abd5c6+Rk83U++94W7d/confvdpxv40bjMBbr8aVvzAvX6qXG+s35\ny23mfyb8GT3g6wE6KzerRPq2Y9u0z/s++rsd35lvpCiBizzIRebAhRCiAmkNTz5p3JD1yy/g5QU+\nPsZd0w8+aCwus+ett2DJEoiMhLFj4d57jZ68M6Pf+5SP0nvx0Bef0Lmz47yhofD11zB8ONz71nxe\nixrP5/2X8I+7B9vM//6g92nk1YgHljyARRtXfG04uoEh3w3hs2GfcW/Xe503UFwaFxP9K/IH6YEL\nIa4SLlw4Vabc009r3aeP1ufOlX3+2Wdad+midVpa2WfTpxu3ellv58rNNW7Ueu01x3U+/vlc7f53\nf/39hlXa+11vvemoiZtBtNYPfzpPu7/gpxeudX7UaEZOhr7uq+v0y3+8rP888Kf2ftf7shw1WtUh\nt5EJIUTFyc3V+tZbtf6//3OtnMWi9Ysvat2jh9Znz9rPM2GC1vfcU/KXhNmzjSHzAwdK5j9+3Lit\na9ky2583de4yrV5son9av0trrfXi2MU64KMAfTrjtMO2rty/Unu/6613HN9p9uvplPQU3e6Tdrrh\nOw11xMEI0+WEfZclgANDgD3APuAlG8/rA4uBHcAmoFOxZ4cK0qOAzQ7quKR/UEIIYcbkyVrfdJPW\nPj5ab9hgvtzdr/+sO4dc0Kcdx06dkaF19+5af/SR8X7hQqOuuDjb+des0bpJk5J3W2ut9ee/rtfq\nRW89c1nJRj4T/oy+/fvb7V5juf3Ydu39rne5gnDiuUQdkxzjcjlh2yUP4Bh7xROAAKA6EA10KJXn\nXeCfBa8DgZXFnh0AGpio51L+OQkhhFMzZ2odGGj0oBcu1LpdO2MRmjOPfz5X85qbHv/jo6bqOXRI\n66ZNtZ46VWtvb+PGLkfef1/rnj21zipYR7Zw7U6tXmyi35xfdhg7Oy9b9/myj35v/Xtlnh04c0D7\nfeCnF8QsMNVOcWldbAA3s4itNxCvtT6stc4F5gPDS+XpBKwqiMJ7gZZKKe+CZwo5MEYIcZVbvRr+\n+U/jfur69WHUKGMb16uvOi43d9V2Pj/4DLPDVvNn4q+sOrjKaV0BAfDa59v514mezP7xFCEhjvM/\n9xy0aAHPPgurdxzkb0uG8kSbafzj7iFl8nq4e/DD6B94b8N7rD+yvjA9JT2Fwd8N5pV+rzi9VERU\nDmYCazPgaLH3iQVpxe0ARgIopXoDLQDrUTwa+EMptUUpNenimiuEEBXvwAG4+25jH3Xxc7+nT4cF\nC4zgbkvMoWTGh4/gucDPGX9jP2beNpOJv0wkLSfNYX2nM07z3pFRhPasybyzzzptn1Lwv//B8rXJ\n3Dx7EHf5vMqnD4+xmz+gfgCz7pjFmEVjOJVxirScNG79/lbu6nQXT/Z+0ml9onKoqJ7x20ADpdR2\n4AmM+W7r5oe+WuvuwDDgCaWUjeMFhBDi4lgs5St3/jzccYfR+x44sOSzhg1h5kzj5LK0UjE5IyuX\nvtPuIrTmfbz/4CgAhrUbRv+A/ry88mW79eVb8hmzaAyjO47m93HLWX9kPcvilzltZ606eXg/djdD\nWtzF/Ocfd5r/tva3MabLGO776T7+tuBvdG7SmTdvetNpOVF5OD2JTSkVCkzRWg8peP8yxrj9Ow7K\nHASCtNZppdJfBy5orT+0UUa//vrrhe/DwsIICwtz4asIIa5VmZnG/ubJk+Ghh8yXy883jhv194fP\nPrN/0ccDD4Cnp3G9plXXl5/kZPZhkt5fQjX3or7Q2cyzBH0exNyRc0vcxmX16p+vsilxEyvuW0E1\nt2r8eeBPHljyALsf303dGnXttvXllS+z/fh2wu8Nx93N3LViufm53DTnJurWqMvPd/98TR5XejWJ\niIggIiKi8P3UqVPRl/IkNsCdokVsHhiL2DqWylMPqF7wehIwu+C1F1C74HUtYD0wyE49l2KNgBDi\nGvDUU1r372+s1j5zxny5B17apQeEWXROjuN8qanGNq8VK4z34z/+Sns8F6gPn0y1mf+XPb/o1h+3\n1mnZJTd8L45drJt/2FyfTDtZIn3ikon60aX2F8At2bNEN/+wuU5JT3H+pUrJzsvWefl5LpcTlx6X\ncRvZXiAeeLkg7RHg4YLXoQXP44CFQL2C9FYFAT8K2GUta6eOS/1nJYSogsLDtW7RwgjcjzxibAMz\nY+r3yzRT0HO3mDjfVGv9++9GEP/wxw1aveitl23e4zD/uMXj9NPhTxe+j0uJ043fbawjEyPL5D2b\neVY3+6CZza1dCacTtPe73nrj0Y2m2ikqj8sSwC/HjwRwIYSrkpO19vPT+q+/it43bqx1jJOtyvuO\nntJuL/jpMbNe1u0/ba+z87JN1XffY8c1zzXTr3/3q9O8pzNOa9/3ffWaQ2v0+azzusP0DvrLbV/a\nzb9kzxLd9pO2Oj2naN9aRk6G7vZ5N/3Jpk9MtU9ULhcbwOU2MiFEpaS1MX/doYNxe5fVxx/Db7/B\n77/bntO2WDQt/34PjT392P6fjxg2dxiD2gzimdBnnNSnuW3ucOrnBDH3wX+bauPPe37mhT9eoEuT\nLnh7efPF7V84zH/PwntoUa8F797yLgAPLXmIjLwMvh/5PcreBL2otC72NjLZny2EqBDjxsGnn16+\n+r76Co4cgTfeKJn++OOQlGRcJGLLU1/M4yS7WPnyfwD4YNAH/HvtvzmVccphfXN2zCEx7TBfj3/d\nYb7i7uxwJ738epF0PolPhzr/w/lk6Cd8s+MbtiRtYdb2WWxM3MiXt38pwVvYJD1wIcRFW7oUnngC\nsrON27bq1DFXTmv7K78d2bcP+vaFNWugY8eyz//4Ax59FGJjoUaNovQtexPp87/uzBkUzribexSm\nTw6fTL4lnxm3zrBZX+L5RLrP7M6K+1YQ7BPsUlvzLHnkWfLwrOZpKv/cnXOZsnoKqVmprJmwho7e\nNr6gqBKkBy6EuKLS0+Gpp4wrKgcONN8LT0+H4GDYtMm1+nJzjd7+lCm2gzfALbdAly7w0UdFaXn5\nFob89wFurPVUieAN8PqA11kQu4DdybvLfJbWmom/TOSp3k+5HLwBqrlVMx28AcYGjWVAwAA+v/Vz\nCd7CIemBCyEuyssvw9Gjxilme/dCv35GL7xePeflvv/eCMK//26+vv/7p2b7NsVvvznuve/fD336\nwM6d4OcHd703nfBj33LqnfV4elQrk/+TyE/4dd+v/D7u9xJD1l9u+5KZ22ay8aGNso9aVCjpgQsh\nrpiYGOOIzw8+MN4HBsKwYcZCMkfi4mDWLFi3DvbsgQ0bzNU3d9V23k5vy8f/Pe906L1NG5g0CV55\nBcK37GXR6Sksuvdbm8Eb4LGej3Hk3JESp6IdSj3Eq6te5Zs7v5HgLa460gMXQpSLxQIDBsDYsfDY\nY0XpCQnGqWj
x8dCgQdlyWhtD7XfcAU8/DV98AYsWOe+F5+VbaPB8PzwaJ/H8gEd49QYnt4wAFy5A\nYMdczozoyx0B4/nx7084zL8sfhnP/v4sux/bjbubOwPnDGRI2yG82PdFp3UJ4SrpgQshrohvvjEW\nrT38cMn0tm1h+PCS88/F/fgjnDplLHoDmDDB6IVv3Oi4vsf/+y1a5bF6UjjTNk3jQvYFp22sUwdu\n/sc06nvWY/5zzs8PH9p2KK3qt+KzLZ/x2ZbPyMrL4vnrnndaTogrQXrgQgiXnT4NnTpBeDh07172\n+cGDxlWc+/ZBo0ZF6RcuGHPeP/xgrCK3+uILWLwYli+3Xd+R5HO0er8jXw38mQcG9WbMojEENw3m\npX4vOWznibQTdPmsCxse2kD7Ru1NfbfYlFgGzDbOMF//4HrT5YRw1cX2wCWACyFcNnEi1K4N06bZ\nz/PII0bw/s9/itL+/ncj+H/9dcm8OTnGNZ7z58N115X9rJ6vPs/53FT2vTcLgJjkGG6acxMHJh+g\nlkctu214cMmDNKrZiPcGvefK12NqxFT86/rzUHcXbkYRwkUSwIUQl9W6dXDPPcYe67r2L8/iyBEI\nCTGGx729YfduuOkm479NmpTNb68X/sumWO78eQC7Ho2hc8uign9b8Df6NOvD89fbHuLekrSFO+bf\nwZ4n9lDP08mSeCGuAJkDF0JcNnl5xoK1jz5yHLwBWrQwAv177xkL1554wti7bSt4gzEXHhdXci7c\nYtFMmD+ZEQ3/WSJ4A/xf///j/Y3vk5GbUeaztNY8vfxp/n3TvyV4iypLArgQwrS5c42V5aNHm8v/\nyivGdrGPPoK0NGNY3R4PD3j1VZg6tSjtxdmLyHA7ydxnyi5A69q0K9f5X8eX274s285dc8nJz2FC\n8ARzDRWiEpIhdCGEKTk5xsUhs2dD//7myz02OZ3/Loli04/96NPHeR3WufB2nTLwebMj7/f9hmfu\nDLOZP+p4FLfNu439k/cXnnaWlpNGh+kd+PGuH7m++fXmGyrEZSZD6EKIy+Lrr43g6krwBqg9+D1q\nPDSEDt3OOc1bvBc+8sO38cu/zm7wBgjxDaGHbw9mbZ9VmPbW2rcIaxkmwVtUedIDF0I4lZVlBO9F\ni6B3b/PlzmSeod2n7ejs3ZmRHUc6vbITjF54q+4HOH5bLyIf2EGvQH+H+bckbWHkjyNJeCqBpAtJ\n9PqyFzsf3Umzus3MN1SIK0B64EKIS27mTGO/tyvBG+DDjR8yosMI3hn4DjO2zMCiLU7LeHhA24f/\nyXCfZ5wGb4BezXoR1CSIr6O/5u8r/s5zoc9J8BbXBNuHAgshRIH0dHj7bfuHrNhzKuMUn2/9nG0P\nbyOgXgD1atRjecJyhrUb5rDcnlN7iMv+g/2T/2u6rtcGvMbQuUNp4NmA70d971pDhaikpAcuhHDo\n00+Nee9u3Vwr9/6G97mr0120rN8SpRRP9X6KTzc7v2v0jTVv8Gzos9SpYfJScSDUP5ThgcOZPmy6\nS1d3ClGZyRy4EMKuc+eMs83XrLF/97YtyenJdJjegehHo2lRrwUAWXlZBEwLYM2ENQQ2DrRZbs+p\nPfT/uj/7J+93KYALURnJHLgQ4pL56CPjelBXgjfAu+vfZWzQ2MLgDeBZzZOJIROZsWWG3XLl6X0L\nca2SHrgQVdypU8a2rE8+wekd2sWdPm3c7715M7Rubb7cibQTdJrRiV2P7SqzmCzxfCJdP+/K4WcO\nlwnS0vsW1xrpgQshHPrsM5g+HSIjXSv33nvGiWuuBG+Ad9a9w/3d7re5Ety/rj8DWw/kmx3flHn2\nxpo3eCb0GQneQpgkPXAhqrDsbGjZ0rhEpG5d+Pxzc+UOJWYRcv0Zdm3ww9/5Tq5Cxy4co8tnXYh9\nIhaf2j4286w9vJZJSycR+0QsbsroQ+w5tYcbvr6B/ZP3U7eGk0PWhagipAcuhLBr3jzo2hXeegt+\n/NE4kMWMsTP/hcf9w10K3mCcgvZgyIN2gzdAvxb98KzmycoDKwvTrHPfEryFME8CuBBVlNbw4Yfw\n3HPGzWDdu8OSJc7LnTqXwaacr8ipdYDYlFjT9R09d5Tvd3/Pi31fdJjPuqXsk8hPAKP3vWL/Cp7s\n/aTpuoQQEsCFqLJWrYL8fBg0yHg/fjx8U3bquYxnv55Lk5xQHu45kW+iTRQo8M76d5gYMpEmtezc\nF1rM2KCxRCZFsv/Mful9C1FOMgcuRBV1220wfDhMmmS8T08Hf3+IiQE/P9tlLBaN19+78O9+nzL0\nBh9u+fYWjjxzBHc3d4d1pWal0urjVsQ+HotvHV9T7Xvpj5eIOxXHxsSNMvctrkkyBy6EKGPvXtiy\nBcaNK0qrVQtGjjTu9Lbn3UUrQbvz7J030sm7E351/ErMVdvzv6j/MazdMNPBG+DxXo/zW/xv0vsW\nopxMBXCl1BCl1B6l1D6l1Es2ntdXSi1WSu1QSm1SSnUyW1YIUfGmTYNHHoGaNUumT5hg3Odtb7Dr\nww3TGNPqGdzcjE7B/V3vt7nlq7h8Sz6fbv6Up/s87VIbA+oHsOCuBUzuM9mlckIIg9MArpRyA6YD\ng4HOwBilVIdS2V4ForTW3YDxwCculBWiSsvIMIaz09MvT32nT8P8+fD442Wf9etnrETfurXss/At\neznlsZUPHxhbmDYmaAy/xf/GuSz7d3n/uu9XmtZqSu9mLl5VBozsOJLaHrVdLieEMNcD7w3Ea60P\na61zgfnA8FJ5OgGrALTWe4GWSilvk2WFqNIWLYLffnP9Nq/y+uILuPNO8LGxk0sp+4vZXlz4KX09\nH6ZBnaLLQBp7NeamVjexIHaB3fo+2fyJ9KKFuALMBPBmwNFi7xML0orbAYwEUEr1BloA/ibLClGl\nzZplrARfuNC1cjm5+S7XlZNjnLr2zDP289x/v9FDz84uSjt4/Cwxbt/z8bjHyuQf32283WH0XSd3\nEZcSx+hOo11uqxDi4lTUfeBvAx8rpbYDu4AowOV/faZMmVL4OiwsjLCwsApqnhBXRkICxMXBtm3Q\npYsxfO1p4rbLN+cv5+3IKaR9tMml+n78ETp0cHz1Z8uWEBQEv/4Ko0YZaU/NnkXLnFvp3q7s8vRh\n7YYxaekk9p/ZT5uGbUo8+3TzpzzW8zE83D1caqcQ16KIiAgiIiIq7POcbiNTSoUCU7TWQwrevwxo\nrfU7DsocBIKALmbLyjYyURX94x9G0P7gAwgLMw5VueMO5+X8nh3B8fo/s+K2fdzSo52purSGnj3h\nX/+CW291nPebb4wRgaVLISsnj9r/aMPXQxZz3809bOafHD6ZhjUbMiVsSmHa6YzTtP20LXuf3Gtq\n77cQoqTLsY1sC9BWKRWglPIA7gF+KdWIekqp6gWvJwGrtdZpZsoKUVXl5Rkrvh980Hg/apQxH+5M\nzKFkjnv+ResL9/FBuIkCBZasTOZc/nGGDnWed9QoWLsWTp6Ef3z7M7Vy
W9gN3gD3d7ufOTvmYNGW\nwrSvtn/F8MDhEryFuEKcBnCtdT7wJLACiAHma63jlFKPKKUeLsjWEditlIrDWHH+tKOyFf81hLj6\nrFgBzZtD587G+5EjjR5vTo7jcq/O/442uXfy+PUTWHvafAB/OvwZ6t79DG4mfi2vXdtY6DZ3Lny1\n62MmBjneAtbDtwc1q9dk3ZF1AORZ8pixZYYsXhPiCjI1B661Xg4ElkqbWez1ptLPHZUV4lowa1ZR\n7xugWTNjfnrVKhgyxHYZi0Xze/L/eGfADB4Z2pcXNh9ifcxh+nYOcFjX4ZOpHKmxjFQgMzeTmtVr\nOswPxmr0UU9tJX3oEf59350O8yqljMVs0d/QP6A/P+/5mYD6AXT37e60HiHEpSEnsQlxCaSkwJ9/\nwj33lEwfPdrxavRv/9xKvsriqdv74+lRjXb5w3l3qfNe+Kvf/4B/9iC6+4WwYv8KU20cMAByQj5h\nSIMn8fRw/rv8uK7jWLxnMRm5GXwS+QmTe0vvW4grSQK4EJfAt98a55DXLXVC6MiR8PPPxvy4Le+u\n/B8D6k0oPAnt3u6j+OuE8wC+5MjXTOw5gVEdR7F4z2JTbbyQcw63jr/w1RMPOs8M+NXxo0+zPrz2\n12scSj2hfjI9AAAgAElEQVTEiI4jTJUTQlwaEsCFqGBaG8PnDz1U9lnLltCqFaxeXfbZmfOZxKkf\n+fdd4wvTnrvzZtJqxrE9/pjd+n6NjCOz+hFeGj2IER1G8Ou+X8nJdzLRDiyIXcAtbW/Gp14jM18L\nMPaEf7DxAx7v9TjV3CpqF6oQojwkgAtRwTZvNhaq3XCD7ef2htH/+f1PNMzqSZ+OzQvTatf0oGXO\nrby95Ce79b2xdDY9Pe7D06Mazeo2o32j9vx18C+n7ZwdPZsJ3SY4zVfcnR3u5MaWNzKp+ySXygkh\nKp4EcCEqmHXxmrKzu3PUKPjpJ+Ou7uLm7/maezuXHc6+u+so/ki0PYyelZPH1pxv+eftE4o+v+Mo\nFsU5HnaPPx1P/Jl4hrS1s5rOjprVa7Jq/CoaeZnvtQshLg0J4ELYYLFAVJTr5dLTYcECY4W3PW3b\nGueUr19flLY+5jBnPaOYOqbsVQEvjBhMqtc24o6klHn2zsIV1MxtwW19Ohamjew4kiV7l5BvsX8Y\n4pwdc7g36F6qu1c398WEEFcdCeBC2PDHHxAa6voNYgsXQt++4Ff2RNISSh/q8s+F39CFe6hfu+w5\nqw3r1sQ/azBv/7SkzLOvts7mjuYTSqS1btAavzp+hXu2S7NoC3N2zmF8Nwe/ZQghrnoSwIWwYcEC\nyM0FV48ttrd4rbTRo40AbrFAXr6FtRdm8+ItD9jNP7LjKJYdKjksvv/YGRJrrOCte+8pk9/RMHrE\noQga1mxINx8HB6YLIa56EsCFKCU319jq9eijEB5uvtzO2ExiE5O47TbneTt2NLaYbd4Mn/yymmr5\ndRh7o/1DUV4aOYxTNddz+GRqYdor38+jRc5QAprWL5N/VMdRLI5bXOLoU6vyLF4TQlx9JIALUUpE\nBLRpA4884tod3pN/fIeaY8dR3eS0snU1+vR1XzPE54HCvd+2+DWqg0/mjby1aGlh2m9JX/NoH9u9\n9o7eHalTow5bkraUSL+QfYFf9v7C2KCx5hophLhqSQAXopQFC4zg2rUrZGZCfLy5cpvOL+SEx3ou\nZF8wlX/0aJi3+BwHPX7hP2PudZr/9rajWBJvDIsvWreL7GoneX7EzXbz2xpGXxi7kLCWYXjX8jbV\nRiHE1UsCuBDF5OUZw+ejRxvbwIYMMdcL/zUyjlz3VPoH3MCfB/80VVdQEOQG/ohf1kA6tnAeUF8Z\neTsnaq7i2OkLvLVsNn1q3o9HdXe7+a0BvPg1vbN3zJbFa0JUERLAhShm9WoICDBOSwMjgJuZB5/2\n+yKC3EcxrN0wlieYG3dXCprcPJcXBt1vKn8r3wY0zuzLmwt+ISp/Lq/d4TgQB/sEY9EWdp7cCcCB\nsweITYnl1vZOLgsXQlQKEsCFKGbBArjrrqL3t9wC69YZQ+mOrD+7kIeuH82QtkMITwgv0eu152Ta\nSZLyd/DoLYNMt29Yy1HMPPAitbLbMrhne4d5lVIlhtHn7JjD2C5j8XD3MF2fEOLqJQFciAL5+cYJ\naaNHF6XVrw/dusGaNfbL/bEtnpzqJ3lk6PV08u6E1po9p/Y4re/nPT8ztO1QPKuV3fttz8sjhmOp\neYIRrSaYym8N4BZt4Zsd3zA+WIbPhagqJIALUWDNGvD3h9atS6Y7G0b/IHwRndRIPKq7o5Qq7IU7\nsyhuEaM6jnKpjR1bePO477e8d/8YU/n7+PchNSuVL7d9SR2POoT4hLhUnxDi6iUBXIgCpYfPrYYO\ndbyQbe3phTwQWtRtH9p2qNN58NMZp4lMimRou6Eut3PGo2Np0qCWqbxuyo0RHUbw3IrnGN9tPMre\nAe1CiEpHArgQGMPnixeXHD63Cg6Gs2fh4MGyz9bsPEimxxEev7Xo6rGbW9/MxsSNpOfYP4d1yd4l\n3NL6Fryqe1VE8x0a1XEU2XnZ3NvV+VY1IUTlIQFcXJWysowFZDnOr7WuEOvWga+vcdFIaW5u9ofR\n3/1tEYF6BJ4eRXdj161Rlx6+PYg4FGG3vkVxixjdycZvC5fAgJYDiJwYiU9tn8tSnxDi8pAALq5K\nq1fDypWwdevlqc/e8LmVvWH0iOSFjO9VNhA7GkZPzUpl7eG13Nru8mznclNu9PDrcVnqEkJcPhLA\nxVUpPBxq1TIC+aWWn29cLGJr+NzqlluMtmRnF6VtjD1CRo0EJt8eVia/o4Vsv+77lRtb3UidGnUu\nsuVCiGuZBHBxVVq2DJ5/3rUAfuGC4+1e9mzYAE2aQHsH26obNYJOnYyhdqt3ly6mbf5wvDzLHn7e\ntWlXMnIzSDiTUObZwtiFLq8+F0KI0iSAi6tOQoIRjCdPNoJrbq65cvPmwfhybHN2NnxuVXoe/M/j\nCxnX3Xa3vXA7WXzJXviF7Av8degvbm9/u+sNFUKIYiSAi6tOeLgx59yokXGk6fbt5ssdOgSHD5uv\ny2Ixhs/NBPChQ4sC+NZ9SaR5xvHcnfYvExnadijL95ecB18Wv4zrm19Pg5oNzDdSCCFskAAurjrW\nAA4wYIC5YfScHPjrL/P5rdauz6VBo1wCA53n7dkTkpPhyBF4Z8lPtM67ndo17R9LOrD1QNYeXktW\nXlZh2qK4RYzueHlWnwshqjYJ4OKqkpkJa9cai8bACMgREc7LrV9vzGHfdZdrAfyF8H9Sf+RrpvK6\nucGgQcZq9D+SFjI22HEgblCzAV2bdmXNYWNiPiM3g9/3/87wDsPNN1AIIeyQAC6uKhEREBJinEEO\n0L+/EZzz8hyXs/bazQZ8qx1Zv3Cm/krT+YcOhS/nneRczR08f+ctTvMXnwf/PeF3evn1orFXY/MN\nFEIIOySAi6tK8eFzAG9vaN4
coqPNlevUCc6fh8RE53WtjzlMrkcKRzP3cC7rnKn2DR4MW9N+omXO\nrdSrXcNp/uLz4AvjZPW5EKLimArgSqkhSqk9Sql9SqmXbDyvq5T6RSkVrZTapZSaUOzZIaXUDqVU\nlFJqcwW2XVRBy5bBsGEl05zNax89CsePQ69exjB3//7mhtE/W7GcgLzB9G7Wm/VH15tqn7c31L9+\nEfd2NxeIQ3xDOJN5hr2n9rIsfhkjOo4wVU4IIZxxGsCVUm7AdGAw0BkYo5TqUCrbE0CM1joYuBH4\nQCllPVvSAoRprUO01r0rrumiqomPN+bAu3YtmR4W5nhYfPlyY27a3d14b3YYfdWRcIa1HcqAgAEO\njz0tLi0njXyfSF6+y/nwORinoA1uM5jnVzxPUJMgOc5UCFFhzPTAewPxWuvDWutcYD5QehWOBqzH\nStUBTmutrbOWymQ94hq3bJmx17r0hVn9+xsHqOTn2y5Xetg9LMx5DzwtM4cTNf9i8q2DCWsZxurD\n5la+/XXwL3o3601tj9qm8oMxD/5b/G+X7exzIcS1wUxgbQYcLfY+sSCtuOlAJ6XUMWAH8HSxZxr4\nQym1RSk16WIaK6q28PCyw+cATZuCjw/s3Fn2WU4OrFplzE1bdekCp0/DsWP265oZvo5amR0IbN6Y\n3s16E5Mcw4XsC87bmBDOkLZDTHybIoPaDMKzmicjO450qZwQQjhSUT3jwUCU1toPCAFmKKWsXZS+\nWuvuwDDgCaVUvwqqU1QhGRnGavOBA20/tzeMbt0+1qRJUZqZefD528LpWc/otntW86RXs15O58G1\n1oQnhDO0rWt3eDf2asyJ50/gX9ffpXJCCOFINedZSAJaFHvvX5BW3APAWwBa6/1KqYNAB2Cr1vp4\nQXqKUuonjCH5ddgwZcqUwtdhYWGEhYWZ+hKi8vvrL+jRA+rVs/18wACYPx+efbZkeunh8+L5IyJg\nzBjbn7crczkz+n9VlL9gHtxR73rf6X3kWfLo5N3Jybcpq56nnS8mhLhmREREEOHKPlcnlNbacQal\n3IG9wM3AcWAzMEZrHVcszwwgWWs9VSnVFNgKdAOyADetdZpSqhawApiqtV5hox7trC2i6nrySWO7\n2Etl9jgYjh83hsZTUowetlVQEHz5JYSGlswfHQ333AN79pT9rMi4o1z3TQhZb5zEo7qx8i3iUAQv\nr3yZTRM32W3jtE3TiE2J5Yvbv3D16wkhRBlKKbTWynlO25wOoWut84EnMYJvDDBfax2nlHpEKfVw\nQbY3geuVUjuBP4AXtdZngKbAOqVUFLAJWGoreItrm9bGAjZbPWkrX1/jbPRdu4rSim8fKy0oyDj2\n9MSJss9m/L6cFrmDCoM3QJ9mfdidvJu0nDS7bSjP/LcQQlwqZobQ0VovBwJLpc0s9vo4xjx46XIH\ngeCLbKOoJLSGmBijp+yKffuMxWhBQY7zWVeXd+tmvC+9faw4d3fo18/If/fdJZ+tPBzOHe1K7seu\nWb0mPfx6sP7Iega3LfNXmYzcDDYc3cCCuxa48M2EEOLSke1dosJs2GAETYvFtXLW3nfp7WOllT7Q\nxd78t5Wt7WRpmTkc91zFU8PKBukBAQPsbieLOBRBD98e1K1R13EjhRDiMpEALipMRAScOwd797pW\nzt72sdKsAdxisb19zFb+0utFZq3YiFdWOzq3bFImf1jLMLsHuixPWC7D50KIq4oEcFFhVq+Gxo0h\nMtJ8mbQ02LgRbrZ/rXYhf3/jkpPYWKO3365dye1jpQUHG3vBk5OL0uZtCad7XduBONQ/lJ0nd5Ke\nk17mWXm2jwkhxKUkAVxUiNxc2LQJnnjCtQD+11/GPdt1TY5MW3vhzobPoWgefM2aorQd6eHcF2q7\noFd1L0J8Q9hwdEOJ9IQzCaTnpNO1aVeb5YQQ4kqQAC4qxLZt0Lq1cRTqJvs7scpYtaro7m8zrMPi\nZgJ48fwAW/clkV0jkQkD+9jPb2Me3Dp8rpxN0gshxGUkAVxUiIgII1iGhBiryjMyzJVbvdooZ9aA\nAfD775CUBL1NXI1TfCHb9OXL8c+5pcT2sTL5bcyDy/YxIcTVSAK4qBCrVxvBskYNYxvZtm3Oy6Sm\nGjeQ2drHbU9AADRsnMegwRab28dKCwmBI0fg1ClYeWg5g9s47rZf538dUSeiyMg1fgPJysti7eG1\n3NLahWECIYS4DCSAi4uWl2csKrvhBuN9nz7mhtHXrjXyeni4Vl/tceNpcsscU3mrVYO+fWHFyjyO\n1VjJZBvbx4qr5VGLYJ9gNh7dCMCaw2vo2rQrDWo2cK2RQghxiUkAFxctKgpatDBWoINxrKmZhWyu\nDp+DcaFIcp0/ON/Q3PWfYNQxddZGPLNaEdTK+X3cxe8HD4+X4XMhxNVJAri4aNb5b6s+fS5dAI87\nFcf57PNsSjS/Ui4sDPbpcELqmNsGVvx+8OX7l8v2MSHEVUkCuLho1vlvq9atISvLWGhmz/nzEBdn\nbiFaiboOreauzneReD6Rs5lnTZXp3h3c2i/n3t7mAvH1za9n+/HtxKXEcSbzDCG+Ia41UgghLgMJ\n4OKi5OfDunXG/dtWSjnvha9fbyxe8/R0rb7Vh1dzc6ub6enXk81Jm02VuZB3hprNEpg4xP72seJq\ne9QmqGkQr0e8zuA2g3FT8n8TIcTVR/5lEhdlxw7w8yt7IpqzhWylh93N0FoTcSiCAQEDCG0WanoY\nfe3htfQNuA6PatVN1zUgYAALYhfI/LcQ4qolAVxclIiIksPnVs4WspVn/nvf6X14uHvQsn5LQv1D\n2ZRkLoCvPryaAQGuVRbWMgyFYlCbQa41UgghLhMJ4OKi2AvEvXrB9u3GFrPS0tJg924jyLtU1+HV\nRmBVij7+fYhMjMSinV99FnEogrCWYS7VNSBgADOGzaCxV2PXGimEEJeJBHBRbhaLsZfbVgCvX9+4\nfGT37rLPNmwwFpbVrOlafdbhcwCf2j7U86xH/Ol4h2VSs1KJPxNPT7+eLtVVs3pNHuv1mGsNFEKI\ny0gCuCi3nTvB2xt87GyttjeMXt7579WHVzOgZVHBUH/n8+BrD68l1D8UD3cXT4sRQoirnATwKkRr\nWLbs8tVXevtYafZWopdn/nv/2f0oFG0atClMM7OQrTzz30IIURlIAK9CDh2CW2+FM2cuT33OAnFo\naNmV6BkZEB0N113nWl0RhyIY0HJAiRvBzCxkkwAuhKiqJIBXIdbe7o4dl74ui8W4Z9tRAO/SBY4e\nhXPnitI2boTgYKhVy7X6bAXiYJ9g9p3eR3pOus0y57LOsefUHno3c/G0GCGEqAQkgFchkZHGbWBR\nUZe+rpgYaNAAmjWzn6daNeM2sC1bitLKPf99aHWZleQ1qtWga9OubD221Wa59UfX08uvFzWq1XCt\nQiGEqAQkgFchmzfD6NHGELVZGRnlC/hm57FLD6OXZ/77YOpBci25tGvYruznO5gHL8/2
MSGEqCwk\ngFcRublG4J440bUAvmgRPPCA6/WZDcTFF7JlZhp7w/v2dbGuQ8bwefH5bytH8+Ay/y2EqMokgFcR\nu3ZBy5ZGjzchwbhMxIzISGM4PDvbfF1auxbAN20yykRGGvPitWubrwuKDnCxxbqVTGtdIv1C9gVi\nU2Lp42/u/HMhhKhsJIBXEZGRRrD09IS2bY2gbLacUhAba76uuDgjCLdo4Tyvv78xL3/wYPnmv8Fx\nT7pFPaMRR84dKZG+/uh6evj2wLOai7elCCFEJSEBvIrYvNkI4GAsHDMzjJ6VZQT6O+5wbR78u5XR\ndB24y3R+6zB6eea/D6ceJiM3gw6NO9h8rpSyeaCLddhdCCGqKgngVURkZNHd2sHB5gJ4VBR06ADX\nX+9aAJ935H1S288wnb9PHyN4b9kC/fqZrweKet+25r+tbC1kizgsC9iEEFWbBPAq4Nw5OHLEmF8G\n8wHc2msPDnYtgCfpSM54mC8QGgrffQcdO0LduubrAXM96dIL2dJy0th1cheh/i7eliKEEJWIqQCu\nlBqilNqjlNqnlHrJxvO6SqlflFLRSqldSqkJZsuKi7d1qxGEqxdcd92tm3GYi8XJRV3WXntwsHGu\nubP8AAlJp8n1PM7+tN3kWWxcNWZDjx7GcH155r/N9KR7+vVk58mdZOcZK/E2HN1Ad9/u1Kzu4m0p\nQghRiTgN4EopN2A6MBjoDIxRSpWekHwCiNFaBwM3Ah8opaqZLCsuknUBm1XDhsYhKwcOmCvXsKHx\ns3+/87rmrd1M/fTeNKvTjL2n9ppqX61a0Gjs8wRev89UfqvE84mczz5PJ+9Ojj/foxbtG7Un+oQx\n7CDz30KIa4GZHnhvIF5rfVhrnQvMB4aXyqOBOgWv6wCntdZ5JsuKi1R8AZuVs4VsKSlw6pQxBw7m\nh9H/3BtJhzp9CPENIeqEuWH0fEs+GR2+JK/5KlP5rVYfWk3/gP4O57+tis+Dl761TAghqiIzAbwZ\ncLTY+8SCtOKmA52UUseAHcDTLpQVF8G6v7p3qeO+nc2Db94MvXqBW8HfALMr12NSIwlr24cQnxC2\nH99uqo17Tu0hLfcCUSfM5beKOBRBWECYqbzWefCM3AyiT0Rznb+Lt6UIIUQlU1GL2AYDUVprPyAE\nmKGUcvG4DlEeiYnG3HVAQMl0Zz3q0r32kBDnPXCLRXO6xmbu7mcEcLM98MikSNo0aGM64Fu50pMO\n9Q9l49GNbDy6kW4+3ajl4eJtKUIIUclUM5EnCSh+ZId/QVpxDwBvAWit9yulDgIdTJYtNGXKlMLX\nYWFhhDm6bFoARb3v0qPMznrgkZHw2GMl8zsL4H9GJeCWX5vgNr4kp7sTfSIarbXTIe7IxEgmdZ/E\nv9b8i9z8XKq7V3fyreBk2klSMlLo0qSL07wA7Rq143z2eebvnm+61y6EEJdTREQEERERFfZ5ZgL4\nFqCtUioAOA7cA4wplecwMBBYr5RqCrQHDgDnTJQtVDyAC3NKL2CzCggwLipJToYmTUo+09rogc+e\nXZTWvLlxnvrx4+Dra7uuxZsj8c03KmtSqwm1qtfiUOohWjVo5bCNm5I2MbH7RObsnENsSizdfLo5\n/15JkfRp1gc3ZW6QyE250ce/D3N2zmHpmKWmygghxOVUumM6derUi/o8p/86aq3zgSeBFUAMMF9r\nHaeUekQp9XBBtjeB65VSO4E/gBe11mfslb2oFosSbC1gA6NHHhxs+27w+HioUwd8fErmdzYPvv5w\nJMHeRZWZWciWlpNGwpkEuvl0c2nePDLRCOCuCG0WSr4ln+ubX+9SOSGEqIxMdW+01su11oFa63Za\n67cL0mZqrb8oeH1caz1Ya9214Geeo7KiYuTlGbd79epl+7m9YXF7Qd/ZPPiB7EiGBhUL4CYC8tZj\nW+natCse7h509+1uOoBvStrk8kEsYS3D6NeiH7U9ZPmFEKLqk5PYKrHYWGjWDOrXt/3c3jy4vWF3\nR/PgqWlZpHvF8LcbuhemmVnIFpkYSWgzIxB39+1uauFbviWfLUlb6N2st9O8xQ1oOYC/xv/lUhkh\nhKisJIBXYra2jxXnKIDbKudoCH3humhqZgTSuJ5XUX7fEKKOOwngSZGFV3oG+wSz4+QO8i35DsvE\nnYrDp7YPjbwaOcxni5k940IIURVIAK/E7PWkrTp2hEOHjMVsVtYbyHr0KJs/MBCOHYPz58s+W7Yz\nklYeJSsLqBdAVl4WJ9NO2qxfa82mxE2Fc9n1PevTpFYT4s/EO/xemxJdHz4XQohrjQTwSszeXLaV\nh4dx0tquYjd/RkdD+/bg5VU2v7u7cSGKrYVv209Gcl3zkpUppRwuZEs8n0i+zqdl/ZaFad19uzvt\ntUsAF0II5ySAV1JpacbZ5V27Os5XehjdWdC3t5AtSUVyZ6+yBR0tZLP2vosPa3f3cb6QzbqFTAgh\nhH0SwCupbduM4O3h4Thf6QDubNjd1jx43JEU8qqfZkjPwLL5HSxksxWIQ3xD2O7gSNXz2ec5ePYg\nXZs6+c1ECCGucRLAKylnC9isbAVwZwvfSvfAf1i7mYaZvajmXvavi6OFbJFJkWWGwkN8jPxaa5tl\ntiRtIcQ3xNRpbUIIcS2TAF5JORsKt+rWzZgDz883bh9LSSm6gcyWoCDYuxdycorSVu2LpGNd25UF\nNgrkRNoJzmWdK5Gem59L1PEoejUruUm9ae2meFX34vC5wzY/T4bPhRDCHAnglZSzoXCrevWgaVPj\n9LXNm6FnT2Oxmj1eXtCqlbFS3Sr2/CZubGe7Mnc3d4KaBrHjZMmVb7uSd9Gyfkvq1qhbpkyIr+N5\nc1nAJoQQzkkAr4SOHYPMTGjd2lx+6zC62V578XnwvHwLZzy3cM8N9gvaWshWfPtYafYWslm3nUkA\nF0II5ySAV0KbN9u+gcweawA322svPg/+x/Z9uOc2oHPLJnbz21rIVvwAl9Lsnch2KPUQ1d2r41/X\n33kjhRDiGicBvBLavt32QSz2WAOyNfA7U3wr2eLNkfhZHEd9W3u7IxPLLmArnt9WD1x630IIYZ4E\n8EooOtoIymaFhEBEBNSqZf+q0OKst5hZLLDxSCQhTRwH8C5NupBwJoGsvCwAzmaeJelCEp29O9vM\n71/Xn9z8XI5fOF4ifVPipsJz04UQQjgmAbwScjWAN2tmXB9qZvgcoFEj44KUAwfgQE4kw7o6Llij\nWg3aNWrH7uTdAGxO2kwP3x64u9leLaeUsjmMvilpk91hdyGEECVJAK9kzpyB1FRjpbhZSoH/Tb/S\npleC6TIhIRCxLpNMrz3cdUOI8/zFFrKZGQovPYyenZfN7uTd9PB1YW5ACCGuYRLAK5kdO4y93W4u\n/i+nbnibGsGLTecPCYEZP23HK6MTDep4Os/vU3Sgi5m93KUDeNSJKAIbBVLLo5bpNgohxLVMAngl\nEx1tBHBXWLSFhLQdHEzfbbpMSAhEp0TSpoa
5IW3rkLjW2uEK9MLPL7X1zNG2MyGEEGVJAK9kXJ3/\nBth/Zj8ZuRmFc9RmBAcDzSK5voW5oNrNpxu7k3ez9/RealWvhV8dP4f52zRsw9mss5zJPAPYPnZV\nCCGEfRLAK5nyBPDoE9Hc2PJG9pzaQ74l31SZFi3AvcVmRl9nLoDXrVEX3zq+zNkxx9RCNDflRrBP\ncOGwu2whE0II10gAr0RycowjUTvb3p1lV/SJaPq16IdPbR8OnD1gqszZrDN4NT7NTcFtTdcT4hPC\nrKhZpreCWYfRT6adJDUrlXaN2pmuSwghrnUSwCuR2Fhj9XnNmq6VizoRRYhPCJ2bdDY9jL7jxA66\n+XTDTZn/KxLiE0JyerLprWDWeXProjdX6hJCiGud/ItZiZRn+ByMHniwTzBdvLuYDuDWoO+K7r7d\ncVfudPftbjr/9uPbZfhcCCHKQQJ4JVKeAH4y7SSZeZm0qNeCLk26sDvl0gXw65pfxxs3voFXdS9T\n+Ts07sDR80dZeWClrEAXQggXSQCvRHbscD2A7zi5g2CfYJRSRgA32wM/HkWwj2uV1a1Rl1dueMV0\n/mpu1ejSpAtbjm2hdzMTh7QLIYQoVO1KN0CYo3X59oBHn4gu7EkHNg7kwNkD5OTn4OHuYbdMZm4m\nB84eoHMTF1fLlUN3n+6kZqXSyKvRJa9LCCGqEumBVxJHjhiL15rYv9XTpqgTRT1pz2qetKzfkn2n\n9zksszt5N4GNAx0G+YpyQ8ANDGw18JLXI4QQVY0E8EriYhewWXX2dr4SvXjQv9TGBo1lxq0zLktd\nQghRlUgAryTKM/+dnpPO4dTDdGjcoTDNzDx41HHXF7AJIYS4vEwFcKXUEKXUHqXUPqXUSzae/10p\nFaWU2q6U2qWUylNK1S94dkgptaPg+eaK/gLXivLMf+9O3k2Hxh1KDIWbCeDRJ6MlgAshxFXOaQBX\nSrkB04HBQGdgjFKqQ/E8Wuv3tdYhWuvuwCtAhNY6teCxBQgreC5LjcupPEPotraCOQvg+ZZ8dp3c\nRTcfF39bEEIIcVmZ6YH3BuK11oe11rnAfGC4g/xjgHnF3iuT9Qg7zp2D5GRoa/5UU6Ds/DdA24Zt\nSbqQREZuhs0y+07vw6e2D3Vr1C1vc4UQQlwGZgJrM+BosfeJBWllKKVqAkOARcWSNfCHUmqLUmpS\neXBH7XcAACAASURBVBt6Ldu5E4KCwN3dtXK2Ang1t2oENgokLiXObpkQXxk+F0KIq11F94xvB9YV\nGz4H6FswtD4MeEIp1a+C66zyyjP/nW/JZ3fybptD4Y7ORC/PCWxCCCEuPzMHuSQBLYq99y9Is+Ue\nSg6fo7U+XvDfFKXUTxhD8utsFZ4yZUrh67CwMMLCwkw0r+qLjoZevVwr42go3NGZ6FEnong29Nny\nNFMIIYQDERERREREVNjnKa214wxKuQN7gZuB48BmYIzWOq5UvnrAAcBfa51ZkOYFuGmt05RStYAV\nwFSt9Qob9WhnbblW9egBM2ZAqAv3fczbNY9FcYtY+LeFZZ4t3buUz7Z+Rvi94SXStdZ4v+fNrsd2\n4VvH92KbLYQQwgGlFFprVd7yTnvgWut8pdSTGMHXDZiltY5TSj1iPNZfFGS9E/jdGrwLNAV+Ukrp\ngrrm2grewr7cXIiLM+bAXWFr/tvK3kr0pAtJVHOrJsFbCCEqAVNnoWutlwOBpdJmlnr/DfBNqbSD\nwOU50usyysmBkyehefNLX9fevUY9tWq5Vi76ZDRP9X7K5rOA+gGczTxLalYq9T3rF6aX5wITIYQQ\nV4Zs7yqHJUtg3LjLU1d59n9rrR2epuam3OjcpDMxyTEl0mUBmxBCVB4SwMshJsboGV8O5Qngx9OO\no9H41fGzm6ezd2diUmwEcNlCJoQQlYIE8HKIizOG0M+du/R1lecMdOv8t1L210bYmgd3NG8uhBDi\n6iIBvBzi4sDTE+LjzZc5cz6T1LQsl+q5mDvAg5s6DsSlA/jZzLOczjhN24YuHvcmhBDiipAA7qK8\nPCNw33wz7HN8rXYJg97+P0Z/+L5LdR07BkqBr4uLws2cplY6gEefiKZr0664KfkrIYQQlYH8a+2i\ngwfBxwdCQlwL4AczdhJ/LsZ5xmKs898ORsJtMnOft29tX/IseSSnJxeWkQVsQghReUgAd1FcHHTs\nCO3buxbAUz1iSbG4tvKtPPPfF7IvcOzCMdo3au8wn1KqRC9c5r+FEKJykQDuovIE8MMnU7HUOE2m\n1z4sFvOnza2Oi6Fz11yX2rfz5E46e3emmpvzLf6dvYu2kskKdCGEqFwkgLsoNhY6dYJ27YwAbub0\n19+3x+GVFoTKq8X2hGOm64rwHk1G0z9dal/0iWjTQ+HWHnhmbiYJZxLo7N3ZpbqEEEJcORLAXWTt\ngTdsCDVqGNvJnNkQH4uPeyfqZAeyere5YfTz6dnk1Ikno6Zrw+6uDIV3adKF3Sm7iUmJoX2j9tSo\nVsOluoQQQlw5EsBdoDXs2WMEcDA/jL77RCztG3TCp3p7th0yN+7+R9Q+cMsnIXWPS200s4DNynqt\n6Pbj22UBmxBCVDISwF2QlAReXtCggfHebAA/khlHzxadaNsgkD0p5nrUa+JiqZ7XgL2nzffA8yx5\nxKbEEtTU3M0njb0a41Xdi6X7lkoAF0KISkYCuAus899WZgP4GfdYbgzqRHDzQBKzzAXkqMQYOrrd\nwZ5T5nvgCWcS8KvjR22P2qbLdGnSheUJy2UFuhBCVDISwF1gnf+2at/e+WlsJ86kke+ZTL/OLekb\n2J6zbuaG0A9ciKV/s8Gcyz7H+ezzpsrsTt5N5yauLUTr7N2ZPEueBHAhhKhkJIC7oHQAt65Ed2TF\n9j3UTA/Eo7o7/YNak+eVyPn0bKd1pRBDWKcutGvYjr2nzPXaY5JjXF5J3qVJF1o3aE09z3oulRNC\nCHFlSQB3QWxsyQDeti0cOAD5+fbLrNsbSxM3Y9y9dk0Pqme0IGLXfof1XMjIIcfrELeEtKdD4w6m\n58FjUmLo0qSLqbxWg9sM5uW+L7tURgghxJUnAdwFcXEl58C9vMDbG44csV9m5/FY2tUvKtTAEsj6\nPY4D8sqofXhkBlC3Vg0CGwWangePSXG9B968XnMm9ZjkUhkhhBBXngRwk06dgtxc4xz04pwtZDuU\nHkv35kUBvHnN9uxMcjzuvjo2lsbaCMRme+A5+TkcOHuAwMaBTvMKIYSo/CSAm2Sd/y59sYizAH7a\n7f/bu/foOKvz3uPfZ3SXrLtkSbalGUn2yDZgY5eYSzDIkFCbcApZTcBOV9LSU8pKQ+Bk0TR2FzSw\nmrJoD6c97UlzIG1IXU4hhxAC5kADhCAgCcQOtjG2LGuk0R1fJOtm3WVpnz9mRh6N5vJKlqUZzfNZ\ny8t633n3zLslL/+097svtVT7NdvXLq+isTd8IB/qOEZFpqdMVUGVpWfgrrMuyrLLSE1MjXitUkqp\n2K
cBblHgFDKfcAHe3T/M+bQOqjdWTp37VHkVp8bDB3Jjfy2bVnpa4M58J65uFxOTYR604x2Brkuh\nKqVU3NAAtyhwBLpPuAB/89AJUoYqSUu5sLHIjZc7GUgN34XeyTFuWOcd+Ja8jIL0Alr7wjxoZ24D\n2JRSSsUuDXCL5hLg7x6vpZDpzfYN5cUY2yiNn3QHLTMwPMZYehO3bL7wLNvKQLa5DGBTSikVuzTA\nLQoV4A4HnDwJIyMzX/vok1oqs6YHuM0mpA9X8dZHwbvR3zrkImnIMwLdx8pAtmNnjs16ERellFKx\nSwPcgnPn4OxZsNtnvpaY6AnxxiBTu5sGatm0auaD80KbkwPu4M32mtpjFJjpZSK1wEfPj9LS14Iz\n3xm2HkoppZYODXAL6uo8XeUJCcFfD9WN3sXxqWfZ/iqyqjh2KniL+lB7LZVZ01vSkVrgJ86eoDyn\nnOSE5NCVUEoptaRogFsQqvvcJ9ia6J5n2c3cfOWaGddfsaKKlsHggdzQf4wrVwS0wAvCt8Dnsga6\nUkqp2KYBbkGoKWQ+wdZEf+uQa2o1tUDXrHFyluBd6J3UcuP66WG8KmsV/aP9ITc1OXbmGJcX6gh0\npZSKJxrgFlhpgQcG+DvHa8mfDJ76N290MprewNj49Lndnla7m89smv4s2yY2nPnOkAu6HOvUAWxK\nKRVvLAW4iGwXkToRqReRbwV5/c9F5JCIHBSRj0XkvIjkWCkbC+YS4Ifaa6dWUwu0PDeDhNECPjg+\nfW73Lw43kDRcRs6ymauprS1YG7IbXaeQKaVU/IkY4CJiA74L/C5wGbBLRNb6X2OMecIYs8kYsxnY\nA9QYY3qtlI12o6OezUpWrw59zYoVMDAAfX0XzjX21bJxReh+96zxKt6tnd6irqk9RkGIVntVflXQ\ngWzD48O097ezOi/MDSqllFpyrLTAtwAuY0yLMWYc+BFwe5jrdwHPzbFs1Kmvh/JySA4zwFvE8xzc\nfyDbGWq5fm3oZvuKFCeHWqc32w+2HaMiM3hLOtRUsuNdx1mdt5qkhKTwFVFKKbWkWAnwlUCb33G7\n99wMIpIGbAd+Mtuy0SpS97mPfzf6yNh5RtMb+Oym0DuDOfOrqA9oUTf213LlyuABHmoq2bEzuoSq\nUkrFo/kexPZfgF8aY3rn+X0XzVwC/O2PGkkcWUFBdnrI6zeXVdExOj2Q/ddAn/H++U4auhtmbGqi\nz7+VUio+JUa+hA6gzO94lfdcMDu50H0+27I88sgjU19XV1dTXV1t4fYurePH4bbbIl/ndMJ//qfn\n65pjteRNhJl3Bly/zskjH17oQh8aGWc03T1tDXR/GckZFKYX0tLXQkVuxdT5Y53H+OMr/zjyDSql\nlFpUNTU11NTUzNv7WQnwA8BqEbEDJ/GE9K7Ai0QkG7gR+IPZlvXxD/BoUVsLf/EXka9zOuEf/9Hz\n9cG2WhwZ4QP8uvV2JlI6OdMzyPLcDH5+2EXScGnQEeg+vgVdpgW4roGulFIxIbBh+uijj17U+0Xs\nQjfGTAD3AW8Ax4AfGWOOi8i9IvKnfpfeAbxujBmOVPai7niOmpvh4MHZlZmY8AxMqwr9KHuKbzEX\nY6Cht5YNJeEDPDkpgZShSt4+4hn59s6xWgomwwfx2vy10+aCD44NcmrgFJW5lWFKKaWUWoqstMAx\nxvwMqAo491TA8V5gr5Wyi+Hf/92zpvmzz1ov09QERUWQkRH52rw8SEmB06fh9GQt1zv/W+QyxskH\nrnruuvFKDrYfozzEvHGfqoIqPj798dRxbWctVQVVJNhCLNKulFJqyYqbldgaGsDtnl2ZSEuoBlqz\nBo4dn2A4vZ7Pbo483d2+rIojn3ha1I19tVy5IkILPGAkug5gU0qp+BVXAR5sy89wrI5A93E64Sc/\nbyFhrIAV+ZkRr7+sqAp3nyeQz4QZge4TuJjLsTMa4EopFa/iJsBdLujuhv7g+4EE9XLbv5K5+uPI\nF3o5nfD/flNL7nlrzfZPVTjpnKz3jkBvDDtvHGBl1krOjZ6jb8Sz5Juuga6UUvErLgK8txeGhz3d\n4U1N1st9nPg0Z7Pfsny90wltI7WUpVkL8Js2VDGYdsI7An0VeVlpYa+f2tTE2wrXLnSllIpfcRHg\njY2etcwrK2f3HHww1cVQsvV+d6cTKKzl8iJrAb5mVT4ymcT/+eXb5EcYge7jm0rWP9pP11AX5bnl\nlu9PKaXU0hEXAe5yeQaYVVRYD/Cmkz2YtC4+GWmw/DmrVwOFtVy3xvqD84wRJz9vfzHkGuiBfFPJ\najtrWVewDpvExY9QKaVUgLj437+hwROuFRXWB7LVfOzCNp5JY4/1FnhamiGppI5bt1gP8OLEKnqy\n3wm7c5m/qoIq6s7W6QIuSikV5+IuwK22wA+4XRSPbKO1r3XG+uOhnBo4Rc6yVEoLci3fW2VOFdgm\nuHGdxRZ4gacFrs+/lVIqvsVFgM+lC732lIvy9CsozCikrb8tcgGg/mw9znznrO5t46oqmLRFHIHu\nsyZvDQ3dDRw5fUQDXCml4lhcBLivBe5wQGurZ4nUSJrPuVhftIbK3Eoau611o88lwHds2khG/+9E\nHIHuk5GcQdGyIt5rfU+3EVVKqTi25AO8vx8GBqCkBFJTobAQ2tsjl+uadHFVhdMT4Bafg7u6XazJ\nWzOr+6veWMHAP+yfVZmq/CqSE5Ipyy6LfLFSSqklackHuK/1LeI5ttKNPjlpGEp1cePla6jMu7Qt\n8LlYW7CW9YXrEV+llFJKxZ24CXAfKwFe334WgDUr82fVAl+oAN9UvImrSq665J+jlFIqelnajSyW\n+Qaw+VgJ8HeO1pM+sgabTTwtcAsBPjE5gbvHzeq81RGvvVh3b7qbP7ryjy755yillIpecdcCt7Ia\n22+bXBTYPKnvG8RmjAlbpqWvhaJlRaQlWRuMdrG0+1wppeJb3AW4lcVcjp924cj0BHhuWi6JtkS6\nhrrCllmo7nOllFIK4iDA59KF3jrgYn3xhUJWutHrz9bjzNMAV0optTCWdICfO+eZRlZScuFcYSGM\njEBfX+hyXcbFlkq/AM+tpKE7/Jro2gJXSim1kJZ0gDc2ep552/xqKeJphYfaVnRy0jCc7qL6igsB\nvjpvdcSpZBrgSimlFtKSDvDA7nOfysrQz8E/bjqNTKTiKL6wnrmVqWQa4EoppRbSkg7wwAFsPuGe\ng797rJ6MkempH+kZ+PD4MKcGTmHPsV/M7SqllFKWaYAH+LDZxfLEgACPsB56Y08j5bnlJNqW/LR6\npZRSUWJJB3ioLvRwAV53xkV51vRCJZkl9I/2MzA2EPxzzs5+DXSllFLqYizpAJ9LC7xtyMVlJdPD\n2CY2ynPLcfcEL6TPv5VSSi20JRvgAwPQ2wsrV858zeGAtjY4f37ma924uHr1zNZ0uG50DXCllFIL\nbckGeGOjp6VtC1LDlBRYvnzmtqLnJyYZSWukekOIAA8xkK2
+WwNcKaXUwlqyAR6q+9wnWDf6Qdcn\n2MazWJGfOeP6cNuKagtcKaXUQluyAe5yzT7A36utZ9lY8MFooVrgvSO9DI4NUrKsJEgppZRS6tJY\nsgHe0BB8BLpP0BZ4i4uipBABHmIuuOusC2e+U3cHU0optaAsBbiIbBeROhGpF5FvhbimWkQOichR\nEXnb73yziHzkfW3/fN14JJG60IOtxnaiy0VFdvAAd+Q4aO9vZ3xifNp57T5XSim1GCKuPCIiNuC7\nwM3AJ8ABEXnZGFPnd0028M/ALcaYDhEp8HuLSaDaGNMzv7ce3ly60DuGXGyrvDbo9ckJyZQsK6Gl\nr4XVeRfeWANcKaXUYrDSAt8CuIwxLcaYceBHwO0B13wJ+IkxpgPAGOO/ebZY/Jx5MzgI3d1QWhr6\nmmAB3i0urnGG7ncPNpBNR6ArpZRaDFaCdSXQ5nfc7j3nzwnkicjbInJARL7s95oB3vSev+fibtca\ntxvKy4NPIfMpKICxMc9ccYCx8QnG0pu48YrQzfZgA9m0Ba6UUmoxzNfi3YnAZuAmIAN4X0TeN8Y0\nAJ82xpwUkUI8QX7cGPPLYG/yyCOPTH1dXV1NdXX1nG4m1BKq/nzbirrdsHkz/KaujYTRAgqy00OW\nCdxW1BhD/dl6XUZVKaVURDU1NdTU1Mzb+1kJ8A6gzO94lfecv3agyxgzAoyIyLvARqDBGHMSwBjT\nKSI/xdMlHzHAL0akAWw+lZUXAvyXx+vJHA8fxJW5lfy67ddTx6cHT5OamEpuWm6YUkoppdTMhumj\njz56Ue9npQv9ALBaROwikgzsBPYFXPMycL2IJIhIOnA1cFxE0kVkGYCIZAC3AEcv6o4tsBrg/s/B\nD7W6KE6OEOABU8m0+1wppdRiidgCN8ZMiMh9wBt4Av8HxpjjInKv52XzfWNMnYi8DhwBJoDvG2Nq\nRaQc+KmIGO9n/Ycx5o1LVx0PlwvuvDPydRUVcOSIt8xZF5W5kVvg7h43xhhERANcKaXUorH0DNwY\n8zOgKuDcUwHHTwBPBJxrAq68yHuctdm0wF96yfN1x4iLHeu2hb0+MyWTjKQMTg2coiSzxBPgeRrg\nSimlFt6SW4ltaAg6O8NPIfOpqLiwmEuvzcW1YaaQ+fh3o2sLXCml1GJZcgHum0KWkBD5WofDsyNZ\n/8A44+kt3HBFRcQy/tuKaoArpZRaLFEd4I//+E1+8PpvZlXGavc5QHIyFBfDT95qJnF4BdkZqRHL\n+OaCT0xO4O5xT1uVTSmllFooUR3g//KbZ/j+ey/OqkykJVQDVVTAvl+7yDpvbS63rwu9pa+FomVF\npCWlzer+lFJKqfkQ1QHedb6Jk0MtsyoTaReyQBUV8MEJFytSLAZ4biUN3Q3afa6UUmpRRXWADyY1\n02NmF+C/7HuWEkef5esrKuDUuIvVFldT862HriPQlVJKLaaoDfD+wVEmMjoYSmqeVbkTq/6SnmXv\nW76+shLIc7FxlbUAL8ooYuT8CAc+OaAtcKWUUosmagP8QH0bCYOlTKZ00zswYqnM0Mg4ExltDKc0\nW/6cigog38W1VdYCXESoyK3g9YbXWZOva6ArpZRaHFEb4B82NpM5Xkni8CoOnGiLXADPhiTYJmnt\nb7L8OaWOMcjsYOvl5ZbLVOZV0jnUqS1wpZRSiyZqA/xoexOFSQ6WnbfzobvZUpkDDZ6FzZv7rF0P\ncC6hmbLcVaSnJlkuU5lbSaItEUeOw3IZpZRSaj7N13ai866xu5nSzHI4B8c/sTaQ7Wh7E9nja2nu\nbbb8Oe4eN87CyAu4+Fudt3oqxJVSSqnFELUJ1DHYxPbKHUyaSdzd1gK84WwTVUk30dz7guXPcfe4\nqciZXYBfV3odHf2BO6oqpZRSCydqu9DPTjRzRamDynw7HQPNlsq0D7rZXHQt/aP9DI4NWirj7nFT\nkTu7AN9QtIG/vumvZ1VGKaWUmk9RG+BDyc1scZZz+SoHZyestcC7TRNXllVgz7bT0metTGNPI5V5\nlRdzq0oppdSCi8oA7+4fZjKlm40VJWyusDOYZC2Mh5KbuKaqAkeOw/Jz8Lm0wJVSSqnFFpUB/psT\nrSQNlZKclMBVzlVMpJ1kaGQ8bJlT3QOYpAGuKC/CkeOgqSfyVDJjjAa4UkqpmBSVAf5hYxOZEw4A\n0lOTSBgu5mBD+EFjv6ptInnIgc0mlOeUW2qBdw11kWRLIic1Zx7uWimllFo4URngtZ80U5jsmDrO\nGLfzYWP4bvSDTU1kG09L2pHjsDQX3N3j1uffSimlYlJUBri7u5myzAsro+Ul2Dna3hy2TO0nbopT\nPGWsdqE39jRq97lSSqmYFJUB/slQE2sKHVPHKzMcNHaFb4E39TZRnnMhwK10oc9lDrhSSikVDaIy\nwLtNMxvtF1rgFXl22iPMBT810sS6Yk8YL89YztD4EOdGz4UtowPYlFJKxaqoDPCh5Ca2OB1Tx+tW\n2OkaD98C7xU3mys8oS8illrhGuBKKaViVdQF+JmeQUzSOa4oL5o6t7nCwbmE0AE+OWkYTWvmunUX\nWu1WA1wHsSmlFDgcDkRE/1yCPw6H45L8zKJuLfQP6lpIHraTYLvwu8XVa8s4n97G+YlJEhNm/s5x\nvLUTmUhlVWHW1LlIU8lGz49yevA0q7JWzev9K6VULGppacEYs9i3sSSJyCV536hrgR90N5E16Zh2\nLmdZKraxXA43ngxa5v06N2mj0/fzjtQCb+5tpjSrVHcUU0opFZOiLsBrTzazPLl8xvm0MQcfNgTv\nRv+opYk8mRngTb2hp5Lp82+llFKxLOoC3N3TRFmWY8b5PLFzpLU5aJkTZ5pYkT49wMtzw3ehu3vc\nVObq82+llFKxKeoC/NRwM2uLZrbAS9IdNISYC97S72Z1/vTWdKQudF3ERSmllpa2tjaysrIsPctv\naWnBZrMxOTkJwK233sozzzwDwN69e9m6deslvdf5YCnARWS7iNSJSL2IfCvENdUickhEjorI27Mp\n66/HNLOhzDHjfHmunbb+4AHeOd7E5Sunh35+Wj5jE2P0jfQFLaNd6EoptbSUlpbS399vedCY/3Wv\nvfYaX/7yl4O+Fq0iBriI2IDvAr8LXAbsEpG1AddkA/8M3GaMuRz4otWygYZTmri6yjHj/NpiO2fG\nm4OWOZfYxFWV0wM80lxwDXCllFKxzEoLfAvgMsa0GGPGgR8Btwdc8yXgJ8aYDgBjTNcsyk5p7+zH\nJIywtrRwxmubyh2ck5kt8JGx85xP6+Da9fYZr4V6Dq7biCqlVOwoLy/niSeeYOPGjWRmZnLPPfdw\n5swZbr31VrKysrjlllvo6+ub0S2+bds2/uqv/orrr7+erKwstm/fTnd3d9DP2LZtG08//XTQ1775\nzW9yww03cO6cZ3XPp59+mvXr15Ofn8+OHTtobW29NBWPwEqArwTa/I7bvef8OYE8EXlbRA6IyJdn\nUXbKB3XNpAx7tg
QNdPVaO2PpLUxOTn+2sb+ujYSRIpalJc8o48gOPhK9c6iT1MRUslOzQ92KUkqp\nKPLiiy/y1ltvUV9fz759+7j11lt5/PHH6erqYmJign/6p38CZnZ9P/fcc+zdu5fOzk5GR0d54okn\nLH+mMYZ77rmHo0eP8uabb5KZmcnLL7/M448/zksvvURnZydbt25l165d81pXq+ZrEFsisBnYAWwH\nHhaR1bN9k0PNzWSbmQPYAIrzliHn0zje2jnt/IGGJpaNBy8Tqgu9sVsHsCmlVCz5+te/TkFBASUl\nJWzdupWrr76aDRs2kJyczOc//3kOHToUtNzdd99NZWUlKSkp3HnnnRw+fNjS542NjbFr1y56e3t5\n5ZVXSElJAeCpp55iz549OJ1ObDYbu3fv5vDhw7S1tUV4x/lnZRWTDqDM73iV95y/dqDLGDMCjIjI\nu8BGi2WnvPjv/xuGz/LII49QXV1NdXX1tNdTRx381tXCZY7lU+eOtLkpTAwexuW55bzX+t6M89p9\nrpRSszNfY7rmuthbUdGF5bXT0tJmHA8MDHjff/oHFBcXT32dnp4+dV0kDQ0NHDlyhP3795OYeCEq\nW1paeOCBB3jwwQenPk9E6OjooLS0NOx71tTUUFNTY+nzrbAS4AeA1SJiB04CO4HA/oKXgf8lIglA\nCnA18PfACQtlp6RdtZYtWSt5ZM+fB309BztHWluAT02dazjbRGnm7FrgGuBKKTU7sbLK6nyNHl+/\nfj1f+9rX2L59O7/4xS9wOp0AlJWV8dBDD82p2zywYfroo49e1D1G7EI3xkwA9wFvAMeAHxljjovI\nvSLyp95r6oDXgSPAB8D3jTG1ocqG+qyTI8HngPsUp9o5caZ52rmOwSachaEDvKm3acZvZO5eXcRF\nKaWWotms5x7p2rvuuovHHnuMz3zmM7jdbgDuvfdeHnvsMWprawHo6+vjhRdemPsNXwRLC4EbY34G\nVAWceyrg+AlgxuiAYGVD6aWJK8Ps2uLIddDQ3TDtXNekm4324K3p3NRcjDH0jvSSm5Y7dd7d4+Yr\nG75i5ZaUUkotssBWdbhWtv9rkVrjVq79yle+wtjYGDfffDPvvPMOd9xxB4ODg+zcuZPW1lays7P5\n7Gc/yxe+8AUrVZlXEi27z4iIYXc2DV93U7kiL+g1e/a+xA8PP82pf9g3dc72rSJ++yeH2LxmRdAy\nG5/cyL/d/m9sKtk0dW7V36/iV3/8K+w5M6eeKaVUPBIR3Y3sEgn1vfWen3Off3QtpSqTlBfnhnx5\ng91OnzRPHXv2Du9nQ0VxyDKBm5qMnB+hc6hTtxFVSikV06IqwFNHgs8B97mmysFI6oW54L+qbSJ5\nyBF0j3AfR/b0gWzNvc2UZZeRYEuYt/tWSimlFlpUBXhOiDngPvaiHMDQcroX8OwdHmreuE/gamy6\nC5lSSqmlIKoCvDjVEfZ1m01IGbGzv96zpGrtySaKguwd7i+wC12nkCmllFoKoirAHTnhwxgg2zj4\nqMUT4O4eN46c8GEcOBdcV2FTSim1FERVgK8tdkS8pijFTt2pZgBOjTSxrjhyC7y5t3lqBKC7V1vg\nSimlYl9UBXi4OeA+pVl2Wno9LfBeaWJTefgAz0nNIdGWSPewZwcafQaulFJqKYiqAL9mrSPiNVVF\nDk6NeEaij6Q18en1kVvT/iuyuXvclOdG7qpXSimlollUBbhnlHl4V5Ta6THNnGjvQiaTKFseI8J4\nRAAAClxJREFUeUtQXzf6mcEzpCelk5WSNR+3q5RSKo4F7j++0KIqwK341Bo7IyktfFDXROqItZZ0\neY5nKlljjw5gU0opBXv37mXr1q0X/T7ztXnKXFhaCz2arLcvxyQO8U7dx+SJtTB25Dg40XWC4mXF\nGuBKKaWmtgENZ3JyEpstetu50XtnIdhsQvJwGe+21LAizVoL3JHjoLmvWQewKaVUjGpvb+f3f//3\nWb58OYWFhdx///0APP3006xfv578/Hx27NhBa2vrVBmbzcZTTz2F0+kkLy+P++67D4C6ujq++tWv\n8v7775OZmUlenmf/jbvvvps/+7M/43Of+xyZmZnU1NTw2muvsXnzZrKzs7Hb7Re9Beh8irkAB8ic\nsNNie5vKvNl1oesiLkopFXsmJye57bbbKC8vp6WlhY6ODnbu3Mm+fft4/PHHeemll+js7GTr1q0z\n9ul+9dVX+fDDD/noo494/vnneeONN1i7di1PPvkk1157LefOnaO7u3vq+ueee46HH36Yc+fOcf31\n17Ns2TKeeeYZ+vr6ePXVV3nyySfZt29f4C0uipgM8MJkB5PLOrh8lbUwtufY9Rm4UkrFqP3793Py\n5En+7u/+jrS0NJKTk7nuuut48skn2bNnD06nE5vNxu7duzl8+DBtbW1TZffs2UNmZialpaVs27aN\nw4cPh/2s22+/nWuuuQaA5ORkbrjhBi677DIALr/8cnbu3Mk777xz6So7CzH3DBygNNNO3SRcVWmt\nBZ6VkkVqYioHTx7UAFdKqTmQR+dnsJb59uy3LG1ra8Nut894Ht3S0sIDDzzAgw8+6Hlv73Ptjo4O\nSktLASgqKpq6Pj09nYGBgbCf5Svns3//fnbv3s3Ro0cZGxtjbGyML37xi7Ouw6UQkwG+utDOm6eE\na9dZ38/bkePg6JmjrMxceQnvTCmllqa5BO98KS0tpbW1dcagsrKyMh566KEZ3eZWhBrAFnj+S1/6\nEvfffz+vv/46SUlJfOMb3+Ds2bOz/rxLISa70K8odWAbXEFWRorlMuU55ThyHLqNqFJKxZgtW7ZQ\nUlLC7t27GRoaYnR0lF//+tfce++9PPbYY9TW1gLQ19fHCy+8YOk9i4qKaG9vZ3x8POx1AwMD5Obm\nkpSUxP79+3n22Wenve5bpnsxxGSA/+HNW/jv1z0zqzKOHId2nyulVAyy2Wy88soruFwuysrKKC0t\n5fnnn+eOO+5g9+7d7Ny5k5ycHDZs2MDPfvazqXKBrWn/45tuuonLLruM4uJili9fHvKzv/e97/Hw\nww+TnZ3Nd77zHe66666Q77nQZDF/e/AnIuZS3stzHz9H/dl6vl397Uv2GUopFatEZFFbk0tZqO+t\n9/ycfwOImwBXSikVmgb4pXOpAjwmu9CVUkqpeKcBrpRSSsUgDXCllFIqBmmAK6WUUjFIA1wppZSK\nQRrgSimlVAyKyaVUlVJKzS+73b6oi5IsZXa79WW/Z8PSPHAR2Q78Tzwt9h8YY/424PUbgZcBt/fU\ni8aY73hfawb6gElg3BizJcRn6DxwpZRSceOSzwMXERvwXeB3gcuAXSKyNsil7xpjNnv/fMfv/CRQ\nbYzZFCq8411NTc1i38Ki0vrXLPYtLKp4rn881x20/hfLyjPwLYDLGNNijBkHfgTcHuS6UL9FiMXP\niVvx/o9Y61+z2LewqOK5/vFcd9D6XywrwboSaPM7bveeC3StiBwWkVdFZL3
feQO8KSIHROSei7hX\npZRSSnnN1yC2D4EyY8yQiOwAXgKc3tc+bYw5KSKFeIL8uDHml/P0uUoppVRcijiITUSuAR4xxmz3\nHu8GTOBAtoAyTcDvGGO6A85/GzhnjPn7IGV0BJtSSqm4cjGD2Ky0wA8Aq0XEDpwEdgK7/C8QkSJj\nzGnv11vw/GLQLSLpgM0YMyAiGcAtwKPzXQmllFIq3kQMcGPMhIjcB7zBhWlkx0XkXs/L5vvAF0Tk\nq8A4MAz4djwvAn7qbV0nAv9hjHnjUlREKaWUiidRsx+4Ukoppaxb8OldIpIiIr8RkUMi8rH3uTgi\nkisib4jICRF5XUSyF/reFoqI2ETkoIjs8x7HTd3Bs7iPiHzk/Tew33suLr4HIpItIj8WkeMickxE\nro6juju9P/OD3r/7ROT+eKk/gIh8Q0SOisgREfkPEUmOl/qLyAPe//M/FpH7veeWdN1F5AciclpE\njvidC1lnEdkjIi7v/w+3RHr/BQ9wY8wosM0Yswm4EtjhfW6+G/i5MaYK+AWwZ6HvbQE9ANT6HcdT\n3SH44j7x8j34R+A1Y8w6YCNQR5zU3RhT7/2ZbwZ+BxgEfkqc1F9EVgBfBzYbYzbgeay4iziov4hc\nBvxX4Co8/+/fJiKVLP26/xDPImj+gtbZO/36TmAdsAP4nkRa29YYs2h/gHTgt8Cn8PxHVuQ9XwzU\nLea9XcI6rwLeBKqBfd5zcVF3v+9BE5AfcG7Jfw+ALKAxyPklX/cgdb4FeC+e6g+sAFqAXDzhvQ/4\nTDzUH/gC8C9+xw8B3wSOx0Hd7cARv+OgP29vsH/L77r/BK4O996LskKatwv5EHAKeNMYc8BbodMA\nxphTwPLFuLcF8A94/uH6Dz6Il7r7+C/u8yfec/HwPSgHukTkh95u5O97Z2rEQ90D3QU86/06Lupv\njPkE+B9AK9AB9Bljfk581P8osNXbfZwO3AqUEh91D7Q8RJ0DF03rIPiiaVMWJcCNMZPG04W+Ctji\n7V4JHE235EbXicjngNPGmMOEXnoWlmDdA3zaeLpRbwW+JiJbiYOfP55W12bgn731H8TzW3c81H2K\niCQBvwf82HsqLuovIjl4lqG242mNZ4jIHxAH9TfG1AF/i6f38TXgEDAR7NKFvK8oMec6L+oa5caY\nfqAG2A6cFpEiABEpBs4s4q1dKp8Gfk9E3MBzwE0i8gxwKg7qPsUYc9L7dyeeVfu2EB8//3agzRjz\nW+/xT/AEejzU3d8O4ENjTJf3OF7q/xnAbYzpNsZM4Hn+fx1xUn9jzA+NMVcZY6qBXuAEcVL3AKHq\n3IGnV8JnlfdcSIsxCr3AN+pORNKAz+J5DrIP+CPvZX+IZ3vSJcUY85fGmDJjTAWeBXF+YYz5MvAK\nS7zuPiKSLiLLvF/7Fvf5mPj4+Z8G2kTEt8zwzcAx4qDuAXbh+QXWJ17q3wpcIyKp3sFJN+MZzBoX\n9RfPctqISBnweTyPUOKh7sL0HtdQdd4H7PTOTCgHVgP7w76x92H5ghGRK4C9eH55sAH/1xjzNyKS\nBzyP5zeQFuBOY0zvgt7cAhLPHuoPGmN+L57q7v2H+VM83Ua+xX0ej5fvgYhsBP4VSALcwN1AAnFQ\nd/D8AoenjhXGmHPec3Hxs4ep5aR34ln06hDwJ0AmcVB/EXkXyMNT928YY2qW+s9eRJ7FM2A5HzgN\nfBtPr+OPCVJnEdmDZ7T+OPCAibDwmS7kopRSSsUg3adbKaWUikEa4EoppVQM0gBXSimlYpAGuFJK\nKRWDNMCVUkqpGKQBrpRSSsUgDXCllFIqBmmAK6WUUjHo/wPEe4vrSp5PsAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -2464,47 +2467,11 @@ "outputs": [], "source": [] }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## trying more\n", - "\n", - "The rest below is just some unsorted experiments to try a few more things." - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, { "cell_type": "markdown", "metadata": {}, "source": [ - "TODO: The following is not correct because when we change the sample size, then the rejection region also changes." 
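
The point in the TODO above is easy to see directly: the exact two-sided rejection region returned by `binom_test_reject_interval` shifts as the sample size changes, so a region computed for one `nobs` cannot be reused for another. A minimal sketch (the loop values are arbitrary, and the call mirrors the `binom_test_reject_interval` usage that appears further below in this notebook):

```python
import statsmodels.stats.proportion as smprop

p_null = 0.3
for nobs_ in (30, 35, 40, 45, 50):
    # lower and upper critical counts of the exact two-sided test for this nobs
    print(nobs_, smprop.binom_test_reject_interval(p_null, nobs_, alpha=0.05))
```
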
+ "Next we try exact power for the already available proportion_ztest" ] }, { @@ -2517,26 +2484,7 @@ { "data": { "text/plain": [ - "[0.047092254594638852,\n", - " 0.047775312714239675,\n", - " 0.051602724514375933,\n", - " 0.05859877650596669,\n", - " 0.068789625299809865,\n", - " 0.082186755965548378,\n", - " 0.098772671226915978,\n", - " 0.118489659792336,\n", - " 0.1412321565494524,\n", - " 0.16684288140347131,\n", - " 0.19511265080608356,\n", - " 0.22578351411938377,\n", - " 0.25855468655666075,\n", - " 0.29309063531712998,\n", - " 0.32903062340607336,\n", - " 0.36599901949754499,\n", - " 0.4036157318615764,\n", - " 0.44150620799796952,\n", - " 0.47931054705529291,\n", - " 0.51669138801043579]" + "(2.4738633753705956, 0.013366080075435313)" ] }, "execution_count": 71, @@ -2545,14 +2493,10 @@ } ], "source": [ - "[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "We can also calculate this in vectorized form for the set of sample sizes and all three tests:" + "p0 = 0.5\n", + "pa = 0.6\n", + "diff = pa - p0\n", + "smprop.proportions_ztest(nobs_ * (pa), nobs_, value=p0, alternative='two-sided', prop_var=p0)" ] }, { @@ -2565,26 +2509,17 @@ { "data": { "text/plain": [ - "array([[ 0.04709225, 0.02625388, 0.07020749],\n", - " [ 0.04777531, 0.03102743, 0.07728123],\n", - " [ 0.05160272, 0.03820442, 0.0883212 ],\n", - " [ 0.05859878, 0.04792633, 0.10324071],\n", - " [ 0.06878963, 0.06032282, 0.12191353],\n", - " [ 0.08218676, 0.07549525, 0.14416465],\n", - " [ 0.09877267, 0.09350311, 0.16976553],\n", - " [ 0.11848966, 0.11435385, 0.19843379],\n", - " [ 0.14123216, 0.1379965 , 0.2298369 ],\n", - " [ 0.16684288, 0.16431907, 0.26359926],\n", - " [ 0.19511265, 0.19314968, 0.29931183],\n", - " [ 0.22578351, 0.22426089, 0.33654338],\n", - " [ 0.25855469, 0.25737665, 0.37485255],\n", - " [ 0.29309064, 0.29218144, 0.41379979],\n", - " [ 0.32903062, 0.32833054, 0.45295869],\n", - " [ 0.36599902, 0.36546115, 0.49192593],\n", - " [ 0.40361573, 0.40320337, 0.53032969],\n", - " [ 0.44150621, 0.4411907 , 0.56783618],\n", - " [ 0.47931055, 0.47906961, 0.60415428],\n", - " [ 0.51669139, 0.51650774, 0.63903825]])" + "(0.70824505759861689,\n", + " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12,\n", + " 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25,\n", + " 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38,\n", + " 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51,\n", + " 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64,\n", + " 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101,\n", + " 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114,\n", + " 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127,\n", + " 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140,\n", + " 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153]))" ] }, "execution_count": 72, @@ -2593,32 +2528,10 @@ } ], "source": [ - "power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count." 
+ "#power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, use_idx=1) #this raises exception\n", + "\n", + "pzt = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=p_null)\n", + "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) #use_idx=False raises exception" ] }, { @@ -2631,7 +2544,7 @@ { "data": { "text/plain": [ - "(-0.39840953644479782, 0.69032832946419354)" + "(0.5, 0.6, 153)" ] }, "execution_count": 73, @@ -2640,7 +2553,7 @@ } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null)" + "p0, pa, nobs_" ] }, { @@ -2653,7 +2566,45 @@ { "data": { "text/plain": [ - "(-0.56343616981901101, 0.57313791338407638)" + "array([[ 6.00000000e+01, 7.63288179e-03, 1.00000000e+00],\n", + " [ 6.10000000e+01, 1.22036066e-02, 1.00000000e+00],\n", + " [ 6.20000000e+01, 1.90520464e-02, 1.00000000e+00],\n", + " [ 6.30000000e+01, 2.90490222e-02, 1.00000000e+00],\n", + " [ 6.40000000e+01, 4.32662880e-02, 1.00000000e+00],\n", + " [ 6.50000000e+01, 6.29648259e-02, 0.00000000e+00],\n", + " [ 6.60000000e+01, 8.95550744e-02, 0.00000000e+00],\n", + " [ 6.70000000e+01, 1.24523921e-01, 0.00000000e+00],\n", + " [ 6.80000000e+01, 1.69327297e-01, 0.00000000e+00],\n", + " [ 6.90000000e+01, 2.25252906e-01, 0.00000000e+00],\n", + " [ 7.00000000e+01, 2.93264235e-01, 0.00000000e+00],\n", + " [ 7.10000000e+01, 3.73843327e-01, 0.00000000e+00],\n", + " [ 7.20000000e+01, 4.66854271e-01, 0.00000000e+00],\n", + " [ 7.30000000e+01, 5.71450573e-01, 0.00000000e+00],\n", + " [ 7.40000000e+01, 6.86046505e-01, 0.00000000e+00],\n", + " [ 7.50000000e+01, 8.08365156e-01, 0.00000000e+00],\n", + " [ 7.60000000e+01, 9.35565055e-01, 0.00000000e+00],\n", + " [ 7.70000000e+01, 9.35565055e-01, 0.00000000e+00],\n", + " [ 7.80000000e+01, 8.08365156e-01, 0.00000000e+00],\n", + " [ 7.90000000e+01, 6.86046505e-01, 0.00000000e+00],\n", + " [ 8.00000000e+01, 5.71450573e-01, 0.00000000e+00],\n", + " [ 8.10000000e+01, 4.66854271e-01, 0.00000000e+00],\n", + " [ 8.20000000e+01, 3.73843327e-01, 0.00000000e+00],\n", + " [ 8.30000000e+01, 2.93264235e-01, 0.00000000e+00],\n", + " [ 8.40000000e+01, 2.25252906e-01, 0.00000000e+00],\n", + " [ 8.50000000e+01, 1.69327297e-01, 0.00000000e+00],\n", + " [ 8.60000000e+01, 1.24523921e-01, 0.00000000e+00],\n", + " [ 8.70000000e+01, 8.95550744e-02, 0.00000000e+00],\n", + " [ 8.80000000e+01, 6.29648259e-02, 0.00000000e+00],\n", + " [ 8.90000000e+01, 4.32662880e-02, 1.00000000e+00],\n", + " [ 9.00000000e+01, 2.90490222e-02, 1.00000000e+00],\n", + " [ 9.10000000e+01, 1.90520464e-02, 1.00000000e+00],\n", + " [ 9.20000000e+01, 1.22036066e-02, 1.00000000e+00],\n", + " [ 9.30000000e+01, 7.63288179e-03, 1.00000000e+00],\n", + " [ 9.40000000e+01, 4.66088119e-03, 1.00000000e+00],\n", + " [ 9.50000000e+01, 2.77817268e-03, 1.00000000e+00],\n", + " [ 9.60000000e+01, 1.61622222e-03, 1.00000000e+00],\n", + " [ 9.70000000e+01, 9.17567722e-04, 1.00000000e+00],\n", + " [ 9.80000000e+01, 5.08299924e-04, 1.00000000e+00]])" ] }, "execution_count": 74, @@ -2662,7 +2613,28 @@ } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null)" + "pv = [smprop.proportions_ztest(x, nobs_, value=p0, alternative='two-sided', prop_var=p0)[1] for x in np.arange(60, 99)]\n", + "pv = np.asarray(pv)\n", + "np.column_stack((np.arange(60, 99), pv, pv <=0.05))" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The power using the exact distribution is lower 
than using the asymptotic normal distribution.\n", + "The rejection region looks correct, so how do we verify that we calculated the power correctly?\n", + "\n", + "\n", + "PASS reports the following values\n", + "\n", + "```\n", + " Exact Z-Test Z-Test Z-Test Z-Test\n", + " Target Test S(P0) S(P0)C S(P) S(P)C\n", + "n P0 P1 Alpha Power Power Power Power Power\n", + "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958\n", + "50 0.5000 0.6000 0.0500 0.23706 0.33613 0.23706 0.33613 0.23706\n", + "```" ] }, { @@ -2675,7 +2647,10 @@ { "data": { "text/plain": [ - "(-0.79681907288959564, 0.42555611641912894)" + "(0.33613256480043147,\n", + " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", + " 47, 48, 49, 50]))" ] }, "execution_count": 75, @@ -2684,9 +2659,26 @@ } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null)" + "p0, pa, nobs_ = 0.5, 0.6, 50\n", + "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "0.33613 is the same as reported by PASS for the exact power of the score test, `S(P0)`. Unfortunately for testing purposes, in this example Wald and score test report identical numbers for n=50." ] }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, { "cell_type": "code", "execution_count": 76, @@ -2697,7 +2689,10 @@ { "data": { "text/plain": [ - "(8.293564511085938e-17, 0.99999999999999989)" + "(0.33613256480043147,\n", + " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", + " 47, 48, 49, 50]))" ] }, "execution_count": 76, @@ -2706,7 +2701,15 @@ } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null)" + "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", + "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958" ] }, { @@ -2719,7 +2722,7 @@ { "data": { "text/plain": [ - "(5.864435705996961e-17, 1.0)" + "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" ] }, "execution_count": 77, @@ -2728,63 +2731,110 @@ } ], "source": [ - "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null)" + "nobs_ = 10\n", + "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", + "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" ] }, { - "cell_type": "code", - "execution_count": null, - "metadata": { - "collapsed": true - }, - "outputs": [], - "source": [] + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is the same as the Wald test, while the score test has much lower power in this example. It is only around 0.048 which is the same in PASS and our calculations at the provided print precision." 
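
As an independent cross-check of these numbers, the exact power can be written directly as the probability of the z-test's rejection region under the alternative, using only `proportions_ztest` and the binomial pmf. A sketch: `exact_power_ztest` is an ad-hoc helper name, and `prop_var=None` selects the Wald variant while `prop_var=p0` selects the score variant, as in the lambdas above.

```python
import numpy as np
from scipy import stats
import statsmodels.stats.proportion as smprop

def exact_power_ztest(p0, p_alt, nobs, alpha=0.05, prop_var=None):
    # p-value of the z-test at every possible count, evaluated under the null
    x = np.arange(nobs + 1)
    pvals = np.array([smprop.proportions_ztest(x_, nobs, value=p0,
                                               prop_var=prop_var)[1] for x_ in x])
    reject = pvals <= alpha
    # exact power: probability of the rejection region under the alternative
    return stats.binom.pmf(x[reject], nobs, p_alt).sum()

for n_ in (10, 50):
    print(n_,
          exact_power_ztest(0.5, 0.6, n_, prop_var=0.5),   # score variant
          exact_power_ztest(0.5, 0.6, n_, prop_var=None))  # Wald variant
```

This should reproduce the 0.048 and 0.17958 values at n=10 and the 0.33613 value at n=50 discussed above.
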
+ ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 78, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.048035123200000036, array([ 0, 1, 9, 10]))" + ] + }, + "execution_count": 78, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, "source": [ - "?smprop.proportion_confint()" + "Know we know how to use it, and I added keywords to the `power_binom_proptest` above, we can drop the use of lambda functions." ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 79, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" + ] + }, + "execution_count": 79, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "smprop.proportion_confint()" + "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=1)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 80, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.048035123200000036, array([ 0, 1, 9, 10]))" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "from statsmodels.stats.proportion import proportion_effectsize\n", - "es = proportion_effectsize(0.4, 0.5)\n", - "smpow.NormalIndPower().solve_power(es, nobs1=60, alpha=0.05, ratio=0)\n", - "# R pwr 0.3447014091272153" + "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=1)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 81, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.17958430720000004, (2, 8))\n", + "(0.048035123199999974, (1, 9))\n" + ] + } + ], "source": [ - "smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9)" + "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=0))\n", + "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=0))" ] }, { @@ -2796,6 +2846,20 @@ "outputs": [], "source": [] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Summary\n", + "\n", + "Now, we have almost all the necessary pieces working and verified on a few example. The next step is to clean this up, convert it to usage friendly function or classes and convert the examples to unit tests.\n", + "\n", + "We have now two exact hypothesis tests, `minlike` and `central`, two tests based on asymptotic normality, `wald` and `score`, and we have three ways of calculating the power, using the exact distribution, using the asymptotic normal distribution, and the already existing power calculation based on effect size that does not distinguish that variance is different under the null and under the alternative.\n", + "\n", + "We are still missing some examples, power calculations for confidence intervals and equivalence tests, where some functions are already available in statsmodels.stats.proportions. We still need a function that finds the sample size given the functions for the power. 
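
One possible shape for that missing piece is a plain scan over `nobs` against any callable that maps a sample size to a power. A deliberately naive sketch: the names `smallest_nobs` and `power_score_ztest` are ad hoc, and because exact power is saw-toothed in `nobs`, the first sample size that reaches the target is not guaranteed to keep the power above the target for every larger `nobs`.

```python
import numpy as np
from scipy import stats
import statsmodels.stats.proportion as smprop

def smallest_nobs(power_func, target=0.8, nobs_min=10, nobs_max=1000):
    # plain scan; a bisection would assume monotonicity that exact power lacks
    for n_ in range(nobs_min, nobs_max + 1):
        if power_func(n_) >= target:
            return n_
    return None

def power_score_ztest(nobs, p0=0.5, p_alt=0.6, alpha=0.05):
    # exact power of the score z-test by enumerating the binomial sample space
    x = np.arange(nobs + 1)
    pvals = np.array([smprop.proportions_ztest(x_, nobs, value=p0, prop_var=p0)[1]
                      for x_ in x])
    return stats.binom.pmf(x[pvals <= alpha], nobs, p_alt).sum()

print(smallest_nobs(power_score_ztest, target=0.8))
```
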
\n", + "Vectorization for different alternatives or number of observations depends on the implementation details and does not work across all cases. " + ] + }, { "cell_type": "code", "execution_count": null, @@ -2805,6 +2869,15 @@ "outputs": [], "source": [] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## trying more\n", + "\n", + "The rest below is just some unsorted experiments to try a few more things." + ] + }, { "cell_type": "code", "execution_count": null, @@ -2814,16 +2887,378 @@ "outputs": [], "source": [] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "TODO: The following is not correct because when we change the sample size, then the rejection region also changes." + ] + }, { "cell_type": "code", - "execution_count": null, + "execution_count": 82, "metadata": { "collapsed": false }, - "outputs": [], - "source": [ - "low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82\n", - "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", + "outputs": [ + { + "data": { + "text/plain": [ + "[0.047092254594638852,\n", + " 0.047775312714239675,\n", + " 0.051602724514375933,\n", + " 0.05859877650596669,\n", + " 0.068789625299809865,\n", + " 0.082186755965548378,\n", + " 0.098772671226915978,\n", + " 0.118489659792336,\n", + " 0.1412321565494524,\n", + " 0.16684288140347131,\n", + " 0.19511265080608356,\n", + " 0.22578351411938377,\n", + " 0.25855468655666075,\n", + " 0.29309063531712998,\n", + " 0.32903062340607336,\n", + " 0.36599901949754499,\n", + " 0.4036157318615764,\n", + " 0.44150620799796952,\n", + " 0.47931054705529291,\n", + " 0.51669138801043579]" + ] + }, + "execution_count": 82, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "We can also calculate this in vectorized form for the set of sample sizes and all three tests:" + ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[ 0.04709225, 0.02625388, 0.07020749],\n", + " [ 0.04777531, 0.03102743, 0.07728123],\n", + " [ 0.05160272, 0.03820442, 0.0883212 ],\n", + " [ 0.05859878, 0.04792633, 0.10324071],\n", + " [ 0.06878963, 0.06032282, 0.12191353],\n", + " [ 0.08218676, 0.07549525, 0.14416465],\n", + " [ 0.09877267, 0.09350311, 0.16976553],\n", + " [ 0.11848966, 0.11435385, 0.19843379],\n", + " [ 0.14123216, 0.1379965 , 0.2298369 ],\n", + " [ 0.16684288, 0.16431907, 0.26359926],\n", + " [ 0.19511265, 0.19314968, 0.29931183],\n", + " [ 0.22578351, 0.22426089, 0.33654338],\n", + " [ 0.25855469, 0.25737665, 0.37485255],\n", + " [ 0.29309064, 0.29218144, 0.41379979],\n", + " [ 0.32903062, 0.32833054, 0.45295869],\n", + " [ 0.36599902, 0.36546115, 0.49192593],\n", + " [ 0.40361573, 0.40320337, 0.53032969],\n", + " [ 0.44150621, 0.4411907 , 0.56783618],\n", + " [ 0.47931055, 0.47906961, 0.60415428],\n", + " [ 0.51669139, 0.51650774, 0.63903825]])" + ] + }, + "execution_count": 83, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": 
[], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count." + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.39840953644479782, 0.69032832946419354)" + ] + }, + "execution_count": 84, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.56343616981901101, 0.57313791338407638)" + ] + }, + "execution_count": 85, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 86, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.79681907288959564, 0.42555611641912894)" + ] + }, + "execution_count": 86, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 87, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(8.293564511085938e-17, 0.99999999999999989)" + ] + }, + "execution_count": 87, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": 88, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(5.864435705996961e-17, 1.0)" + ] + }, + "execution_count": 88, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 89, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [ + "#?smprop.proportion_confint()" + ] + }, + { + "cell_type": "code", + "execution_count": 90, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.081984475816844427, 0.38468219084982225)" + ] + }, + "execution_count": 90, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smprop.proportion_confint(count, nobs)" + ] + }, + { + "cell_type": "code", + "execution_count": 91, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "0.34470140912721514" + ] + }, + "execution_count": 91, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "from statsmodels.stats.proportion import proportion_effectsize\n", + "es = proportion_effectsize(0.4, 0.5)\n", + "smpow.NormalIndPower().solve_power(es, nobs1=60, alpha=0.05, ratio=0)\n", + "# R pwr 0.3447014091272153" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + 
"data": { + "text/plain": [ + "259.154426739506" + ] + }, + "execution_count": 92, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 93, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.94849873047984967,\n", + " ((0.75834970530451862,\n", + " 0.86051080550098236,\n", + " -2.5599758686988578,\n", + " 1.6821766224528543),))" + ] + }, + "execution_count": 93, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82\n", + "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm',\n", " variance_prop=None, discrete=True, continuity=0,\n", " critval_continuity=0)\n", " " @@ -2831,11 +3266,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 94, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95000875677852759,\n", + " ((0.75207687201030093,\n", + " 0.86590768457946699,\n", + " -1.7341121433755891,\n", + " 2.3848884863189261),))" + ] + }, + "execution_count": 94, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", @@ -2845,11 +3295,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 95, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95811685170327532,\n", + " ((0.75220160721176865,\n", + " 0.86582602622195959,\n", + " -1.812033626524528,\n", + " 2.4628099694678625),))" + ] + }, + "execution_count": 95, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm',\n", @@ -2859,11 +3324,23 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 96, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.95447058338704227,\n", + " ((158.0, 181.0, 5695.1110612524499, 6528.3652241583422),))" + ] + }, + "execution_count": 96, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", @@ -2873,11 +3350,26 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 97, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.91140841850002685,\n", + " ((0.76242701448039996,\n", + " 0.85913192581906617,\n", + " -1.4383338187061427,\n", + " 2.2137816599366142),))" + ] + }, + "execution_count": 97, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, 
alpha=0.025, dist='norm',\n", @@ -2905,11 +3397,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 98, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(49.0, 51.0)" + ] + }, + "execution_count": 98, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs = 0.4, 0.6, 100\n", "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" @@ -2917,11 +3420,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 99, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(12.0, 28.0)" + ] + }, + "execution_count": 99, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "value, nobs = 0.4, 50\n", "smprop.binom_test_reject_interval(value, nobs, alpha=0.05)" @@ -2929,22 +3443,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 100, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.39832112950330101, 0.6016788704966991)" + ] + }, + "execution_count": 100, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "smprop.proportion_confint(50, 100, method='beta')" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 101, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(78.0, 84.0)" + ] + }, + "execution_count": 101, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs = 0.7, 0.9, 100\n", "smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05)" @@ -2952,11 +3488,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 102, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(0.65848903119285485, ((78.0, 85.0, 1930.0, 2105.0),))" + ] + }, + "execution_count": 102, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", "smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom',\n", @@ -2966,11 +3513,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 103, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "0.61042723749210825" + ] + }, + "execution_count": 103, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8\n", "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" @@ -2978,11 +3536,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 104, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "0.71661671146632" + ] + }, + "execution_count": 104, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8\n", "smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05)" @@ -3017,7 +3586,7 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 105, "metadata": { "collapsed": true }, @@ -3038,20 +3607,31 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 106, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "856.38473506679793" + ] + }, + "execution_count": 106, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "# 
Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes\n", "n_frac1 = 0.5\n", - "n_frac2 = 1 - frac1\n", + "n_frac2 = 1 - n_frac1\n", "\n", "# if defined by ratio: n2 = ratio * n1\n", "ratio = 1\n", "n_frac1 = 1 / ( 1. + ratio)\n", - "n_frac2 = 1 - frac1\n", + "n_frac2 = 1 - n_frac1\n", "\n", "\n", "# If we use fraction of nobs, then sample_size return nobs is total number of observations\n", @@ -3063,11 +3643,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 107, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "0.89999999999999991" + ] + }, + "execution_count": 107, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "#nobs = 858\n", "power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05)" @@ -3075,11 +3666,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 108, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(1.6448536269514729, 1.2815515655446004)" + ] + }, + "execution_count": 108, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "alpha=0.05; power=0.9\n", "stats.norm.isf(alpha), stats.norm.isf(1 - power)\n" @@ -3087,11 +3689,22 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 109, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "1.2815515655446004" + ] + }, + "execution_count": 109, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "crit_alpha = stats.norm.isf(alpha)\n", "(np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt" @@ -3099,22 +3712,44 @@ }, { "cell_type": "code", - "execution_count": null, + "execution_count": 110, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "0.89999999999999991" + ] + }, + "execution_count": 110, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "stats.norm.cdf(_)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 111, "metadata": { "collapsed": false }, - "outputs": [], + "outputs": [ + { + "data": { + "text/plain": [ + "(array([ 30., 49.]), array([ 51., 70.]))" + ] + }, + "execution_count": 111, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05)" ] diff --git a/notebooks/proportion_one_power.py b/notebooks/proportion_one_power.py index d4f3f83..eb62dba 100644 --- a/notebooks/proportion_one_power.py +++ b/notebooks/proportion_one_power.py @@ -493,7 +493,7 @@ def power_binom_reject(low, upp, prop, nobs): # In[44]: -def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), item=None, use_idx=False): +def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwds=None, item=None, use_idx=False): """calculate power for proportion test by explicit numeration of sample space @@ -501,6 +501,9 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), ite None if return is pvalue, integer for index of pvalue if tuple is returned """ + if kwds is None: + kwds = {} + sample_space = np.arange(nobs + 1) try: # TODO: how do we vectorize, if res were a instance with pvalue attribute, then it would be easier. 
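
The `kwds=None` followed by `if kwds is None: kwds = {}` pattern in the hunk above avoids a mutable default argument: with `kwds={}` written in the signature, the same dict object would be shared across calls. A generic illustration of the pitfall (not statsmodels code, just the idiom):

```python
def bad(extra={}):           # one dict is created at definition time and reused
    extra.setdefault('n', 0)
    extra['n'] += 1
    return extra

def good(extra=None):        # a fresh dict is created on each call
    if extra is None:
        extra = {}
    extra.setdefault('n', 0)
    extra['n'] += 1
    return extra

print(bad())   # {'n': 1}
print(bad())   # {'n': 2}  state leaked from the first call
print(good())  # {'n': 1}
print(good())  # {'n': 1}
```
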
@@ -512,9 +515,9 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), ite except Exception: # assume test_func is not vectorized if item is None: - res = [test_func(x, nobs, p_null, *args) for x in sample_space] + res = [test_func(x, nobs, p_null, *args, **kwds) for x in sample_space] else: - res = [test_func(x, nobs, p_null, *args)[item] for x in sample_space] + res = [test_func(x, nobs, p_null, *args, **kwds)[item] for x in sample_space] pvalues = np.asarray(res) rej_indicator = (pvalues <= alpha) @@ -809,11 +812,117 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, +# Next we try exact power for the already available proportion_ztest + +# In[71]: + +p0 = 0.5 +pa = 0.6 +diff = pa - p0 +smprop.proportions_ztest(nobs_ * (pa), nobs_, value=p0, alternative='two-sided', prop_var=p0) + + +# In[72]: + +#power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, use_idx=1) #this raises exception + +pzt = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=p_null) +power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) #use_idx=False raises exception + + +# In[73]: + +p0, pa, nobs_ + + +# In[74]: + +pv = [smprop.proportions_ztest(x, nobs_, value=p0, alternative='two-sided', prop_var=p0)[1] for x in np.arange(60, 99)] +pv = np.asarray(pv) +np.column_stack((np.arange(60, 99), pv, pv <=0.05)) + + +# The power using the exact distribution is lower than using the asymptotic normal distribution. +# The rejection region looks correct, so how do we verify that we calculated the power correctly? +# +# +# PASS reports the following values +# +# ``` +# Exact Z-Test Z-Test Z-Test Z-Test +# Target Test S(P0) S(P0)C S(P) S(P)C +# n P0 P1 Alpha Power Power Power Power Power +# 10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958 +# 50 0.5000 0.6000 0.0500 0.23706 0.33613 0.23706 0.33613 0.23706 +# ``` + +# In[75]: + +p0, pa, nobs_ = 0.5, 0.6, 50 +power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) + + +# 0.33613 is the same as reported by PASS for the exact power of the score test, `S(P0)`. Unfortunately for testing purposes, in this example Wald and score test report identical numbers for n=50. + # In[ ]: +# In[76]: + +pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None) +power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1) + + +# 10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958 + +# In[77]: + +nobs_ = 10 +pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None) +power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1) + + +# This is the same as the Wald test, while the score test has much lower power in this example. It is only around 0.048 which is the same in PASS and our calculations at the provided print precision. + +# In[78]: + +power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) + + +# Know we know how to use it, and I added keywords to the `power_binom_proptest` above, we can drop the use of lambda functions. 
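
A `functools.partial` wrapper is another way to pin `prop_var` without a lambda and without the `kwds` argument. A sketch, assuming the `power_binom_proptest` helper defined earlier in this script and the `p0`, `pa`, `nobs_` values already set above:

```python
from functools import partial

import statsmodels.stats.proportion as smprop

# bind the variance choice once; count, nobs and the null value stay positional,
# which is the calling convention power_binom_proptest uses internally
pzt_score = partial(smprop.proportions_ztest, prop_var=p0)
pzt_wald = partial(smprop.proportions_ztest, prop_var=None)

print(power_binom_proptest(pzt_score, p0, pa, nobs_, item=1, use_idx=1))
print(power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1))
```

At `nobs_ = 10` these two calls should agree with the 0.048 and 0.17958 results above.
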
+ +# In[79]: + +power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=1) + + +# In[80]: + +power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=1) + + +# In[81]: + +print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=0)) +print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=0)) + + +# In[ ]: + + + + +# ## Summary +# +# Now, we have almost all the necessary pieces working and verified on a few example. The next step is to clean this up, convert it to usage friendly function or classes and convert the examples to unit tests. +# +# We have now two exact hypothesis tests, `minlike` and `central`, two tests based on asymptotic normality, `wald` and `score`, and we have three ways of calculating the power, using the exact distribution, using the asymptotic normal distribution, and the already existing power calculation based on effect size that does not distinguish that variance is different under the null and under the alternative. +# +# We are still missing some examples, power calculations for confidence intervals and equivalence tests, where some functions are already available in statsmodels.stats.proportions. We still need a function that finds the sample size given the functions for the power. +# Vectorization for different alternatives or number of observations depends on the implementation details and does not work across all cases. + # In[ ]: @@ -830,14 +939,14 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # TODO: The following is not correct because when we change the sample size, then the rejection region also changes. -# In[71]: +# In[82]: [power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)] # We can also calculate this in vectorized form for the set of sample sizes and all three tests: -# In[72]: +# In[83]: power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None]) @@ -854,27 +963,27 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # ## Trying out two sample proportion, incorrect if nobs is scalar instead of same length as count. 
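
The heading above refers to the two-sample form of `proportions_ztest`: with a length-2 `count`, `nobs` needs to be an array of the same length, one entry per sample. A sketch of the intended call, using counts of 6 and 7 out of 30 each to match the experiments below (the scalar-`nobs` calls below are the problematic variant):

```python
import numpy as np
import statsmodels.stats.proportion as smprop

count2 = np.array([6, 7])
nobs2 = np.array([30, 30])   # one sample size per group
# two-sample z-test of the null hypothesis p1 - p2 == 0
print(smprop.proportions_ztest(count2, nobs2, value=0, alternative='two-sided'))
```
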
-# In[73]: +# In[84]: smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null) -# In[74]: +# In[85]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null) -# In[75]: +# In[86]: smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null) -# In[76]: +# In[87]: smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null) -# In[77]: +# In[88]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null) @@ -884,17 +993,17 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[ ]: +# In[89]: -get_ipython().magic('pinfo smprop.proportion_confint') +#?smprop.proportion_confint() -# In[ ]: +# In[90]: -smprop.proportion_confint() +smprop.proportion_confint(count, nobs) -# In[ ]: +# In[91]: from statsmodels.stats.proportion import proportion_effectsize es = proportion_effectsize(0.4, 0.5) @@ -902,7 +1011,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # R pwr 0.3447014091272153 -# In[ ]: +# In[92]: smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9) @@ -922,7 +1031,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[ ]: +# In[93]: low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -931,7 +1040,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[ ]: +# In[94]: low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -939,7 +1048,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[ ]: +# In[95]: low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -947,7 +1056,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[ ]: +# In[96]: low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -955,7 +1064,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[ ]: +# In[97]: low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -973,30 +1082,30 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[ ]: +# In[98]: low, upp, nobs = 0.4, 0.6, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[ ]: +# In[99]: value, nobs = 0.4, 50 smprop.binom_test_reject_interval(value, nobs, alpha=0.05) -# In[ ]: +# In[100]: smprop.proportion_confint(50, 100, method='beta') -# In[ ]: +# In[101]: low, upp, nobs = 0.7, 0.9, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[ ]: +# In[102]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -1004,13 +1113,13 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[ ]: +# In[103]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) -# In[ ]: +# In[104]: low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8 
smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) @@ -1031,7 +1140,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[ ]: +# In[105]: # from Lachine 1981 equ (3) and (4) @@ -1046,16 +1155,16 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): return stats.norm.cdf(crit_pow) -# In[ ]: +# In[106]: # Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes n_frac1 = 0.5 -n_frac2 = 1 - frac1 +n_frac2 = 1 - n_frac1 # if defined by ratio: n2 = ratio * n1 ratio = 1 n_frac1 = 1 / ( 1. + ratio) -n_frac2 = 1 - frac1 +n_frac2 = 1 - n_frac1 # If we use fraction of nobs, then sample_size return nobs is total number of observations @@ -1065,30 +1174,30 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): nobs -# In[ ]: +# In[107]: #nobs = 858 power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05) -# In[ ]: +# In[108]: alpha=0.05; power=0.9 stats.norm.isf(alpha), stats.norm.isf(1 - power) -# In[ ]: +# In[109]: crit_alpha = stats.norm.isf(alpha) (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt -# In[ ]: +# In[110]: stats.norm.cdf(_) -# In[ ]: +# In[111]: smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05) From 7fb0160bb7f8fd5eef00e276c61ec7fbe2e20238 Mon Sep 17 00:00:00 2001 From: Josef Date: Sat, 26 Dec 2015 23:45:19 -0500 Subject: [PATCH 4/4] ENH: proportion notebook, add GLM, cornercase, sensitivity-experiment --- notebooks/proportion_one_power.ipynb | 1132 ++++++++++++++++++++------ notebooks/proportion_one_power.py | 378 ++++++--- 2 files changed, 1144 insertions(+), 366 deletions(-) diff --git a/notebooks/proportion_one_power.ipynb b/notebooks/proportion_one_power.ipynb index 58044b9..30c660b 100644 --- a/notebooks/proportion_one_power.ipynb +++ b/notebooks/proportion_one_power.ipynb @@ -171,7 +171,7 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "**Two sided hypothesis**" + "We can check some corner case behavior to see if the function handles those correctly. It does not yet do so. beta/exact confidence interval contains a NaN if the count is all of the same kind, normal and agresti_coull return proportions that are negative or larger than one. (I opened https://github.com/statsmodels/statsmodels/issues/2742 )" ] }, { @@ -183,8 +183,96 @@ "outputs": [ { "data": { + "text/html": [ + "
\n", + "\n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + " \n", + "
0 lower0 upper1 lower1 uppern-1 lowern-1 uppern lowern upper
betaNaN0.1157030.0008440.1721690.8278310.9991560.884297NaN
wilson0.0000000.1135130.0059090.1667040.8332960.9940910.8864871.000000
normal0.0000000.000000-0.0309010.0975670.9024331.0309011.0000001.000000
agresti_coull-0.0211980.134712-0.0083050.1809180.8190821.0083050.8652881.021198
jeffrey0.0000160.0796780.0036200.1454180.8545820.9963800.9203220.999984
\n", + "
" + ], "text/plain": [ - "0.55100632188415744" + " 0 lower 0 upper 1 lower 1 upper n-1 lower n-1 upper \\\n", + "beta NaN 0.115703 0.000844 0.172169 0.827831 0.999156 \n", + "wilson 0.000000 0.113513 0.005909 0.166704 0.833296 0.994091 \n", + "normal 0.000000 0.000000 -0.030901 0.097567 0.902433 1.030901 \n", + "agresti_coull -0.021198 0.134712 -0.008305 0.180918 0.819082 1.008305 \n", + "jeffrey 0.000016 0.079678 0.003620 0.145418 0.854582 0.996380 \n", + "\n", + " n lower n upper \n", + "beta 0.884297 NaN \n", + "wilson 0.886487 1.000000 \n", + "normal 1.000000 1.000000 \n", + "agresti_coull 0.865288 1.021198 \n", + "jeffrey 0.920322 0.999984 " ] }, "execution_count": 5, @@ -193,7 +281,23 @@ } ], "source": [ - "smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided')" + "count_ = 0\n", + "confints0 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods]\n", + "count_ = 1\n", + "confints1 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods]\n", + "count_ = nobs - 1\n", + "confintsnm1 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods]\n", + "count_ = nobs\n", + "confintsn = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods]\n", + "pd.DataFrame(np.column_stack((confints0, confints1, confintsnm1, confintsn)), index=confint_methods, \n", + " columns=['0 lower', '0 upper', '1 lower', '1 upper', 'n-1 lower', 'n-1 upper', 'n lower', 'n upper'])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Two sided hypothesis**" ] }, { @@ -206,7 +310,7 @@ { "data": { "text/plain": [ - "(-0.86333169460343107, 0.38795512282614564)" + "0.55100632188415744" ] }, "execution_count": 6, @@ -215,7 +319,7 @@ } ], "source": [ - "smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided')" + "smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided')" ] }, { @@ -228,7 +332,7 @@ { "data": { "text/plain": [ - "(-0.79681907288959564, 0.42555611641912894)" + "(-0.86333169460343107, 0.38795512282614564)" ] }, "execution_count": 7, @@ -236,10 +340,210 @@ "output_type": "execute_result" } ], + "source": [ + "smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided')" + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(-0.79681907288959564, 0.42555611641912894)" + ] + }, + "execution_count": 8, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ "smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided', prop_var=p_null)" ] }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "**Aside: Corner case for tests**\n", + "\n", + "Many normal distribution based hypothesis tests have problems with observations where the count is zero. Various solutions have been proposed, one of them is to add 0.5 to all zero observations. PASS also adds a small number like 0.001 for the power calculations in this case. There is currently no option for this in my functions." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "x = 0\n", + "0.402459950796\n", + "(-inf, 0.0)\n", + "(-1.2565617248750864, 0.20891238174069848)\n", + "\n", + "x = 1\n", + "0.402459950796\n", + "(-6.4900077298790499, 8.5831970263090806e-11)\n", + "(-1.214676334045917, 0.22448956218811145)\n" + ] + } + ], + "source": [ + "print('x = 0')\n", + "count_ = 0\n", + "p_null_ = 0.05\n", + "print(smprop.binom_test(count_, nobs, prop=p_null_, alternative='two-sided'))\n", + "print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided'))\n", + "print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided', prop_var=p_null_))\n", + "print('\\nx = 1')\n", + "count_ = 0.05\n", + "p_null_ = 0.05\n", + "print(smprop.binom_test(count_, nobs, prop=p_null_, alternative='two-sided'))\n", + "print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided'))\n", + "print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided', prop_var=p_null_))" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Generalized Linear Model Regression Results \n", + "==============================================================================\n", + "Dep. Variable: ['y1', 'y2'] No. Observations: 1\n", + "Model: GLM Df Residuals: 0\n", + "Model Family: Binomial Df Model: 0\n", + "Link Function: identity Scale: 1.0\n", + "Method: IRLS Log-Likelihood: -1.7718\n", + "Date: Sat, 26 Dec 2015 Deviance: 0.0000\n", + "Time: 23:42:34 Pearson chi2: 0.00\n", + "No. 
Iterations: 4 \n", + "==============================================================================\n", + " coef std err z P>|z| [0.025 0.975]\n", + "------------------------------------------------------------------------------\n", + "const 0.2333 0.077 3.022 0.003 0.082 0.385\n", + "==============================================================================\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "m:\\josef_new\\eclipse_ws\\statsmodels\\statsmodels_py34_pr\\statsmodels\\genmod\\generalized_linear_model.py:205: UserWarning: The identity link function does not respect the domain of the Binomial family.\n", + " (family.link.__class__.__name__, family.__class__.__name__))\n" + ] + } + ], + "source": [ + "import statsmodels.api as sm\n", + "\n", + "res = sm.GLM([[7, 30 - 7]], [[1]], family=sm.genmod.families.Binomial(link=sm.genmod.families.links.identity)).fit()\n", + "print(res.summary())" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Test for Constraints \n", + "==============================================================================\n", + " coef std err z P>|z| [0.025 0.975]\n", + "------------------------------------------------------------------------------\n", + "c0 0.2333 0.077 -0.863 0.388 0.082 0.385\n", + "==============================================================================\n" + ] + }, + { + "data": { + "text/plain": [ + "('HO: const = 0.300000',\n", + " array(0.38795512282614564),\n", + " array([[ 0.08198448, 0.38468219]]))" + ] + }, + "execution_count": 11, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "tt = res.t_test('const - %f' % p_null)\n", + "print(tt)\n", + "'HO: const = %f' % p_null, tt.pvalue, tt.conf_int()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "The pvalue is exactly the same as the Wald test version of `proportions_ztest`. The confidence interval is identical to `proportion_confint` with method `\"normal\"`." + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " Generalized Linear Model Regression Results \n", + "==============================================================================\n", + "Dep. Variable: ['y1', 'y2'] No. Observations: 1\n", + "Model: GLM Df Residuals: 0\n", + "Model Family: Binomial Df Model: 0\n", + "Link Function: identity Scale: 1.0\n", + "Method: IRLS Log-Likelihood: -1.7718\n", + "Date: Sat, 26 Dec 2015 Deviance: 0.0000\n", + "Time: 23:42:34 Pearson chi2: 0.00\n", + "No. 
Iterations: 4 \n", + "==============================================================================\n", + " coef std err z P>|z| [0.025 0.975]\n", + "------------------------------------------------------------------------------\n", + "const -0.0667 0.077 -0.863 0.388 -0.218 0.085\n", + "==============================================================================\n" + ] + }, + { + "name": "stderr", + "output_type": "stream", + "text": [ + "m:\\josef_new\\eclipse_ws\\statsmodels\\statsmodels_py34_pr\\statsmodels\\genmod\\generalized_linear_model.py:205: UserWarning: The identity link function does not respect the domain of the Binomial family.\n", + " (family.link.__class__.__name__, family.__class__.__name__))\n" + ] + } + ], + "source": [ + "res = sm.GLM([[7, 30 - 7]], [[1]], family=sm.genmod.families.Binomial(link=sm.genmod.families.links.identity), offset=[[p_null]]).fit()\n", + "print(res.summary())" + ] + }, { "cell_type": "markdown", "metadata": {}, @@ -249,7 +553,7 @@ }, { "cell_type": "code", - "execution_count": 8, + "execution_count": 13, "metadata": { "collapsed": false }, @@ -260,7 +564,7 @@ "(0.025000000000000019, 0.025000000000000008, 0.025000000000000019)" ] }, - "execution_count": 8, + "execution_count": 13, "metadata": {}, "output_type": "execute_result" } @@ -272,7 +576,7 @@ }, { "cell_type": "code", - "execution_count": 9, + "execution_count": 14, "metadata": { "collapsed": false }, @@ -293,7 +597,7 @@ }, { "cell_type": "code", - "execution_count": 10, + "execution_count": 15, "metadata": { "collapsed": false }, @@ -306,7 +610,7 @@ " (-2.2785472457940248, 0.011346996472929981))" ] }, - "execution_count": 10, + "execution_count": 15, "metadata": {}, "output_type": "execute_result" } @@ -317,7 +621,7 @@ }, { "cell_type": "code", - "execution_count": 11, + "execution_count": 16, "metadata": { "collapsed": false }, @@ -330,7 +634,7 @@ " (-2.4540616477331247, 0.0070626381724014512))" ] }, - "execution_count": 11, + "execution_count": 16, "metadata": {}, "output_type": "execute_result" } @@ -356,7 +660,7 @@ }, { "cell_type": "code", - "execution_count": 12, + "execution_count": 17, "metadata": { "collapsed": false }, @@ -394,7 +698,7 @@ }, { "cell_type": "code", - "execution_count": 13, + "execution_count": 18, "metadata": { "collapsed": false }, @@ -427,7 +731,7 @@ }, { "cell_type": "code", - "execution_count": 14, + "execution_count": 19, "metadata": { "collapsed": false }, @@ -454,7 +758,7 @@ }, { "cell_type": "code", - "execution_count": 15, + "execution_count": 20, "metadata": { "collapsed": false }, @@ -536,7 +840,7 @@ }, { "cell_type": "code", - "execution_count": 16, + "execution_count": 21, "metadata": { "collapsed": false }, @@ -547,7 +851,7 @@ "(-0.84882088476305617, 0.40293351466675675, 29.0)" ] }, - "execution_count": 16, + "execution_count": 21, "metadata": {}, "output_type": "execute_result" } @@ -561,7 +865,7 @@ }, { "cell_type": "code", - "execution_count": 17, + "execution_count": 22, "metadata": { "collapsed": false }, @@ -590,7 +894,7 @@ " 1., 1., 1., 1.])}" ] }, - "execution_count": 17, + "execution_count": 22, "metadata": {}, "output_type": "execute_result" } @@ -601,7 +905,7 @@ }, { "cell_type": "code", - "execution_count": 18, + "execution_count": 23, "metadata": { "collapsed": false }, @@ -612,7 +916,7 @@ "(-0.84882088476305617, 0.79853324266662162, 29.0)" ] }, - "execution_count": 18, + "execution_count": 23, "metadata": {}, "output_type": "execute_result" } @@ -623,7 +927,7 @@ }, { "cell_type": "code", - "execution_count": 19, + "execution_count": 
24, "metadata": { "collapsed": false }, @@ -634,7 +938,7 @@ "(-0.84882088476305617, 0.20146675733337838, 29.0)" ] }, - "execution_count": 19, + "execution_count": 24, "metadata": {}, "output_type": "execute_result" } @@ -652,7 +956,7 @@ }, { "cell_type": "code", - "execution_count": 20, + "execution_count": 25, "metadata": { "collapsed": false }, @@ -665,7 +969,7 @@ " (-2.4128139764969032, 0.011189332556866095, 29.0))" ] }, - "execution_count": 20, + "execution_count": 25, "metadata": {}, "output_type": "execute_result" } @@ -683,7 +987,7 @@ }, { "cell_type": "code", - "execution_count": 21, + "execution_count": 26, "metadata": { "collapsed": false }, @@ -694,7 +998,7 @@ "(-0.84882088476305606, 0.20146675733337843, 29.0)" ] }, - "execution_count": 21, + "execution_count": 26, "metadata": {}, "output_type": "execute_result" } @@ -706,7 +1010,7 @@ }, { "cell_type": "code", - "execution_count": 22, + "execution_count": 27, "metadata": { "collapsed": false }, @@ -719,7 +1023,7 @@ " (-2.4128139764969028, 0.011189332556866103, 29.0))" ] }, - "execution_count": 22, + "execution_count": 27, "metadata": {}, "output_type": "execute_result" } @@ -768,7 +1072,7 @@ }, { "cell_type": "code", - "execution_count": 23, + "execution_count": 28, "metadata": { "collapsed": false }, @@ -783,7 +1087,7 @@ }, { "cell_type": "code", - "execution_count": 24, + "execution_count": 29, "metadata": { "collapsed": false }, @@ -824,7 +1128,7 @@ " [ 3.00000000e+01, 2.05891132e-16, 1.00000000e+00]])" ] }, - "execution_count": 24, + "execution_count": 29, "metadata": {}, "output_type": "execute_result" } @@ -853,7 +1157,7 @@ }, { "cell_type": "code", - "execution_count": 25, + "execution_count": 30, "metadata": { "collapsed": true }, @@ -866,7 +1170,7 @@ }, { "cell_type": "code", - "execution_count": 26, + "execution_count": 31, "metadata": { "collapsed": false }, @@ -901,7 +1205,7 @@ }, { "cell_type": "code", - "execution_count": 27, + "execution_count": 32, "metadata": { "collapsed": false }, @@ -912,7 +1216,7 @@ "0.047092254594638484" ] }, - "execution_count": 27, + "execution_count": 32, "metadata": {}, "output_type": "execute_result" } @@ -931,7 +1235,7 @@ }, { "cell_type": "code", - "execution_count": 28, + "execution_count": 33, "metadata": { "collapsed": false }, @@ -942,7 +1246,7 @@ "0.07020749078421995" ] }, - "execution_count": 28, + "execution_count": 33, "metadata": {}, "output_type": "execute_result" } @@ -962,7 +1266,7 @@ }, { "cell_type": "code", - "execution_count": 29, + "execution_count": 34, "metadata": { "collapsed": false }, @@ -973,7 +1277,7 @@ "(4.0, 14.0)" ] }, - "execution_count": 29, + "execution_count": 34, "metadata": {}, "output_type": "execute_result" } @@ -1001,7 +1305,7 @@ }, { "cell_type": "code", - "execution_count": 30, + "execution_count": 35, "metadata": { "collapsed": true }, @@ -1012,7 +1316,7 @@ }, { "cell_type": "code", - "execution_count": 31, + "execution_count": 36, "metadata": { "collapsed": false }, @@ -1023,7 +1327,7 @@ "(4.0, 14.0)" ] }, - "execution_count": 31, + "execution_count": 36, "metadata": {}, "output_type": "execute_result" } @@ -1041,7 +1345,7 @@ }, { "cell_type": "code", - "execution_count": 32, + "execution_count": 37, "metadata": { "collapsed": false }, @@ -1052,7 +1356,7 @@ "(0.030154943102089313, 0.040052547682131269)" ] }, - "execution_count": 32, + "execution_count": 37, "metadata": {}, "output_type": "execute_result" } @@ -1070,7 +1374,7 @@ }, { "cell_type": "code", - "execution_count": 33, + "execution_count": 38, "metadata": { "collapsed": false }, @@ 
-1081,7 +1385,7 @@ "(0.0093165673111771617, 0.016937311492549543, 0.026253878803726705)" ] }, - "execution_count": 33, + "execution_count": 38, "metadata": {}, "output_type": "execute_result" } @@ -1101,7 +1405,7 @@ }, { "cell_type": "code", - "execution_count": 34, + "execution_count": 39, "metadata": { "collapsed": false }, @@ -1112,7 +1416,7 @@ "(0.030154943102089313, 0.016937311492549543, 0.047092254594638852)" ] }, - "execution_count": 34, + "execution_count": 39, "metadata": {}, "output_type": "execute_result" } @@ -1125,7 +1429,7 @@ }, { "cell_type": "code", - "execution_count": 35, + "execution_count": 40, "metadata": { "collapsed": false }, @@ -1136,7 +1440,7 @@ "(0.0093165673111771617, 0.040052547682131269, 0.049369114993308427)" ] }, - "execution_count": 35, + "execution_count": 40, "metadata": {}, "output_type": "execute_result" } @@ -1149,7 +1453,7 @@ }, { "cell_type": "code", - "execution_count": 36, + "execution_count": 41, "metadata": { "collapsed": false }, @@ -1160,7 +1464,7 @@ "0.95290774540536105" ] }, - "execution_count": 36, + "execution_count": 41, "metadata": {}, "output_type": "execute_result" } @@ -1179,7 +1483,7 @@ }, { "cell_type": "code", - "execution_count": 37, + "execution_count": 42, "metadata": { "collapsed": false }, @@ -1190,7 +1494,7 @@ "(0.0093165673111771617, 0.030154943102089313)" ] }, - "execution_count": 37, + "execution_count": 42, "metadata": {}, "output_type": "execute_result" } @@ -1202,7 +1506,7 @@ }, { "cell_type": "code", - "execution_count": 38, + "execution_count": 43, "metadata": { "collapsed": false }, @@ -1213,7 +1517,7 @@ "0.047092254594638852" ] }, - "execution_count": 38, + "execution_count": 43, "metadata": {}, "output_type": "execute_result" } @@ -1236,7 +1540,7 @@ }, { "cell_type": "code", - "execution_count": 39, + "execution_count": 44, "metadata": { "collapsed": true }, @@ -1251,7 +1555,7 @@ }, { "cell_type": "code", - "execution_count": 40, + "execution_count": 45, "metadata": { "collapsed": false }, @@ -1262,7 +1566,7 @@ "(0.018633134622354323, 0.060309886204178625)" ] }, - "execution_count": 40, + "execution_count": 45, "metadata": {}, "output_type": "execute_result" } @@ -1273,7 +1577,7 @@ }, { "cell_type": "code", - "execution_count": 41, + "execution_count": 46, "metadata": { "collapsed": false }, @@ -1284,7 +1588,7 @@ "(0.16894012072030201, 0.080105095364262538)" ] }, - "execution_count": 41, + "execution_count": 46, "metadata": {}, "output_type": "execute_result" } @@ -1329,7 +1633,7 @@ }, { "cell_type": "code", - "execution_count": 42, + "execution_count": 47, "metadata": { "collapsed": false }, @@ -1374,7 +1678,7 @@ }, { "cell_type": "code", - "execution_count": 43, + "execution_count": 48, "metadata": { "collapsed": false }, @@ -1405,7 +1709,7 @@ }, { "cell_type": "code", - "execution_count": 44, + "execution_count": 49, "metadata": { "collapsed": true }, @@ -1468,7 +1772,7 @@ }, { "cell_type": "code", - "execution_count": 45, + "execution_count": 50, "metadata": { "collapsed": false }, @@ -1500,7 +1804,7 @@ }, { "cell_type": "code", - "execution_count": 46, + "execution_count": 51, "metadata": { "collapsed": false }, @@ -1530,7 +1834,7 @@ }, { "cell_type": "code", - "execution_count": 47, + "execution_count": 52, "metadata": { "collapsed": false }, @@ -1553,7 +1857,7 @@ }, { "cell_type": "code", - "execution_count": 48, + "execution_count": 53, "metadata": { "collapsed": false }, @@ -1583,7 +1887,7 @@ }, { "cell_type": "code", - "execution_count": 49, + "execution_count": 54, "metadata": { "collapsed": false }, @@ 
-1608,7 +1912,7 @@ " [ 0.8 , 0.99994761, 0.99994761]])" ] }, - "execution_count": 49, + "execution_count": 54, "metadata": {}, "output_type": "execute_result" } @@ -1630,7 +1934,7 @@ }, { "cell_type": "code", - "execution_count": 50, + "execution_count": 55, "metadata": { "collapsed": false }, @@ -1655,7 +1959,7 @@ " (0.99994761271288934, (4, 15))]" ] }, - "execution_count": 50, + "execution_count": 55, "metadata": {}, "output_type": "execute_result" } @@ -1673,7 +1977,7 @@ }, { "cell_type": "code", - "execution_count": 51, + "execution_count": 56, "metadata": { "collapsed": false }, @@ -1681,10 +1985,10 @@ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 51, + "execution_count": 56, "metadata": {}, "output_type": "execute_result" }, @@ -1692,7 +1996,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAegAAAFwCAYAAABzZegiAAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzt3Xd4VFXixvHvSSUBAgEpAoIRRAQFRZqydKU3KQIqIAqi\ngL3iWlgLltWfDUEsIKICikhRqkIEEQxLU6RLB0Gk94TM+f0xwc0iZZK5kzvl/TxPHpNwc+a9huTl\nnnOLsdYiIiIiwSXK7QAiIiLydypoERGRIKSCFhERCUIqaBERkSCkghYREQlCKmgREZEgdN6CNsZ8\naIzZZYz5+RzbvGWMWWeMWWaMucrZiCIiIpHHlyPokUCzs/2hMaYFUN5aeynQF3jXoWwiIiIR67wF\nba39Adh3jk3aAR9nbfsTUMgYU8KZeCIiIpHJiTXo0sDWbB9vz/qciIiI5JJOEhMREQlCMQ6MsR24\nKNvHZbI+9zfGGN34W0REIo611uT0a3wtaJP1diaTgf7AOGNMHWC/tXbX2QYK54dzDBo0iEGDBrkd\nI2C0f6ErnPcNwnf/jh7P4P0ZP/LB0H9Tql5d9h3bx8H0fRzO3MdRzz6Om31kRO/jZOw+iDuESU8i\nOiOZ2Mxk8tlkEqOSKRiTTFJcMsn5kimaP5niBZMpUSiZUsnJlC1WhHLFkyl9QRJxsdGu7We4fv9O\nMSbH3Qz4UNDGmM+AhkBRY8wW4BkgDrDW2vestVONMS2NMeuBI0CvXCUREREWrdnG29Om8e3mafye\nbzaJxyuQcNiSeHw/RRKKcOkF5YOyZMV55y1oa+3NPmwzwJk4IiKR5fCxdIZP+4Gxi6fxy7HppMf9\nTtmMprS99EbuafkuVS4uHvZHmHJmTqxBS5aGDRu6HSGgtH+hK5z3DUJv/xas3MKQGdOYvWUaOxPm\nkP9YJWoUasGQeu/To0nNvx0Jh9r+5VS4719umbxcEzbG2HBegxYROZODR07w7rR5fL5kGr8cn0ZG\n3G7KZTSjxaUtuKdlUy4vW8ztiBJAxphcnSSmghYRCYAfVmzinRnTmLNtGrsSvqfAscrULNyCnte1\n4JbG1xATratcI4UKWkTERfsPH2fY1Ll8sXQav6ZPIyNmHymZzWl5aXPubdWUS8sUdTuiuEQFLSKS\nx2Yv+41hs6Yxd8d0/kiYS8GjV1KrSAtuq9uCrg2v1lGyACpoEZE8MWvxOh4ZP4SV6dPIjD7EJZ7m\ntLysOfe2uoHypYq4HU+CUG4LWmdxi4j46OlPvub5X27nH/nuYlSzcXSuV01HyRIwKmgRkfM4memh\n+QuDmXPoXd5tPIk7W1zrdiSJACpoEZFz2LHnEDVfuI2DdgeL+qVR/dJSbkeSCKG5GRGRs/hu6Xou\neeFaCkYXYfvzqSpnyVMqaBGRM3h+7HRuGFuXDmUGsPLl90jKH+92JIkwmuIWEcnG47G0evEVZh54\nk7fqjWdAm3puR5IIpYIWEcnyx74j1HjudvbYDSy8K42al5VxO5JEME1xi4gAqcs3UO7Z64iLSmD7\nc/NUzuI6FbSIRLxXxn9Lk8+uo1Wp3qx9ZSSFC+RzO5KIprhFJHJ5PJZ2L/8fU/e9ymvXjeX+9g3d\njiTyFxW0iESkPw8cpcazffjDs4q5vRdSt0o5tyOJ/A9NcYtIxJn/62bKDvoHBsOWQT+onCUoqaBF\nJKK8MTGV+qPqcEOJW/nt36O5oFCi25FEzkhT3CISETweS+dX32binsG8WOsTHu10vduRRM5JBS0i\nYW//4ePUGHQX2zOXMqfXAupXTXE7ksh5qaBFJKwtWrONRu92oIhJYfPTP1I8Ob/bkUR8ojVoEQlb\nQ7/+gTof1qLeBR3Z9OpYlbOEFB1Bi0jY8Xgst7z+LuP+GMSz1UfxZNfmbkcSyTEVtIiElYNHTlBr\n0AA2ZS5gVvf5NLm6gtuRRHJFBS0iYWPJuh00GNqRJFOKDf9cQKmiBd2OJJJrWoMWkbDw3rQF1Hyv\nFnWKtGbzv79QOUvI0xG0iIS8nm9+wOgdT/BktRE8e2trt+OIOEIFLSIhrddbIxiz5RW+6TqPFjUv\nczuOiGOMtTbvXswYm5evJyLhbcXGXVR990o+b/0tnepVdTuOyBkZY7DWmhx/nQpaREJVykO3Uixf\nKdJeeMXtKCJnlduC1hS3iISkV8Z/y1bzAz89/KvbUUQCQmdxi0jI2X/4OE8uuJsnrhqiu4NJ2NIU\nt4iEnPrPPM2GQyvZ9n/j3Y4icl6a4haRiDA1bTU/nBhG2l3L3I4iElCa4haRkOHxWG7+7C46FH2a\nGhVLux1HJKBU0CISMu4cOooMc4TP7u/ndhSRgNMUt4iEhDVb/2Tk1sf5uM1U4mKj3Y4jEnA6SUxE\nQsKlD/eiYGxhlrz4uttRRHJEJ4mJSNh6Y2IqG813bNE1zxJBtAYtIkHt4JETPDbvLh654i09oUoi\niqa4RSSoNXn2OVbu+w+/vz7J7SgiuaIpbhEJO7MWr2PO0TeZ32eJ21FE8pymuEUkKHk8li6j76ZN\n4Se4tnJZt+OI5DkVtIgEpQHvfcZxs4dxD97rdhQRV2iKW0SCzm879jJ8w8N80GIS+eL0a0oik04S\nE5Ggc/mjdxIbFcfPLw1xO4qI33SSmIiEhWHfzGct37DxwZVuRxFxldagRSRoHD6WzgPf9eW+y96g\nbPFCbscRcZUKWkSCRqfX
/o8kW5ZXe3VyO4qI6zTFLSJBIXX5BmYefpXUXouIisrxcp1I2NERtIi4\nzuOxdP6oP80KPkL9qiluxxEJCipoEXHdQyO+4JDZxpcPPeh2FJGgoSluEXHV5l37eWvtAwy9/gsS\n88W6HUckaOg6aBFx1ZWP9eekPcmqV4a7HUUkIELmOmiPB6I0sS4iwIczfmKlncD6B3TNs8jp8rwq\nl+ihNCICHE8/yYDpfbnrktdIuTDZ7TgiQSfPC3rGjLx+RREJRp1fe5NEW4y37+zmdhSRoORTQRtj\nmhtjVhtj1hpjHjvDnycZYyYbY5YZY34xxtx2trFU0CIy/9fNfHPgRT7vOUzXPIucxXlPEjPGRAFr\ngSbADmAR0NVauzrbNgOBJGvtQGPMBcAaoIS19uRpY9kCBSzbt0NSksN7IiIhweOxlHqoHVck1+Lb\np590O45IwOX2JDFfjqBrAeustZuttRnAWKDdadtYoGDW+wWBPaeX8ynXXguzZ+c0poiEi4EfT2Sf\nWceEhx5xO4pIUPOloEsDW7N9vC3rc9kNASobY3YAy4H7zjZYs2aa5haJVDv2HOK1X+/l1YbDScof\n73YckaDm1ElizYCl1tpSwNXAO8aYAmfcMKugdTm0SORp9epTXMIN3NO2vttRRIKeL9dBbwfKZvu4\nTNbnsusFvAhgrf3NGLMRqAT85/TBvvhiEH/+CffeCx07NqRhw4a5Ci4ioeWT7xaz3DOGVff+6nYU\nkYBKTU0lNTXV73F8OUksGu9JX02A34E0oJu1dlW2bd4B/rDW/ssYUwJvMVez1u49bSxrreX22+Hq\nq+Gee/zOLyIhID0jk+RHa3NzhXt4v39Pt+OI5KmAnSRmrc0EBgAzgV+BsdbaVcaYvsaYO7M2ex64\nzhjzMzALePT0cs5O69AikaXb6+8QZwsy/O4ebkcRCRmu3It7zx5ISYHduyFe54mIhLVFa7ZRe+RV\nfN3hB1rWquR2HJE8F8jLrBxXtChUrgzz57vx6iKSl9oPv496+fqrnEVyyLXHVmiaWyT8PTV6CrvN\nL0x6eKDbUURCjgpaRALij31HePHnATx/7VAKF8jndhyRkOPa86BPnoTixWHlSihZMs8iiEgeqfnP\nR9hzfCcbXhvtdhQRV4XUGjRATAw0bgwzZ7qVQEQC5fO5y1l8chRT7nnN7SgiIcu1ggZNc4uEo/SM\nTG7/qi/dSw2mysXF3Y4jErJcL+hZs8DjcTOFiDhp4MdfAR4+HHC721FEQpqrBV22rPeSqyVL3Ewh\nIk768Jch3FH5IWKiXf31IhLyXP8Jat5c09wi4eLLH37hUNw6Xuzewe0oIiHP9YLWOrRI+Hhy8hAa\n5O9LYr5Yt6OIhDzXLrM65ehRKFECtm+HpKQ8iyIiDtv4+z7Kv3UJy/qsouolunZS5JSQu8zqlMRE\nuPZamD3b7SQi4o/7R31E2fSWKmcRh7he0KBpbpFQdzLTw9Td7/DE9QPcjiISNoKqoPNwtl1EHPTC\nuOnEeQrTu1kdt6OIhI2gKOgqVSA9HdavdzuJiOTGkEVv0638AKKicrzMJiJnERQFbQw0bappbpFQ\nNGvxOvbELebfPbu4HUUkrARFQYPWoUVC1WPjh1I77g6SCya4HUUkrLh+mdUpe/ZASgrs3g3x8XkW\nSUT8sHPvYUq9Uo553ZdQt0o5t+OIBKWQvczqlKJFoXJlmD/f7SQi4qsHP/qEkicaqJxFAiBoCho0\nzS0SSjwey4RtQ3ioni6tEgkEFbSI5Mobk1IBywPtG7kdRSQsBVVB16oFW7bAzp1uJxGR83lt3hBu\nLK1Lq0QCJagKOiYGGjeGmTPdTiIi57Jg5RZ+j5/D6726ux1FJGwFVUGDprlFQsFDY96lGt0pWaSA\n21FEwlZQFvSsWeDxuJ1ERM5k/+HjLEz/gJc69nc7ikhYC7qCLlvWe8nV0qVuJxGRM3lk1DiKpFen\nWY2KbkcRCWtBV9CgaW6RYOXxWD5b/zYDatzjdhSRsBeUBd28OUyf7nYKETndyJlppEfv44mbmrsd\nRSTsBWVB16/vneI+eNDtJCKS3fOz3qbFBf2Ii412O4pI2AvKgk5MhGuvhdmz3U4iIqes2LiLzXHf\n8GbP292OIhIRgrKgQevQIsHmvtHvc1lmZ1IuTHY7ikhECPqCzsOHbYnIWRw9nkHqoXd5ro3uuy2S\nV4K2oKtUgfR0WL/e7SQi8s9PJlIwozyd6lV1O4pIxAjagjYGmjbVNLdIMBix4m3uuFJHzyJ5KWgL\nGrQOLRIMPp+7nMOxG3julvZuRxGJKEFd0NdfD3Pneqe6RcQdT095h0YF7yIxX6zbUUQiSlAXdNGi\nUKkSzJ/vdhKRyPTbjr2sjfmCN3r0cTuKSMQJ6oIG3VVMxE33jxrJxemtueLiEm5HEYk4QV/QWocW\ncUd6RibT9wzlyWY6OUzEDUFf0LVqwZYtsHOn20lEIsvz46YRl1mEXjfUcjuKSEQK+oKOiYHGjWHm\nTLeTiESWof8Zwi0V7sEY43YUkYgU9AUNmuYWyWvTF61lb/xSXul5k9tRRCJWyBT0rFng8bidRCQy\nPD7hHa6N703hAvncjiISsWLcDuCLsmW9l1wtXQrXXON2GpHwtmPPIX5mNAu6LXc7ikhEC4kjaNA0\nt0heefCj0Vx4ohG1L7/I7SgiEU0FLSJ/8XgsX20fwiP173E7ikjEC5mCbtAAliyBgwfdTiISvv7v\nqzkYori3bQO3o4hEvJAp6MREuPZamD3b7SQi4ev/fnibjhcNICpKl1aJuC3PC3rX4V25/lpNc4sE\nzvxfN7Mzfi6v9bzV7SgiggsFPWfTnFx/7amCttbBQCICwMNj3uWqqB6ULFLA7SgighsFvTH3BV2l\nivfRk+vXOxhIRNh78Bg/ZXzIvzv3dzuKiGTJ84KevSn3i8jGQNOmmuYWcdojo8ZxQXoNmlxdwe0o\nIpIlzwt6//H9bD2wNddfr3VoEWd5PJYxG95mQC09tUokmOR5QTe6uJFf69DXXw9z53qnukXEfx9M\nX8jJ6AM8cVNzt6OISDYhV9BFi0KlSjB/voOhRCLY4O+G0LJYf2KiQ+aqS5GIkOc/kY1TGjN742ys\nH6dia5pbxBnLf9vJlripvHlbL7ejiMhp8rygKxatSEZmBhv3b8z1GM2bw/TpDoYSiVD3f/Iel3u6\nUK5EYbejiMhp8rygjTE0SmnE7I25P5u7Vi3YsgV27nQwmEiEOXIsg7lHhvNcW11aJRKMfCpoY0xz\nY8xqY8xaY8xjZ9mmoTFmqTFmhTHmnIvMjS9u7Nc6dEwMNG4MM2fmegiRiPfEJxNISq9Ih7pXuh1F\nRM7gvAVtjIkChgDNgCpAN2NMpdO2KQS8A7S21l4BdD7XmKeOoLUOLeKej34dQu9qurRKJFj5cgRd\nC1hnrd1src0AxgLtTtvmZuBLa+12AGvtn+caMKVwCvHR8azZsyY3mQFvQc+aBR5Pr
ocQiVhjU5dx\nJHYTz91y+o+yiAQLXwq6NJD9ziLbsj6XXUWgiDFmjjFmkTGm+7kGdGIdumxZ7yVXS5fmegiRiPXM\nN0NonHQ3+eJi3I4iImfh1EliMUB1oAXQHHjKGHPOewb6uw4NmuYWyY312/eyLuZL3ujR2+0oInIO\nvvzzeTtQNtvHZbI+l9024E9r7XHguDFmLlAN+NtjLQYNGgTAwRMHmbl3Jp5OHqJM7v6d0KwZvPQS\nPPFErr5cJCLdN+pDUjLaUrlccbejiISl1NRUUlNT/R7HnO9ELWNMNLAGaAL8DqQB3ay1q7JtUwl4\nG+/RczzwE9DFWrvytLFs9ter+HZFxt80nqolquYq/NGjUKIEbN8OSUm5GkIkoqRnZJJ/YAU+aP45\nPa+v6XYckYhgjMFaa3L6dec9dLXWZgIDgJnAr8BYa+0qY0xfY8ydWdusBmYAPwMLgfdOL+czaXSx\nf+vQiYlQpw7M8W+mXCRiPDt2KvGZxVXOIiHAp7lla+10a+1l1tpLrbUvZX1uuLX2vWzbvGqtrWKt\nrWqtfduXcRun+L8OrbuKifhu2OK36V7xHrdjiIgPXL07fsOLGzJ381wyPZm5HuPUiWJ+XFItEhGm\npq1mf/xyXu5xztsUiEiQcLWgSxQoQemCpVm6M/fXSlWp4n305Pq/nY4mItkNnDCUa/P1ISl/vNtR\nRMQHrj9fzt91aGOgaVNdbiVyLtv/PMQv5hNev+Uut6OIiI9cL2gn1qF1PbTIuT3w0ceUPtGEmhXL\nuB1FRHzkekE3uLgB87fMJyMzI9djXH89zJ3rneoWkf/l8Vgm7RjCow11cphIKHG9oIskFKFCkQos\n2rEo12MULQqVKsH8+Q4GEwkTb02eiyGa/q3ruR1FRHLA9YIG/9ehQdPcImfz9g8jaF7iDqKicnyf\nBBFxUVAUtNahRQJjyx8H2BA7iZduvtXtKCKSQ0FR0PXK1eOnbT9x/OTxXI9RuzZs3gw7dzoYTCTE\nPfHpOEqfuJ5KFxVzO4qI5FBQFHRSfBJXFL+ChdsW5nqMmBho3BhmznQwmEiIm7h5BHfWvN3tGCKS\nC0FR0KB1aBGnTfzxV47FbeWxTk3djiIiuRA0Be3UOvSsWeDxOBRKJIQ99/UI6uS7jfhYX54qKyLB\nJmgKum7Zuiz9fSlH0o/keoyyZb2XXC3N/Z1DRcLC4WPpLM38hGc79HI7iojkUtAUdGJsItUvrM78\nrf5dzKxpbhF4duw3JKVXoslVFdyOIiK5FDQFDd516DkbdbmViL8++vlDulx6h9sxRMQPQVXQjVMa\nM3uTfyeKNWgAS5bAwYMOhRIJMYvX7uDPfD/y/M0d3Y4iIn4IqoKuU6YOK3ev5MDxA7keIzER6tSB\nOf4diIuErCfGfcxlnk4UK5zf7Sgi4oegKuj4mHhql67NvC3z/BpH09wSqTwey5z9I3i4sa59Fgl1\nQVXQ4Mw6dPPmMH06WOtQKJEQ8c6UH4iysfS6obbbUUTET0FX0E6sQ1ep4n305Pr1DoUSCRFvzhtB\ns+K368EYImEg6Aq6RqkabNi3gT1H9+R6DGOgaVNNc0tk2frHQX6LnciL3bq7HUVEHBB0BR0bHUvd\ni+ry/ebv/RqnRQv45huHQomEgH9+9jmlTjSicrnibkcREQcEXUGDc9dDz58Phw45FEokyH21aQR9\naujkMJFwEZQF7cQ6dFIS1K3rPVlMJNxNXrCKo3GbeLxTc7ejiIhDgrKgryp5FTsO7WDX4V1+jdOu\nHUya5FAokSD27JQR1M7Xg3xxejCGSLgIyoKOjoqmQbkGfj/dqm1bmDoVMjIcCiYShI4cy2BJ5mj+\n1V7T2yLhJCgLGpxZhy5VCi69FObOdSiUSBB6btxUCqZfyg3VK7odRUQcFLQF7cQ6NGiaW8LfyGUf\n0rmCjp5Fwk3QFnSV4lXYf3w/Ww9s9WucUwWtu4pJOFqy7nd2J8zjhZs7ux1FRBwWtAUdZaK809x+\nrkNXrgxxcbBsmUPBRILIP8eNpmJmR0okF3A7iog4LGgLGnCkoI3RNLeEJ4/H8t2+D3lID8YQCUtB\nXdCNUxoze+NsrJ/z0ypoCUfDvv4RQxR3NL3W7SgiEgBBXdAVi1YkIzODjfs3+jXOddfB9u2waZMz\nuUSCwRtzR9C0mB6MIRKugrqgjTF/HUX7IzoaWreGyZMdCibish1/HmZ97ARe7KoHY4iEq6AuaHBm\nHRo0zS3hZeCnn1PyRH2uuLik21FEJECCv6BTGjmyDn3DDbBoEezd61AwERd9tXEEfa65w+0YIhJA\nQV/QKYVTiI+OZ82eNX6Nk5gIjRt7b/0pEsq++WkNR+LXM7BTC7ejiEgABX1BG2P+Oor2l6a5JRz8\na9IIasb1ICE+1u0oIhJAQV/QAI0vbuzIOnTr1jBzJhw/7kAoERccPZ7B4pMfM6idrn0WCXchUdCN\nUrwPzvBYj1/jFCsG1arBbP8PxkVc8fy46RTIuITmNSq5HUVEAiwkCrpMUhmKJBRhxR8r/B5L09wS\nykYs/ZCO5XX0LBIJQqKgwXu5lVPr0JMng8e/g3GRPLds/U52JaYy+Jab3I4iInkgZAq6cYoz69AV\nKkDRopCW5kAokTz0xNhPqHiyAyWTC7odRUTyQMgUdMOLGzJ381wyPZl+j6Vpbgk1Ho/lu70jeLCR\nprdFIkXIFHSJAiUoXbA0S3cu9XssFbSEmuFTF4LJpE+zum5HEZE8EjIFDc6tQ9eoAQcOwNq1DoQS\nyQOvp47g+gv0YAyRSBJSBe3UOnRUFLRtq6NoCQ079x5hXex4XuzSw+0oIpKHQqqgG1zcgPlb5pOR\nmeH3WJrmllAxcPQXlDjxD6pecqHbUUQkD4VUQRdJKEKFIhVYtGOR32M1agQrVsCuXQ4EEwmgLzeM\n4I6rdXKYSKQJqYIG59ah4+OhWTP4+msHQokEyLS0tRzJt4Ynb2rtdhQRyWMhV9BOrUODprkl+A2a\nNJJrYrvrwRgiESjkCrpeuXr8tO0njp/0/4kXLVtCaiocOeJ/LhGnHTtxkv9kjNKDMUQiVMgVdFJ8\nElcUv4KF2xb6PVbhwlC7tvcJVyLB5oVxM8ifUY6WNSu7HUVEXBByBQ3OrUODprkleH24ZAQdLtHR\ns0ikCsmCdnIdum1b74liJ086MpyII37Z8Ac7E79j8M1d3I4iIi4JyYKuW7YuS39fypF0/xePy5aF\ncuVg/nwHgok4ZOCYT7j0ZHtKFU1yO4qIuCQkCzoxNpHqF1Zn/lZnWlXT3BJMPB7LrD8/5P4Gmt4W\niWQhWdDgXYees9HZy62sdWQ4Eb+8Py0NG53OXS3quR1FRFzkU0EbY5obY1YbY9YaYx47x3Y1jTEZ\nxpgOzkU8s8YpjZm9yZkTxapWBY/He2cxEbf935wRNCnSSw/GEIlw5y1oY0wUMARoBlQBuhljKp1l\nu5eAGU6HPJM6ZeqwcvdKDhw/4PdYxmia
W4LDrr1HWRf7BS926el2FBFxmS9H0LWAddbazdbaDGAs\n0O4M290DjAf+cDDfWcXHxFO7dG3mbZnnyHgqaAkGT3wynuInruWq8qXdjiIiLvOloEsDW7N9vC3r\nc38xxpQC2ltrhwF5Ni/n5Dp0vXqwYQNs2+bIcCK5Mv63Edx2lU4OExHnThJ7A8i+Np0nJe3kOnRM\nDLRqBZMnOzKcSI7NWLSewwkrebpLG7ejiEgQiPFhm+1A2Wwfl8n6XHY1gLHGGANcALQwxmRYa/9W\nd4MGDfrr/YYNG9KwYcMcRs72oqVqsGHfBvYc3UPRxKK5HueUdu3gvfegXz+/hxLJsUETP6J6zK0k\nxse5HUVE/JCamkpqaqrf4xh7nmuLjDHRwBqgCfA7kAZ0s9auOsv2I4Ep1toJZ/gze77Xy6mWn7ak\nd/XedLjc/xPHDx+GUqVg61YoVMiBcCI+On4ik/xPlmNip+m0qX2F23FExEHGGKy1OZ5ZPu8Ut7U2\nExgAzAR+BcZaa1cZY/oaY+4805fkNIQ/nFyHLlAA6teHadMcGU7EZ4M/n0liZimVs4j8xZcpbqy1\n04HLTvvc8LNsm6dnuDROaUyPiT0cG+/U2dxduzo2pMh5fbB4BDdecofbMUQkiITsncROuarkVew4\ntINdh3c5Ml6bNjB9OqSnOzKcyHmt2Lib3xNn8eIt+lehiPxXyBd0dFQ0Dco1cOzpViVLwuWXgwPr\n+yI+GTjmU8qfbEPpojrxQUT+K+QLGpxdhwbdtETyjsdjmbl7BPfV0/S2iPyvsChoJ6+HBj08Q/LO\niOn/wUYfoV+r+m5HEZEgExYFXaV4FfYf38/WA1vPv7EPKlWC/Plh8WJHhhM5q9dmj6BRci+io8Li\nR1FEHBQWvxWiTJR3mtuhdWiA9u01zS2B9ce+o6yJHcdgPRhDRM4gLAoacLygtQ4tgfbkJ19R7ERt\nrqlwkdtRRCQIhU1BN05pzOyNs3HqTmW1a8OuXd4HaIgEwufrP6RnNT0YQ0TOLGwKumLRimRkZrBx\n/0ZHxouOhrZtdRQtgfHt4g0cTPiFp29q63YUEQlSYVPQxpi/jqKdomluCZSnv/qI6jG3UCAh3u0o\nIhKkwqagwfl16CZNYOlS+PNPx4YU4ejxk/x0/COebNXL7SgiEsTCqqCdXodOSIDrr4dvvnFkOBEA\nHvpwPEmecrS/tprbUUQkiIVVQackpxAfHc+aPWscG1PT3OKkzEzLyHUv8/C1j7kdRUSCXFgVNECj\nlEaOrkO3agXffQfHjjk2pESw5z6bhYlJ5/GOLd2OIiJBLuwKuvHFjR1dhy5aFKpXh2+/dWxIiVDW\nwutpr3BHpUd15zAROa+w+y3RKMX74AyP9Tg2pqa5xQkfTF3M0YS1/LtHN7ejiEgICLuCLpNUhiIJ\nRVjxxwrwaBkPAAAgAElEQVTHxmzXDqZMgcxMx4aUCPT0jJe58cIHSIiLczuKiISAsCto8F5u5eQ6\ndEoKlCgBCxc6NqREmK9/XM8fiXMYekcft6OISIgIy4JunOLsOjTo4Rninwe+eJWGBe/igqQCbkcR\nkRARlgXd8OKGzN08l0yPc3PS7drBxIl6RrTkXNrKnfyWbxzDb7/H7SgiEkLCsqBLFChB6YKlWbpz\nqWNjVq/uvdRq9WrHhpQI0e+jt7kqphsVLizudhQRCSFhWdDg/Dq0MTqbW3Juw7ZDLIkazrDuD7sd\nRURCTNgWdJvL2jBmxRjHbvsJKmjJub7vvcclXE/tipe4HUVEQkzYFvT1l1zP4fTDLNi2wLExGzTw\nTnH//rtjQ0oY27M/ndnHXue1Drqtp4jkXNgWdJSJol+Nfryz6B3HxoyLgxYtvNdEi5xP/+GfUozK\ntKt1tdtRRCQEhW1BA9x21W1MXTeVXYd3OTamprnFF8dPePhy5ys83eRRt6OISIgK64JOTkimc+XO\nvL/kfcfGbNEC5s2DQ4ccG1LC0KMfTCEhJpG7mzVxO4qIhKiwLmiA/jX7M3zxcE56TjoyXlISXHst\nzJjhyHAShjwe+GDVK9x3zWMYY9yOIyIhKuwLulrJalxc+GImr5ns2Ji6q5icy0uf/YAncRfPdO7o\ndhQRCWFhX9DgPYoekjbEsfHatoVvvoGMDMeGlDBhLfx7wcv0qPAwMdHRbscRkRAWEQXd4fIOrPpz\nFSt3r3RkvNKloXx571q0SHYfTV3BoYKL+L8ePd2OIiIhLiIKOi46jjur38k7ac5dcqVpbjmTp6b9\nm9bF76FAvgS3o4hIiDNO3mnrvC9mjM3L18tu+8HtXDnsSjbdv4mk+CS/x1uxAlq1gk2bvLcBFZm+\nYAstJ1/F9sd+48LCyW7HEZEgYYzBWpvjpoiII2iA0kmlaXJJE0YvH+3IeFWqQEwMLF/uyHASBu4f\n9wb/KNBL5SwijoiYggYYUHMA7yx6x5H7cxujaW75r6Wr97I24SPe7fmA21FEJExEVEHXL1efKBPF\nnE1zHBlPdxWTU+4eMZQrYtpRuUwZt6OISJiIqII2xtC/Zn/H7s993XWwdSts3uzIcBKiNu84Rhpv\n8/bNj7gdRUTCSEQVNED3at1J3ZTK1gNb/R4rJgZat4bJzt0DRULQXe+OpGx0bRpcXtntKCISRiKu\noAvEFeCWK29h+OLhjoynae7Itu/ASWYefpWX2+iRkiLirIgraIB+NfvxwZIPOHHyhN9j3XADpKXB\nvn0OBJOQc++7X1IkthRdrqvrdhQRCTMRWdCVLqjElSWuZPzK8X6PlT8/NGwIU6f6n0tCy4kTlnHb\nXuafDXT0LCLOi8iCBhw9WUyXW0Wmf374LXGJJ7i3RSu3o4hIGIrYgm5dsTXbD21nye9L/B+rNcyc\nCSf8nzGXEOHxwLBfXqZ/tUeJMhH7YyQiARSxv1liomK465q7HLk/d/HicMUVMHu2A8EkJLw+djEZ\nhdbw7E3d3I4iImEqYgsaoHf13kxYPYG9x/b6PVbXrvDuuw6EkqBnLQye+zLdLn6A+Jg4t+OISJiK\n6IIulr8YbSq2YeTSkX6P1bs3LFsG8+c7EEyC2mfT17M/eTZv9ujjdhQRCWMRXdDgPVls6H+G4rEe\nv8bJlw+efRYee8x7hCXha+CU12h+wV0UTizodhQRCWMRX9C1StciOV8y09dP93usW2+FAwdgyhQH\ngklQmv3TLrYVGse7ve51O4qIhLmIL2hjDANqDXDkkqvoaHjxRRg4EE6edCCcBJ17Pn2L2vm7clGR\n4m5HEZEwF/EFDdClShfStqfx297f/B6rVSsoWhQ+/tiBYBJUfl5ziFWJwxnW4yG3o4hIBFBBAwmx\nCfS6qhfD/jPM77GMgZdfhmeegWPHHAgnQePu99/jsrgmXFW2vNtRRCQCqKCz3F3jbkYtH8XRjKN+\nj3XttVCzJgwZ4kAwCQrbfk9nAa/zZmfd1lNE8oYKOktKcgp1ytRhzC9jHBlv8GB45RU9RCNc3D30\
nM0rFXU7TK6u7HUVEIoQKOpv+NfszZNEQrAPXSVWq5L1H90svORBMXHXgoIdpB1/hhRY6ehaRvKOC\nzqZp+aYcTj/Mgm0LHBlv0CD44APYts2R4cQlDwz7mqSEBHr8o4nbUUQkgqigs4kyUfSr0c+xp1yV\nLg133uktaglNJ07Ap5te5tG6j2KMcTuOiEQQ48R0rs8vZozNy9fLjX3H9nHJW5ewuv9qShQo4fd4\n+/dDxYqQmgqVK/ufT/LWwGE/8ObmnhwcvIaYqBi344hICDLGYK3N8b/wdQR9muSEZDpX7sz7S953\nZLzCheHRR+GJJxwZTvKQxwNvL32ZO694WOUsInnOp4I2xjQ3xqw2xqw1xvztTBljzM3GmOVZbz8Y\nY650Pmre6V+zP8MXD+ekx5nbgQ0YAEuWwI8/OjKc5JG3x/3KiaKLGHzTbW5HEZEIdN6CNsZEAUOA\nZkAVoJsxptJpm20A6ltrqwHPA84cfrqkWslqlCtUjslrJjsynh6kEXqshedn/5vOZe8hMS7B7Tgi\nEoF8OYKuBayz1m621mYAY4F22Tew1i601h7I+nAhUNrZmHlvQK0BDElz7k4j3bt7r4n++mvHhpQA\n+mLGVvYWm8xb3fu5HUVEIpQvBV0a2Jrt422cu4B7A9P8CRUMOlzegVV/rmLl7pWOjBcd7b0meuBA\nyMx0ZEgJoEcnvk7jIrdxQYFkt6OISIRy9CQxY0wjoBcQ8nd0iIuOo0/1PgxdNNSxMVu1guRkGD3a\nsSElAL5P28uW5I8Y1vMBt6OISATz5dTU7UDZbB+Xyfrc/zDGVAXeA5pba896g8tB2S4KbtiwIQ0b\nNvQxat7re01frhx2JYObDCYpPsnv8U49SKNrV+jSBRK0tBmU7vl4KNVLtqVCsYvcjiIiISg1NZXU\n1FS/xznvddDGmGhgDdAE+B1IA7pZa1dl26Ys8B3Q3Vq78BxjBf110Kfr/EVnGpZrSP9a/R0b88Yb\noW5dePhhx4YUh6xce4wrPkhh4d2zqZWiC9dFxH+5vQ7apxuVGGOaA2/inRL/0Fr7kjGmL2Ctte8Z\nY94HOgCbAQNkWGtrnWGckCvo7zd9z93f3M2v/X517E5Sq1ZBgwawdq33OmkJHg0eHsaOxGmse9aZ\nM/hFRAJa0E4JxYK21nLlsCt5q8VbNE5p7Ni4vXtDsWLw4ouODSl+2rHzJBe9fBkTe35Mm6vquh1H\nRMKE7iQWIMYY+tfs79j9uU8ZNAjeew+2/201X9wy4J0vKZZYUuUsIkFBBe2D7tW6k7opla0Htp5/\nYx+VKQN9+uhBGsHi4EHL5L0v868bQv4CBBEJEypoHxSIK8AtV97C8MXDHR33scdg4kTvmrS465Fh\n35K/0An6NGjtdhQREUAF7bN+NfvxwZIPOHHyhGNjJid7S1oP0nDXzp2Wj357kQdrP0KU0Y+EiAQH\n/TbyUaULKnFliSv5ctWXjo47YAAsXgwLFjg6rPgoPR3qPvg2RUodYGDrm92OIyLyFxV0DvSv2d/R\n+3OD90Ea//qXHqThlpsfXcC2S17gh3vGExcd53YcEZG/qKBzoHXF1mw7uI2lvy91dNwePWDvXvjm\nG0eHlfN484PdTIrrwqiOH1C+aIrbcURE/ocKOgdiomK4u8bdjl9yFR3tvR768cf1II28suCnTB5Z\ncAu3XXMLXa9u43YcEZG/UUHnUO/qvfly1ZfsPbbX0XFbt/beVeyTTxwdVs5g1y5oNvg5Lrs8g2Gd\nn3M7jojIGamgc6hY/mK0rtiakUtHOjruqQdpPPUUHD/u6NCSTXo6NOk7Ha5+n1l3jSEmypfnxYiI\n5D0VdC4MqDmAof8Zisd6HB23bl2oXh3ecXYGXbLp8/AW1l9xG5N7jqFkgZJuxxEROSsVdC7UKl2L\n5HzJTF8/3fGxBw/2Hknv3+/40BHv/RHpfG5v4olGD9Iwpb7bcUREzkkFnQuBuj83QOXK0LYtvPKK\n40NHtLQ0uPfrh7muakmeavyI23FERM5LT7PKpWMZxyj7RlkW3rGQ8kXKOzr2tm1QrRr8/DOULu3o\n0BFp1y6ofNM4Yps/weoHFlM4n57xKSJ5R0+zymMJsQn0uqoXw/4zzPGxy5TxPo7yX/9yfOiIk54O\nrXqu5nijAUy7bbzKWURCho6g/bBx30Zqvl+TLQ9sITE20dGx9+2DihVh3jyoVMnRoSNK3wFH+KxA\nLV7r/AB3XtPb7Tgirrv44ovZvHmz2zHCUrly5di0adPfPp/bI2gVtJ/ajGlD+8vac0f1Oxwf+9//\nhoUL4Utnb/8dMUaMsNw/tzttWsXySacRGJPjnw+RsJNVFm7HCEtn+3+rKW6XnDpZLBB/4QcM8J7c\ntHCh40OHvbQ0uG/0cC686mfeb/+OyllEQo4K2k9NyzflUPohFm5zvkUTErzr0I8+qgdp5MSuXdCm\n73+Iuv5ppnQf7/jyg4hIXlBB+ynKRNGvRj+GLHL2KVen9OgBe/bA1KkBGT7spKdD+257SW/XmRE3\nDqNi0YpuRxIRyRUVtANuu+o2pq6byq7DuxwfOybG+yCNgQP1IA1fPPCghw3VunNbnRvpWLmj23FE\nJAC2bt1KUlKST0uLmzdvJioqCo/He+fHli1bMnr0aABGjRpFvXr1AprVHypoByQnJHNT5Zt4as5T\nAVmLbtMGkpLg008dHzqsjBwJ47a/RMrl+3nlhpfdjiMiAXLRRRdx8OBBn88tyb7d1KlT6d69+xn/\nLNiooB3yyg2vsHzXcu6ddq/jJa0HaZxfWho88NZsTJ23+bLr58RGx7odSUTELypohxTKV4iZt85k\n0Y5F3DPtHsdLum5duOoqGDrU0WHDwq5d0L77dqI63crYzp9QOkm3XxMJRSkpKbz66qtUq1aNggUL\n0qdPH/744w9atmxJUlISTZs25cCBA3+btm7UqBFPP/00//jHP0hKSqJ58+bs3XvmRwI3atSIESNG\nnPHPHnnkEerXr8+hQ4cAGDFiBJUrV6Zo0aK0aNGCLVu2BGbHz0IF7aBC+Qoxs/tMlvy+hP5T+zv+\ntKvBg+Gll+DAAUeHDWnp6dDxpgxiunXlwXr9aHJJE7cjiYgfJkyYwHfffcfatWuZPHkyLVu25KWX\nXuLPP/8kMzOTt956C/j71PSYMWMYNWoUu3fv5sSJE7z66qs+v6a1lj59+rBixQpmzZpFwYIFmTRp\nEi+99BITJ05k9+7d1KtXj27dujm6r+ejgnZYUnwS02+dzvJdy+n/jbMlXaWKdz1aD9L4rwcfhN8r\nD6TKpQV5ot4TbscRET/dc889XHDBBVx44YXUq1eP2rVrU7VqVeLi4rjxxhtZunTpGb+uV69elC9f\nnvj4eG666SaWLVvm0+ulp6fTrVs39u/fz5QpU4iPjwdg+PDhDBw4kIoVKxIVFcXjjz/OsmXL2Lp1\nq2P7ej4q6ABIik9i+i3T+eWPX7j767sdLelBg+Ddd2HH
DseGDFkjR8KEVRPIuHQ8n9w4miijv84i\n/jLGmbfcKlGixF/vJyQk/O3jw4cPA/xtGbFkyf8+3z0xMfGv7c5n/fr1TJ48mWeeeYaYmJi/Pr95\n82buu+8+ihQpQpEiRShatCjGGLZv356r/coN/UYLkILxBZl2yzRW/rmSvlP6OlbSF10Ed9yhB2mk\npcFDg9dx4oa7GN/lc4omFnU7kkhYsNaZt0Bz6uzrypUrM3LkSJo3b87atWv/+nzZsmUZPnw4e/fu\nZe/evezbt4/Dhw9Tp04dR17XFyroADpV0mv2rKHP5D6OlfTjj8OECbB8uSPDhZxdu6BDl2MU6t2J\n55oMolbpWm5HEpE8lpMTcc+3bZcuXRg8eDDXX389GzZsAKBv374MHjyYlStXAnDgwAHGjx+f+8C5\noIIOsAJxBZh6y1R+2/cbd0y+g0yP/3cbKVIE3nwTmjSBV1+FkycdCBoi0tOhc2co1rM/11aowt01\n7nY7kog45PSj4nMdJWf/s/MdTfuybY8ePXj66adp0qQJW7ZsoX379jz++ON07dqVwoULU7VqVaZP\nn+7LbjhGT7PKI0fSj9B6TGvKFSrHh20/JDoq2u8xf/sN7rwT9u+HDz/0XoYV7gYMgHmHR5BR81XS\n+qRRIK6A25FEQoqeZhU4eppViMofl59vbv6GrQe3cvvk2x05ki5fHr791ltaTZt6bwd67JgDYYPU\nyJEwZdEytld+jC9v+lLlLCJhTQWdhxJjE5nSbQrbD27ntkm3OVLSxkCvXvDzz94j6mrV4PvvHQgb\nZNLS4JGn9mNv6sTbLd7i8mKXux1JRCSgNMXtgqMZR2k3th3F8xdnVPtRxETFnP+LfDRpkveIukUL\n7/XShQs7NrRrdu2CGjUtpR/sQI2KpRnSMjBPDhOJBJriDhxNcYeBxNhEJnedzO4ju+nxVQ9Oepw7\ny6tdO1ixAqKj4Yor4KuvHBvaFadOCqvU6zVsgR281vQ1tyOJiOQJHUG76FjGMW4cdyOF8xXmkw6f\nOHokDTBvHvTu7S3qIUPgwgsdHT5PDBgAS/bM47drOpHWO41yhcu5HUkkpOkIOnB0BB1GEmITmNh1\nIgdOHOCWCbc4eiQNUK+e91rpyy/3rk1/+GHe3EDAKSNHwrR5O9lcoxsftftI5SwiEUVH0EHg+Mnj\ndPy8I/lj8/Nph08D8qjE5cu9R9MFC8J770GFCo6/hKPS0qBVm5OUH9SUppX+wbONnnU7kkhY0BF0\n4OgIOgzli8nHhJsmcDTjKN2+7EZGZobjr1GtGixc6H3YRp063hPIgu0GJzt3wvvvQ+vWcMMN0GDQ\n0xTIH8UzDZ5xO5qISJ7TEXQQOXHyBJ2+6ERsVCxjO40lLjouIK+zcSP07Qt//gkffADVqwfkZc7L\nWli1ynvm+aRJsGYNNGvmPdHNXPY1j3x/N4vvXEzx/MXdCSgShnQEnXObN28mJSWFkydPEhV19uNa\nHUGHsfiYeMZ3Hk+mzaTL+C6kZ6YH5HVSUmDGDLjvPu/lWI89BkePBuSl/ubkSZg7Fx56CCpWhObN\nYft2eO457+VUg4asZlq+Hgz49jbGdRqnchYRv4waNYp69er5PY5TD+fICRV0kImPieeLzl9graXz\nF50DVtLGQM+e3hucbNninQKfMycgL8Xhw96He/TsCSVLwv33e9fCP/8cNm/OOsO82gp6TulG/ZH1\nqVi0Ir/d+xvXXXRdYAKJSMSw1p63XD0e5x4J7CQVdBCKi47j886fE22i6fR5J06cPBGw1ypRAsaM\ngddf9xZo796wb5//4/7+u/dktNatoVQp7zOsa9aEJUu8b4MGwdVXw/Jdy+j0eSeu//h6ripxFb/d\n+xtP1n+SQvkK+R9CRELOtm3b6NixI8WLF6dYsWLce++9AIwYMYLKlStTtGhRWrRowZYtW/76mqio\nKIYPH07FihUpUqQIAwYMAGD16tXcfffdLFiwgIIFC1KkSBEAevXqRb9+/WjVqhUFCxYkNTWVqVOn\nUr16dQoVKkS5cuX4VzA809dam2dv3pcTX6WfTLcdxnWwrT5tZY9nHA/46x04YG3//taWKmXt+PE5\n+1qPx9oVK6x94QVra9e2tnBha7t2tXbMGGv37//79mnb0mybz9rYC1+90L7242v28InDzuyEiJxT\nMP8ezszMtNWqVbMPPfSQPXr0qD1x4oSdP3++nTRpkr300kvtmjVrbGZmpn3hhRfsdddd99fXGWNs\nmzZt7MGDB+2WLVtssWLF7IwZM6y11n700Ue2Xr16//M6t912my1cuLBdsGCBtdbaEydO2O+//96u\nWLHCWmvtL7/8YkuWLGknTZpkrbV206ZNNioqymZmZp4z/9n+32Z9PuedmZsvyu1bMP/FCFbpJ9Nt\nx3EdbctPW9pjGcfy5DV/+MHaSpWsvfFGa7dvP/t2GRnWpqZa++CD1pYvb23ZstYOGGDtrFnWnjhx\n5q/5ccuPtvknzW2Z/ytj31r4lj2afjQwOyEiZxTMv4cXLFhgixcv/rcibNGihR0xYsRfH2dmZtrE\nxES7ZcsWa623oH/88ce//vymm26yL7/8srX27AXds2fPc2a5//777YMPPmitda+gnb11lTguNjqW\nMR3HcMuEW+gwrgMTukwgX0y+gL5m3bqwbBm88IL3EZYvvAB33AFRUd715BkzYPJk+OYbKFcO2raF\n8eO969hnW+r5ftP3PDf3OdbvXc/AfwxkYpeJxMfEB3Q/RCTnzL+cORnKPpPzM8W3bt1KuXLl/nam\n9ObNm7nvvvt46KGHvGNnrStv376diy66CIASJUr8tX1iYiKHDx8+52ud+rpT0tLSePzxx1mxYgXp\n6emkp6fTuXPnHO+Dk1TQISA2OpbPOn7GrRNupf3Y9kzsOjHgJR0fD88+670Pdp8+MHq098SuefO8\n11G3awfPPw+n/R3/H9ZaZm+czbNzn2XbwW38s94/6V61e0BuxCIizshNsTrloosuYsuWLXg8nv8p\n6bJly/Lkk0/SrVu3HI95thPETv/8zTffzL333suMGTOIjY3lgQceYM+ePTl+PSfpJLEQERMVwycd\nPiE5IZl2Y9txLCNvHvx85ZUwf773kZY9esDWrTBzJvTvf/ZyttYyff106o6oS7+p/eh9dW/WDFjD\n7VffrnIWkbOqVasWF154IY8//jhHjx7lxIkT/Pjjj/Tt25fBgwezcuVKAA4cOMD48eN9GrNEiRJs\n27aNjIxz3wDq8OHDJCcnExsbS1paGp999tn//Ll14dpxFXQIiYmKYfSNoymaUJS2Y9uy52je/Osu\nOtpb0F26QKFznFxtrWXymsnU+qAWD818iHtr38vKfivpXq274w8CEZHwExUVxZQpU1i3bh1ly5bl\noosu4vPPP6d9+/Y8/vjjdO3alcKFC1O1alWmT5/+19edfjSc/ePGjRtTpUoVSpYsSfHiZ7+vwtCh\nQ3nqqacoVKg
Qzz//PF26dDnrmHlFdxILQSc9J+n/TX/GrBhD/rj8VClWxftW/L//LZwv7x4E7bEe\nvlr1Fc/Pex5rLU/Vf4obL7+RKKN//4kEG91JLHCcvpOYCjqEWWvZenArv/7xK7/u9r6t3L2SlbtX\nkhSfROVilf9W3k5eX5zpyeSLlV/w/NznSYhN4Kn6T9GmYhtX/qUpIr5RQQeOClrOy2M9bD2w1Vva\nf/xvcRfOV5gqxatQ+YLKf5V25WKVc1TcJz0nGfPLGF6Y9wJFEorwVP2naF6huYpZJASooANHBS25\n5rEethzY8rcj7lW7V5GckPxXWZ864q5crDJJ8Ul/fX1GZgajfx7N4HmDKZ1UmqfrP03jlMYqZpEQ\nooIOHBW0OM5jPWzev/lvR9yr/lxF0YSiVClehQrJFfh63deUTy7PU/WfosHFDdyOLSK5oIIOHBW0\n5BmP9bBp/yZ+/eNXVv+5mrpl6+oBFiIhTgUdOCpoERHJNRV04Oh50CIiIhFAd48QEYkg5cqV04md\nAVKuXDlHx/NpitsY0xx4A+8R94fW2pfPsM1bQAvgCHCbtXbZGbbRFLeIiESUgE1xG2OigCFAM6AK\n0M0YU+m0bVoA5a21lwJ9gXdzGiQcpKamuh0hoLR/oSuc9w20f6Eu3Pcvt3xZg64FrLPWbrbWZgBj\ngXanbdMO+BjAWvsTUMgYU4IIE+5/ybR/oSuc9w20f6Eu3Pcvt3wp6NLA1mwfb8v63Lm22X6GbURE\nRMRHOotbREQkCJ33JDFjTB1gkLW2edbHjwM2+4lixph3gTnW2nFZH68GGlhrd502ls4QExGRiJOb\nk8R8ucxqEVDBGFMO+B3oCnQ7bZvJQH9gXFah7z+9nHMbUEREJBKdt6CttZnGmAHATP57mdUqY0xf\n7x/b96y1U40xLY0x6/FeZtUrsLFFRETCW57e6lNERER8E5CTxIwxzY0xq40xa40xj53hzy8zxvxo\njDlujHkwEBkCyYf9u9kYszzr7QdjzJVu5MwNH/atbdZ+LTXGpBlj6rqRM7fOt3/ZtqtpjMkwxnTI\ny3z+8uH718AYs98YsyTr7Uk3cuaWL98/Y0zDrL+fK4wxc/I6oz98+P49nLVvS4wxvxhjThpjCruR\nNad82LckY8xkY8yyrH27zYWYuebD/hU2xkzI+v250BhT+byDWmsdfcNb+uuBckAssAyodNo2FwDX\nAM8BDzqdIZBvPu5fHaBQ1vvNgYVu53Zw3xKzvX8lsMrt3E7uX7btvgO+Bjq4ndvh718DYLLbWQO4\nf4WAX4HSWR9f4HZuJ/fvtO1bA9+6ndvB791A4MVT3zdgDxDjdnYH9+8V4Kms9y/z5XsXiCPo897Y\nxFr7p7V2MXAyAK8faL7s30Jr7YGsDxcSOteE+7JvR7N9WADw5GE+f/ly0x2Ae4DxwB95Gc4Bvu5f\nqJ6s6cv+3Qx8aa3dDt7fNXmc0R++fv9O6QaMyZNk/vNl3yxQMOv9gsAea22odIQv+1cZmA1grV0D\nXGyMKXauQQNR0L7c2CSU5XT/egPTAprIOT7tmzGmvTFmFTAFuD2PsjnhvPtnjCkFtLfWDiP0iszX\nv5vXZk0jfuPTNFvw8GX/KgJFjDFzjDGLjDHd8yyd/3z+3WKMScA7O/dlHuRygi/7NgSobIzZASwH\n7sujbE7wZf+WAx0AjDG1gLJAmXMNqqdZBZAxphHeM9r/4XYWJ1lrJwITjTH/AJ4HbnA5kpPeALKv\nH4VaSZ/PYqCstfZo1j30J+IttXARA1QHGgP5gQXGmAXW2vXuxnJcG+AHa+1+t4M4qBmw1Frb2BhT\nHphljKlqrT3sdjCHvAS8aYxZAvwCLAUyz/UFgSjo7Xj/ZXBKmazPhQuf9s8YUxV4D2hurd2XR9n8\nlaPvnbX2B2PMJcaYItbavQFP5z9f9q8GMNZ4n8d3AdDCGJNhrZ2cRxn9cd79y/7Lzlo7zRgzNMy+\nf4hDrSwAAAGaSURBVNuAP621x4Hjxpi5QDW864PBLic/f10Jnelt8G3fegEvAlhrfzPGbAQqAf/J\nk4T+8eVn7xDZZhyz9m/DOUcNwGJ5NP9dLI/Du1h++Vm2fQZ4yO0Ffqf3L+sbtQ6o43beAOxb+Wzv\nVwe2up3byf07bfuRhNZJYr58/0pke78WsMnt3A7vXyVgVta2iXiPVCq7nd2p/cvarhDeE6gS3M7s\n8PfuHeCZrPdL4J0yLuJ2dgf3rxAQm/V+H+Cj843r+BG09eHGJllPuvoP3hMBPMaY+7J+iIJ+KsOX\n/QOeAooAQ7OOxDKstbXcS+0bH/etozGmB5AOHANuci9xzvi4f//zJXke0g8+7l8nY8zdQAbe718X\n9xLnjC/7Z61dbYyZAfyMd/rwPWvtShdj+ywHfz/bAzOstcfcyppTPu7b88BHxpifs77sURsaMzu+\n7t/lwChjjAfvlQZ3nG9c3ahEREQkCOlpViIiIkFIBS0iIhKEVNAiIiJBSAUtIiIShFTQIiIiQUgF\nLSIiEoRU0CIiIkFIBS0iIhKE/h+XXcHSF61cxAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -1717,7 +2021,7 @@ }, { "cell_type": "code", - "execution_count": 52, + "execution_count": 57, "metadata": { "collapsed": false }, @@ -1745,7 +2049,7 @@ }, { "cell_type": "code", - "execution_count": 53, + "execution_count": 58, "metadata": { "collapsed": false }, @@ -1825,7 +2129,7 @@ " (0.97806235410269993, (20, 40))]" ] }, - "execution_count": 53, + "execution_count": 58, "metadata": {}, "output_type": "execute_result" } @@ -1839,7 +2143,7 @@ }, { "cell_type": "code", - "execution_count": 54, + "execution_count": 59, "metadata": { "collapsed": false }, @@ -1919,7 +2223,7 @@ " (0.98673744747201031, (20, 39))]" ] }, - "execution_count": 54, + "execution_count": 59, "metadata": {}, "output_type": "execute_result" } @@ -1931,7 +2235,7 @@ }, { "cell_type": "code", - "execution_count": 55, + "execution_count": 60, "metadata": { "collapsed": false }, @@ -2011,7 +2315,7 @@ 
" [ 99. , 0.98673745, 0.97806235]])" ] }, - "execution_count": 55, + "execution_count": 60, "metadata": {}, "output_type": "execute_result" } @@ -2025,7 +2329,7 @@ }, { "cell_type": "code", - "execution_count": 56, + "execution_count": 61, "metadata": { "collapsed": false }, @@ -2033,10 +2337,10 @@ { "data": { "text/plain": [ - "" + "" ] }, - "execution_count": 56, + "execution_count": 61, "metadata": {}, "output_type": "execute_result" }, @@ -2044,7 +2348,7 @@ "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAfAAAAFwCAYAAABHHCk+AAAABHNCSVQICAgIfAhkiAAAAAlwSFlz\nAAALEgAACxIB0t1+/AAAIABJREFUeJzs3Xdc1fX+wPHXBxQR90ABUdy4UHBSmlKZq2GObmmWVtrO\n1m3e3y29dW+7rLSulTezTMtRZomZGW5xgQNQwQ0OcKCyx/n8/vhymGd8D+IA38/Hg0fnfL6fz/l8\njtfrm89WWmuEEEIIUbm4XekGCCGEEMJ1EsCFEEKISkgCuBBCCFEJSQAXQgghKiEJ4EIIIUQlJAFc\nCCGEqIScBnCl1Cyl1Eml1E4HeT5RSsUrpaKVUsHF0ocopfYopfYppV6qqEYLIYQQ1zozPfCvgcH2\nHiqlhgJttNbtgEeA/xakuwHTC8p2BsYopTpcdIuFEEII4TyAa63XAWcdZBkOzCnIGwnUU0o1BXoD\n8Vrrw1rrXGB+QV4hhBBCXKSKmANvBhwt9j6xIM1euhBCCCEu0qVYxKYuwWcKIYQQophqFfAZSUDz\nYu/9C9I8gBY20m1SSsmh7EIIIa4pWutyd3rN9sAV9nvWvwD3AyilQoFUrfVJYAvQVikVoJTyAO4p\nyGuX1vqa/Hn99deveBvk+8v3l+8v312+/+X9uVhOe+BKqe+BMKCRUuoI8DpG71prrb/QWi9TSg1T\nSiUA6cADBcE4Xyn1JLAC4xeFWVrruItusRBCCCGcB3Ct9VgTeZ60k74cCCxHu4QQQgjhgJzEdhUI\nCwu70k24ouT7h13pJlxR1/L3v5a/O8j3v1iqIsbhK4JSSl8tbRFCCCEuNaUU+jIsYhNCCCHEVUQC\nuBBCCFEJSQAXQgghKiEJ4EIIIUQF0xrWrYOHHoK0tEtTR0WcxCaEEEII4MwZmDMHvvjCCOKTJl26\nuqQHLoQQQgC5ufDtt3DXXbB2rflyFotmVUQe48ZB69awbRvMnAmxsfDcc1C79qVprwRwIYQQ17SM\nDPj0U2jbFmbPhuuvh7FjYdw4OHbMfrl8i4VXvvmZOs/34pYVTcnt+iUJ+y18+y3ccAOoS3y1lwRw\nIYQQlVpMDHz2GWRnu1bu7Fl4801o1Qr++gt+/BH+/BOefRbi4qBFC+jaFd5/3+idW+Xm5fPsVz9S\n+/kQPo7+F092+webn1jJkUb/446f+7HjxI6K/YJ2yEEuQgghKqXjx+H11+HnnyEoCE6dgu++M147\nknw2nbumvcfmqAxGN57Cqy940bGj7bz79sHTT8Phw/DBR3n8cnA+/4v/N9Ut9fh7z3/y2phhuLkZ\nXW2LtvDV9q/4v1X/x7iu45gaNpU6NerYbYcc5CKEEOKK+vVXGDXK6LVeDunp8K9/GYG6fn0jyK5c\nafScb7oJPvgALJay5fLyLTz2+Xf4/acDh9P2MWh0Itt79iK34U67dbVvD8uWwT2vrmT48q58v3cm\nU0M/4fwHG5ly762FwRvATbnxcI+HiXk8hrNZZ+n0WScWxi6skJvHbLrS16kVu1ZNCyGEqDyOHdN6\n9Git27TR+rXXtPb21nrxYufl0tO1fu45rYcM0TopyXx9ubkW/eWXFu3np/WYMVofPFg2z4EDWvfr\np3VYmNaHDhWlf7V8k671TB/t9UxP/d/f1muttbZYLHpO9Bzd+N3GetrGadpisZT5vMOph/WoH0bp\nltNa6p/jfraZx541h9bo3l/21scvHLf5vCDulT9uXkzhivyRAC6EEJVDfr7Wn32mdePGWr/6qtYZ\nGUb6li1at2ih9SuvaJ2XZ7vsqlVat26t9dixWr/+utbNmmm9aZPzOpdsjNG1nu6tGz5xh161/rzD\nvHl5Wr/9ttG+t6Yn6lbPjdNuL/jpidNn69y8/DL5E04n6N5f9tZDvxuqT1w4obXWOjM3U7+5+k3d\n8J2GespfU3RGTobzRtrgKOBfbACXOXAhhBCm7d4Njzxi7HH+4gvo0qXk85QUuPtuqF4dvv8eGjUy\n0s+dgxdfNIajP/8cbrvNSF+61Djs5L33YPz4svXl5OZz57sfsPzCe4z1eQOvNlFsStzI0jFLCagf\nYLedWmveWPoNb0a+QC/3h1n0zCv4NLS/nys3P5epq6fyv6j/8dx1z/Hfrf8lqGkQHw76kFYNWrn6\nx2TKxc6BX/Get/UH6YELIcRVKz9f6//8x+jVfv658d6e3Fyt//53rVu10nr7dq1//VVrf3+tJ03S\nOjW1bP6YGK3bttX62WeNslbLt+zVtZ8J1fWfvlGv3XVQa230aKdtnKZ93/fV64+st1l/SnqKHvnD\nSB30WZDecWKHS98z4mCEvu3723R4fLhL5coD6YELIYS4lM6cgfvvN7Zd/fAD+PubK/ftvEwm/vAy\nDc7ezPev38FNNzmu4557jNdzv7fw8KxPWHL2TUY1msK85x6nmnvJNdfh8eGM/3k8Hw7+kHFdxxWm\nL4tfxqSlkxjbZSxv3PQGntU8Xf26l83F9sAlgAshhLBr61bjZLKRI+Htt42hcTMOpR5ixA8j8K3Z\nkm0nNzLj1umM7jTaYZm8PHjipZN8mXo3dermsfj+2dwc0tZu/pjkGG6fdztjuozhlRte4YUVLxCe\nEM7sO2cT1jLMhW95Zcg2MiGEEHbl5xvz1a7S2pirHjrUOMjkgw/MB+8/9v9B6FehjO82nt/uW8yK\n+37nqfCnmLdrnsNyO5K3Ee7fi8eGDiDl3dUOgzdA5yadiZwYyZoja/D7wI/03HR2PLqjUgTviiA9\ncCGEqKJOnoRhw4zzuefPB3d3c+XS0uDRR2HnTli0CNq1M1dOa82769/l48iPmTdqHgNaDih8tjt5\nN4O+HcTbA9/m/m73lyk7b9c8Ji+fzMzbZjKy40hzFRbIzssm6kQUof6hLpW70mQIXQghRBkJCTB4\nsHGe99q10KmTcd63s/O5t8cfo/+0e7nO8hJLPhiCl5e5+i5kX+CBJQ9w9PxRFv1tEf51y06Ux6XE\nccu3t/CvG//FgyEPApBvyec
fq/7BjzE/suSeJQQ1dXKMWhUiQ+hCCHGVu9x9k61boX9/eOklmDoV\nfvrJCOJvveW43J9RCfSZ2Y/OPoFEt76PPee2m6rvyLkjhM4KpWHNhqyZsMZm8Abo6N2RVeNXMSVi\nCjO3zuRc1jnumH8Hm5M2s3nS5msqeFcE6YELIcQlkpdnrN5OTzeCqNtl6DKtWGH0ur/4Au68syj9\n2DHo2xdeew0eeKBsuYVrd3L30qGM8X2d7559mEWxi3h6+dOsf3C9w/3WCWcSGDhnIJP7TOa5654z\n1cb9Z/Zz85ybybPkMaLDCD4c/CHV3U1OsFchMoQuhBBXofx8mDDBmIfOyjLO6J4yxXk5reGdd2Dg\nQOjZ07U658417p9etAj69Sv7fO9eGDAAZs2CW28tSv/vsvU8vnokk9t+yrRJfytMn7ZpGl9s+4L1\nD66nQc0GZT4vJjmGwd8N5rUBr/Fwj4ddauuRc0fYdmwbIzqOcKlcVSIHuQghxFUmP1/rBx/U+sYb\njXO/jx83DjJZutR52VdeMY4abd5c6+Rk83U++94W7d/confvdpxv40bjMBbr8aVvzAvX6qXG+s35\ny23mfyb8GT3g6wE6KzerRPq2Y9u0z/s++rsd35lvpCiBizzIRebAhRCiAmkNTz5p3JD1yy/g5QU+\nPsZd0w8+aCwus+ett2DJEoiMhLFj4d57jZ68M6Pf+5SP0nvx0Bef0Lmz47yhofD11zB8ONz71nxe\nixrP5/2X8I+7B9vM//6g92nk1YgHljyARRtXfG04uoEh3w3hs2GfcW/Xe503UFwaFxP9K/IH6YEL\nIa4SLlw4Vabc009r3aeP1ufOlX3+2Wdad+midVpa2WfTpxu3ellv58rNNW7Ueu01x3U+/vlc7f53\nf/39hlXa+11vvemoiZtBtNYPfzpPu7/gpxeudX7UaEZOhr7uq+v0y3+8rP888Kf2ftf7shw1WtUh\nt5EJIUTFyc3V+tZbtf6//3OtnMWi9Ysvat2jh9Znz9rPM2GC1vfcU/KXhNmzjSHzAwdK5j9+3Lit\na9ky2583de4yrV5son9av0trrfXi2MU64KMAfTrjtMO2rty/Unu/6613HN9p9uvplPQU3e6Tdrrh\nOw11xMEI0+WEfZclgANDgD3APuAlG8/rA4uBHcAmoFOxZ4cK0qOAzQ7quKR/UEIIYcbkyVrfdJPW\nPj5ab9hgvtzdr/+sO4dc0Kcdx06dkaF19+5af/SR8X7hQqOuuDjb+des0bpJk5J3W2ut9ee/rtfq\nRW89c1nJRj4T/oy+/fvb7V5juf3Ydu39rne5gnDiuUQdkxzjcjlh2yUP4Bh7xROAAKA6EA10KJXn\nXeCfBa8DgZXFnh0AGpio51L+OQkhhFMzZ2odGGj0oBcu1LpdO2MRmjOPfz5X85qbHv/jo6bqOXRI\n66ZNtZ46VWtvb+PGLkfef1/rnj21zipYR7Zw7U6tXmyi35xfdhg7Oy9b9/myj35v/Xtlnh04c0D7\nfeCnF8QsMNVOcWldbAA3s4itNxCvtT6stc4F5gPDS+XpBKwqiMJ7gZZKKe+CZwo5MEYIcZVbvRr+\n+U/jfur69WHUKGMb16uvOi43d9V2Pj/4DLPDVvNn4q+sOrjKaV0BAfDa59v514mezP7xFCEhjvM/\n9xy0aAHPPgurdxzkb0uG8kSbafzj7iFl8nq4e/DD6B94b8N7rD+yvjA9JT2Fwd8N5pV+rzi9VERU\nDmYCazPgaLH3iQVpxe0ARgIopXoDLQDrUTwa+EMptUUpNenimiuEEBXvwAG4+25jH3Xxc7+nT4cF\nC4zgbkvMoWTGh4/gucDPGX9jP2beNpOJv0wkLSfNYX2nM07z3pFRhPasybyzzzptn1Lwv//B8rXJ\n3Dx7EHf5vMqnD4+xmz+gfgCz7pjFmEVjOJVxirScNG79/lbu6nQXT/Z+0ml9onKoqJ7x20ADpdR2\n4AmM+W7r5oe+WuvuwDDgCaWUjeMFhBDi4lgs5St3/jzccYfR+x44sOSzhg1h5kzj5LK0UjE5IyuX\nvtPuIrTmfbz/4CgAhrUbRv+A/ry88mW79eVb8hmzaAyjO47m93HLWX9kPcvilzltZ606eXg/djdD\nWtzF/Ocfd5r/tva3MabLGO776T7+tuBvdG7SmTdvetNpOVF5OD2JTSkVCkzRWg8peP8yxrj9Ow7K\nHASCtNZppdJfBy5orT+0UUa//vrrhe/DwsIICwtz4asIIa5VmZnG/ubJk+Ghh8yXy883jhv194fP\nPrN/0ccDD4Cnp3G9plXXl5/kZPZhkt5fQjX3or7Q2cyzBH0exNyRc0vcxmX16p+vsilxEyvuW0E1\nt2r8eeBPHljyALsf303dGnXttvXllS+z/fh2wu8Nx93N3LViufm53DTnJurWqMvPd/98TR5XejWJ\niIggIiKi8P3UqVPRl/IkNsCdokVsHhiL2DqWylMPqF7wehIwu+C1F1C74HUtYD0wyE49l2KNgBDi\nGvDUU1r372+s1j5zxny5B17apQeEWXROjuN8qanGNq8VK4z34z/+Sns8F6gPn0y1mf+XPb/o1h+3\n1mnZJTd8L45drJt/2FyfTDtZIn3ikon60aX2F8At2bNEN/+wuU5JT3H+pUrJzsvWefl5LpcTlx6X\ncRvZXiAeeLkg7RHg4YLXoQXP44CFQL2C9FYFAT8K2GUta6eOS/1nJYSogsLDtW7RwgjcjzxibAMz\nY+r3yzRT0HO3mDjfVGv9++9GEP/wxw1aveitl23e4zD/uMXj9NPhTxe+j0uJ043fbawjEyPL5D2b\neVY3+6CZza1dCacTtPe73nrj0Y2m2ikqj8sSwC/HjwRwIYSrkpO19vPT+q+/it43bqx1jJOtyvuO\nntJuL/jpMbNe1u0/ba+z87JN1XffY8c1zzXTr3/3q9O8pzNOa9/3ffWaQ2v0+azzusP0DvrLbV/a\nzb9kzxLd9pO2Oj2naN9aRk6G7vZ5N/3Jpk9MtU9ULhcbwOU2MiFEpaS1MX/doYNxe5fVxx/Db7/B\n77/bntO2WDQt/34PjT392P6fjxg2dxiD2gzimdBnnNSnuW3ucOrnBDH3wX+bauPPe37mhT9eoEuT\nLnh7efPF7V84zH/PwntoUa8F797yLgAPLXmIjLwMvh/5PcreBL2otC72NjLZny2EqBDjxsGnn16+\n+r76Co4cgTfeKJn++OOQlGRcJGLLU1/M4yS7WPnyfwD4YNAH/HvtvzmVccphfXN2zCEx7TBfj3/d\nYb7i7uxwJ738epF0PolPhzr/w/lk6Cd8s+MbtiRtYdb2WWxM3MiXt38pwVvYJD1wIcRFW7oUnngC\nsrON27bq1DFXTmv7K78d2bcP+vaFNWugY8eyz//4Ax59FGJjoUaNovQtexPp87/uzBkUzribexSm\nTw6fTL4lnxm3zrBZX+L5RLrP7M6K+1YQ7BPsUlvzLHnkWfLwrOZpKv/cnXOZsnoKqVmp
rJmwho7e\nNr6gqBKkBy6EuKLS0+Gpp4wrKgcONN8LT0+H4GDYtMm1+nJzjd7+lCm2gzfALbdAly7w0UdFaXn5\nFob89wFurPVUieAN8PqA11kQu4DdybvLfJbWmom/TOSp3k+5HLwBqrlVMx28AcYGjWVAwAA+v/Vz\nCd7CIemBCyEuyssvw9Gjxilme/dCv35GL7xePeflvv/eCMK//26+vv/7p2b7NsVvvznuve/fD336\nwM6d4OcHd703nfBj33LqnfV4elQrk/+TyE/4dd+v/D7u9xJD1l9u+5KZ22ay8aGNso9aVCjpgQsh\nrpiYGOOIzw8+MN4HBsKwYcZCMkfi4mDWLFi3DvbsgQ0bzNU3d9V23k5vy8f/Pe906L1NG5g0CV55\nBcK37GXR6Sksuvdbm8Eb4LGej3Hk3JESp6IdSj3Eq6te5Zs7v5HgLa460gMXQpSLxQIDBsDYsfDY\nY0XpCQnGqWjx8dCgQdlyWhtD7XfcAU8/DV98AYsWOe+F5+VbaPB8PzwaJ/H8gEd49QYnt4wAFy5A\nYMdczozoyx0B4/nx7084zL8sfhnP/v4sux/bjbubOwPnDGRI2yG82PdFp3UJ4SrpgQshrohvvjEW\nrT38cMn0tm1h+PCS88/F/fgjnDplLHoDmDDB6IVv3Oi4vsf/+y1a5bF6UjjTNk3jQvYFp22sUwdu\n/sc06nvWY/5zzs8PH9p2KK3qt+KzLZ/x2ZbPyMrL4vnrnndaTogrQXrgQgiXnT4NnTpBeDh07172\n+cGDxlWc+/ZBo0ZF6RcuGHPeP/xgrCK3+uILWLwYli+3Xd+R5HO0er8jXw38mQcG9WbMojEENw3m\npX4vOWznibQTdPmsCxse2kD7Ru1NfbfYlFgGzDbOMF//4HrT5YRw1cX2wCWACyFcNnEi1K4N06bZ\nz/PII0bw/s9/itL+/ncj+H/9dcm8OTnGNZ7z58N115X9rJ6vPs/53FT2vTcLgJjkGG6acxMHJh+g\nlkctu214cMmDNKrZiPcGvefK12NqxFT86/rzUHcXbkYRwkUSwIUQl9W6dXDPPcYe67r2L8/iyBEI\nCTGGx729YfduuOkm479NmpTNb68X/sumWO78eQC7Ho2hc8uign9b8Df6NOvD89fbHuLekrSFO+bf\nwZ4n9lDP08mSeCGuAJkDF0JcNnl5xoK1jz5yHLwBWrQwAv177xkL1554wti7bSt4gzEXHhdXci7c\nYtFMmD+ZEQ3/WSJ4A/xf///j/Y3vk5GbUeaztNY8vfxp/n3TvyV4iypLArgQwrS5c42V5aNHm8v/\nyivGdrGPPoK0NGNY3R4PD3j1VZg6tSjtxdmLyHA7ydxnyi5A69q0K9f5X8eX274s285dc8nJz2FC\n8ARzDRWiEpIhdCGEKTk5xsUhs2dD//7myz02OZ3/Loli04/96NPHeR3WufB2nTLwebMj7/f9hmfu\nDLOZP+p4FLfNu439k/cXnnaWlpNGh+kd+PGuH7m++fXmGyrEZSZD6EKIy+Lrr43g6krwBqg9+D1q\nPDSEDt3OOc1bvBc+8sO38cu/zm7wBgjxDaGHbw9mbZ9VmPbW2rcIaxkmwVtUedIDF0I4lZVlBO9F\ni6B3b/PlzmSeod2n7ejs3ZmRHUc6vbITjF54q+4HOH5bLyIf2EGvQH+H+bckbWHkjyNJeCqBpAtJ\n9PqyFzsf3Umzus3MN1SIK0B64EKIS27mTGO/tyvBG+DDjR8yosMI3hn4DjO2zMCiLU7LeHhA24f/\nyXCfZ5wGb4BezXoR1CSIr6O/5u8r/s5zoc9J8BbXBNuHAgshRIH0dHj7bfuHrNhzKuMUn2/9nG0P\nbyOgXgD1atRjecJyhrUb5rDcnlN7iMv+g/2T/2u6rtcGvMbQuUNp4NmA70d971pDhaikpAcuhHDo\n00+Nee9u3Vwr9/6G97mr0120rN8SpRRP9X6KTzc7v2v0jTVv8Gzos9SpYfJScSDUP5ThgcOZPmy6\nS1d3ClGZyRy4EMKuc+eMs83XrLF/97YtyenJdJjegehHo2lRrwUAWXlZBEwLYM2ENQQ2DrRZbs+p\nPfT/uj/7J+93KYALURnJHLgQ4pL56CPjelBXgjfAu+vfZWzQ2MLgDeBZzZOJIROZsWWG3XLl6X0L\nca2SHrgQVdypU8a2rE8+wekd2sWdPm3c7715M7Rubb7cibQTdJrRiV2P7SqzmCzxfCJdP+/K4WcO\nlwnS0vsW1xrpgQshHPrsM5g+HSIjXSv33nvGiWuuBG+Ad9a9w/3d7re5Ety/rj8DWw/kmx3flHn2\nxpo3eCb0GQneQpgkPXAhqrDsbGjZ0rhEpG5d+Pxzc+UOJWYRcv0Zdm3ww9/5Tq5Cxy4co8tnXYh9\nIhaf2j4286w9vJZJSycR+0QsbsroQ+w5tYcbvr6B/ZP3U7eGk0PWhagipAcuhLBr3jzo2hXeegt+\n/NE4kMWMsTP/hcf9w10K3mCcgvZgyIN2gzdAvxb98KzmycoDKwvTrHPfEryFME8CuBBVlNbw4Yfw\n3HPGzWDdu8OSJc7LnTqXwaacr8ipdYDYlFjT9R09d5Tvd3/Pi31fdJjPuqXsk8hPAKP3vWL/Cp7s\n/aTpuoQQEsCFqLJWrYL8fBg0yHg/fjx8U3bquYxnv55Lk5xQHu45kW+iTRQo8M76d5gYMpEmtezc\nF1rM2KCxRCZFsv/Mful9C1FOMgcuRBV1220wfDhMmmS8T08Hf3+IiQE/P9tlLBaN19+78O9+nzL0\nBh9u+fYWjjxzBHc3d4d1pWal0urjVsQ+HotvHV9T7Xvpj5eIOxXHxsSNMvctrkkyBy6EKGPvXtiy\nBcaNK0qrVQtGjjTu9Lbn3UUrQbvz7J030sm7E351/ErMVdvzv6j/MazdMNPBG+DxXo/zW/xv0vsW\nopxMBXCl1BCl1B6l1D6l1Es2ntdXSi1WSu1QSm1SSnUyW1YIUfGmTYNHHoGaNUumT5hg3Odtb7Dr\nww3TGNPqGdzcjE7B/V3vt7nlq7h8Sz6fbv6Up/s87VIbA+oHsOCuBUzuM9mlckIIg9MArpRyA6YD\ng4HOwBilVIdS2V4ForTW3YDxwCculBWiSsvIMIaz09MvT32nT8P8+fD442Wf9etnrETfurXss/At\neznlsZUPHxhbmDYmaAy/xf/GuSz7d3n/uu9XmtZqSu9mLl5VBozsOJLaHrVdLieEMNcD7w3Ea60P\na61zgfnA8FJ5OgGrALTWe4GWSilvk2WFqNIWLYLffnP9Nq/y+uILuPNO8LGxk0sp+4vZXlz4KX09\nH6ZBnaLLQBp7NeamVjexIHaB3fo+2fyJ9KKFuALMBPBmwNFi7xML0orbAYwEUEr1BloA/ibLClGl\nzZplrARfuNC1cjm5+S7XlZNjnLr2zDP289x/v9FDz84uSjt4/Cwxbt/z8bjHyuQf32283WH0XSd3\nEZcSx+hOo11uqxDi4lTUfeBvAx8rpbYDu4A
owOV/faZMmVL4OiwsjLCwsApqnhBXRkICxMXBtm3Q\npYsxfO1p4rbLN+cv5+3IKaR9tMml+n78ETp0cHz1Z8uWEBQEv/4Ko0YZaU/NnkXLnFvp3q7s8vRh\n7YYxaekk9p/ZT5uGbUo8+3TzpzzW8zE83D1caqcQ16KIiAgiIiIq7POcbiNTSoUCU7TWQwrevwxo\nrfU7DsocBIKALmbLyjYyURX94x9G0P7gAwgLMw5VueMO5+X8nh3B8fo/s+K2fdzSo52purSGnj3h\nX/+CW291nPebb4wRgaVLISsnj9r/aMPXQxZz3809bOafHD6ZhjUbMiVsSmHa6YzTtP20LXuf3Gtq\n77cQoqTLsY1sC9BWKRWglPIA7gF+KdWIekqp6gWvJwGrtdZpZsoKUVXl5Rkrvh980Hg/apQxH+5M\nzKFkjnv+ResL9/FBuIkCBZasTOZc/nGGDnWed9QoWLsWTp6Ef3z7M7VyW9gN3gD3d7ufOTvmYNGW\nwrSvtn/F8MDhEryFuEKcBnCtdT7wJLACiAHma63jlFKPKKUeLsjWEditlIrDWHH+tKOyFf81hLj6\nrFgBzZtD587G+5EjjR5vTo7jcq/O/442uXfy+PUTWHvafAB/OvwZ6t79DG4mfi2vXdtY6DZ3Lny1\n62MmBjneAtbDtwc1q9dk3ZF1AORZ8pixZYYsXhPiCjI1B661Xg4ElkqbWez1ptLPHZUV4lowa1ZR\n7xugWTNjfnrVKhgyxHYZi0Xze/L/eGfADB4Z2pcXNh9ifcxh+nYOcFjX4ZOpHKmxjFQgMzeTmtVr\nOswPxmr0UU9tJX3oEf59350O8yqljMVs0d/QP6A/P+/5mYD6AXT37e60HiHEpSEnsQlxCaSkwJ9/\nwj33lEwfPdrxavRv/9xKvsriqdv74+lRjXb5w3l3qfNe+Kvf/4B/9iC6+4WwYv8KU20cMAByQj5h\nSIMn8fRw/rv8uK7jWLxnMRm5GXwS+QmTe0vvW4grSQK4EJfAt98a55DXLXVC6MiR8PPPxvy4Le+u\n/B8D6k0oPAnt3u6j+OuE8wC+5MjXTOw5gVEdR7F4z2JTbbyQcw63jr/w1RMPOs8M+NXxo0+zPrz2\n12scSj2hfjI9AAAgAElEQVTEiI4jTJUTQlwaEsCFqGBaG8PnDz1U9lnLltCqFaxeXfbZmfOZxKkf\n+fdd4wvTnrvzZtJqxrE9/pjd+n6NjCOz+hFeGj2IER1G8Ou+X8nJdzLRDiyIXcAtbW/Gp14jM18L\nMPaEf7DxAx7v9TjV3CpqF6oQojwkgAtRwTZvNhaq3XCD7ef2htH/+f1PNMzqSZ+OzQvTatf0oGXO\nrby95Ce79b2xdDY9Pe7D06Mazeo2o32j9vx18C+n7ZwdPZsJ3SY4zVfcnR3u5MaWNzKp+ySXygkh\nKp4EcCEqmHXxmrKzu3PUKPjpJ+Ou7uLm7/maezuXHc6+u+so/ki0PYyelZPH1pxv+eftE4o+v+Mo\nFsU5HnaPPx1P/Jl4hrS1s5rOjprVa7Jq/CoaeZnvtQshLg0J4ELYYLFAVJTr5dLTYcECY4W3PW3b\nGueUr19flLY+5jBnPaOYOqbsVQEvjBhMqtc24o6klHn2zsIV1MxtwW19Ohamjew4kiV7l5BvsX8Y\n4pwdc7g36F6qu1c398WEEFcdCeBC2PDHHxAa6voNYgsXQt++4Ff2RNISSh/q8s+F39CFe6hfu+w5\nqw3r1sQ/azBv/7SkzLOvts7mjuYTSqS1btAavzp+hXu2S7NoC3N2zmF8Nwe/ZQghrnoSwIWwYcEC\nyM0FV48ttrd4rbTRo40AbrFAXr6FtRdm8+ItD9jNP7LjKJYdKjksvv/YGRJrrOCte+8pk9/RMHrE\noQga1mxINx8HB6YLIa56EsCFKCU319jq9eijEB5uvtzO2ExiE5O47TbneTt2NLaYbd4Mn/yymmr5\ndRh7o/1DUV4aOYxTNddz+GRqYdor38+jRc5QAprWL5N/VMdRLI5bXOLoU6vyLF4TQlx9JIALUUpE\nBLRpA4884tod3pN/fIeaY8dR3eS0snU1+vR1XzPE54HCvd+2+DWqg0/mjby1aGlh2m9JX/NoH9u9\n9o7eHalTow5bkraUSL+QfYFf9v7C2KCx5hophLhqSQAXopQFC4zg2rUrZGZCfLy5cpvOL+SEx3ou\nZF8wlX/0aJi3+BwHPX7hP2PudZr/9rajWBJvDIsvWreL7GoneX7EzXbz2xpGXxi7kLCWYXjX8jbV\nRiHE1UsCuBDF5OUZw+ejRxvbwIYMMdcL/zUyjlz3VPoH3MCfB/80VVdQEOQG/ohf1kA6tnAeUF8Z\neTsnaq7i2OkLvLVsNn1q3o9HdXe7+a0BvPg1vbN3zJbFa0JUERLAhShm9WoICDBOSwMjgJuZB5/2\n+yKC3EcxrN0wlieYG3dXCprcPJcXBt1vKn8r3wY0zuzLmwt+ISp/Lq/d4TgQB/sEY9EWdp7cCcCB\nsweITYnl1vZOLgsXQlQKEsCFKGbBArjrrqL3t9wC69YZQ+mOrD+7kIeuH82QtkMITwgv0eu152Ta\nSZLyd/DoLYNMt29Yy1HMPPAitbLbMrhne4d5lVIlhtHn7JjD2C5j8XD3MF2fEOLqJQFciAL5+cYJ\naaNHF6XVrw/dusGaNfbL/bEtnpzqJ3lk6PV08u6E1po9p/Y4re/nPT8ztO1QPKuV3fttz8sjhmOp\neYIRrSaYym8N4BZt4Zsd3zA+WIbPhagqJIALUWDNGvD3h9atS6Y7G0b/IHwRndRIPKq7o5Qq7IU7\nsyhuEaM6jnKpjR1bePO477e8d/8YU/n7+PchNSuVL7d9SR2POoT4hLhUnxDi6iUBXIgCpYfPrYYO\ndbyQbe3phTwQWtRtH9p2qNN58NMZp4lMimRou6Eut3PGo2Np0qCWqbxuyo0RHUbw3IrnGN9tPMre\nAe1CiEpHArgQGMPnixeXHD63Cg6Gs2fh4MGyz9bsPEimxxEev7Xo6rGbW9/MxsSNpOfYP4d1yd4l\n3NL6Fryqe1VE8x0a1XEU2XnZ3NvV+VY1IUTlIQFcXJWysowFZDnOr7WuEOvWga+vcdFIaW5u9ofR\n3/1tEYF6BJ4eRXdj161Rlx6+PYg4FGG3vkVxixjdycZvC5fAgJYDiJwYiU9tn8tSnxDi8pAALq5K\nq1fDypWwdevlqc/e8LmVvWH0iOSFjO9VNhA7GkZPzUpl7eG13Nru8mznclNu9PDrcVnqEkJcPhLA\nxVUpPBxq1TIC+aWWn29cLGJr+NzqlluMtmRnF6VtjD1CRo0EJt8eVia/o4Vsv+77lRtb3UidGnUu\nsuVCiGuZBHBxVVq2DJ5/3rUAfuGC4+1e9mzYAE2aQHsH26obNYJOnYyhdqt3ly6mbf5wvDzLHn7e\ntWlXMnIzSDiTUObZwtiFLq8+F0KI0iSAi6tOQoIRjCdPNoJrbq65cvPmwfhybHN2NnxuVXoe/M/j\nCx
nX3Xa3vXA7WXzJXviF7Av8degvbm9/u+sNFUKIYiSAi6tOeLgx59yokXGk6fbt5ssdOgSHD5uv\ny2Ixhs/NBPChQ4sC+NZ9SaR5xvHcnfYvExnadijL95ecB18Wv4zrm19Pg5oNzDdSCCFskAAurjrW\nAA4wYIC5YfScHPjrL/P5rdauz6VBo1wCA53n7dkTkpPhyBF4Z8lPtM67ndo17R9LOrD1QNYeXktW\nXlZh2qK4RYzueHlWnwshqjYJ4OKqkpkJa9cai8bACMgREc7LrV9vzGHfdZdrAfyF8H9Sf+RrpvK6\nucGgQcZq9D+SFjI22HEgblCzAV2bdmXNYWNiPiM3g9/3/87wDsPNN1AIIeyQAC6uKhEREBJinEEO\n0L+/EZzz8hyXs/bazQZ8qx1Zv3Cm/krT+YcOhS/nneRczR08f+ctTvMXnwf/PeF3evn1orFXY/MN\nFEIIOySAi6tK8eFzAG9vaN4coqPNlevUCc6fh8RE53WtjzlMrkcKRzP3cC7rnKn2DR4MW9N+omXO\nrdSrXcNp/uLz4AvjZPW5EKLimArgSqkhSqk9Sql9SqmXbDyvq5T6RSkVrZTapZSaUOzZIaXUDqVU\nlFJqcwW2XVRBy5bBsGEl05zNax89CsePQ69exjB3//7mhtE/W7GcgLzB9G7Wm/VH15tqn7c31L9+\nEfd2NxeIQ3xDOJN5hr2n9rIsfhkjOo4wVU4IIZxxGsCVUm7AdGAw0BkYo5TqUCrbE0CM1joYuBH4\nQCllPVvSAoRprUO01r0rrumiqomPN+bAu3YtmR4W5nhYfPlyY27a3d14b3YYfdWRcIa1HcqAgAEO\njz0tLi0njXyfSF6+y/nwORinoA1uM5jnVzxPUJMgOc5UCFFhzPTAewPxWuvDWutcYD5QehWOBqzH\nStUBTmutrbOWymQ94hq3bJmx17r0hVn9+xsHqOTn2y5Xetg9LMx5DzwtM4cTNf9i8q2DCWsZxurD\n5la+/XXwL3o3601tj9qm8oMxD/5b/G+X7exzIcS1wUxgbQYcLfY+sSCtuOlAJ6XUMWAH8HSxZxr4\nQym1RSk16WIaK6q28PCyw+cATZuCjw/s3Fn2WU4OrFplzE1bdekCp0/DsWP265oZvo5amR0IbN6Y\n3s16E5Mcw4XsC87bmBDOkLZDTHybIoPaDMKzmicjO450qZwQQjhSUT3jwUCU1toPCAFmKKWsXZS+\nWuvuwDDgCaVUvwqqU1QhGRnGavOBA20/tzeMbt0+1qRJUZqZefD528LpWc/otntW86RXs15O58G1\n1oQnhDO0rWt3eDf2asyJ50/gX9ffpXJCCOFINedZSAJaFHvvX5BW3APAWwBa6/1KqYNAB2Cr1vp4\nQXqKUuonjCH5ddgwZcqUwtdhYWGEhYWZ+hKi8vvrL+jRA+rVs/18wACYPx+efbZkeunh8+L5IyJg\nzBjbn7crczkz+n9VlL9gHtxR73rf6X3kWfLo5N3Jybcpq56nnS8mhLhmREREEOHKPlcnlNbacQal\n3IG9wM3AcWAzMEZrHVcszwwgWWs9VSnVFNgKdAOyADetdZpSqhawApiqtV5hox7trC2i6nrySWO7\n2Etl9jgYjh83hsZTUowetlVQEHz5JYSGlswfHQ333AN79pT9rMi4o1z3TQhZb5zEo7qx8i3iUAQv\nr3yZTRM32W3jtE3TiE2J5Yvbv3D16wkhRBlKKbTWynlO25wOoWut84EnMYJvDDBfax2nlHpEKfVw\nQbY3geuVUjuBP4AXtdZngKbAOqVUFLAJWGoreItrm9bGAjZbPWkrX1/jbPRdu4rSim8fKy0oyDj2\n9MSJss9m/L6cFrmDCoM3QJ9mfdidvJu0nDS7bSjP/LcQQlwqZobQ0VovBwJLpc0s9vo4xjx46XIH\ngeCLbKOoJLSGmBijp+yKffuMxWhBQY7zWVeXd+tmvC+9faw4d3fo18/If/fdJZ+tPBzOHe1K7seu\nWb0mPfx6sP7Iega3LfNXmYzcDDYc3cCCuxa48M2EEOLSke1dosJs2GAETYvFtXLW3nfp7WOllT7Q\nxd78t5Wt7WRpmTkc91zFU8PKBukBAQPsbieLOBRBD98e1K1R13EjhRDiMpEALipMRAScOwd797pW\nzt72sdKsAdxisb19zFb+0utFZq3YiFdWOzq3bFImf1jLMLsHuixPWC7D50KIq4oEcFFhVq+Gxo0h\nMtJ8mbQ02LgRbrZ/rXYhf3/jkpPYWKO3365dye1jpQUHG3vBk5OL0uZtCad7XduBONQ/lJ0nd5Ke\nk17mWXm2jwkhxKUkAVxUiNxc2LQJnnjCtQD+11/GPdt1TY5MW3vhzobPoWgefM2aorQd6eHcF2q7\noFd1L0J8Q9hwdEOJ9IQzCaTnpNO1aVeb5YQQ4kqQAC4qxLZt0Lq1cRTqJvs7scpYtaro7m8zrMPi\nZgJ48fwAW/clkV0jkQkD+9jPb2Me3Dp8rpxN0gshxGUkAVxUiIgII1iGhBiryjMyzJVbvdooZ9aA\nAfD775CUBL1NXI1TfCHb9OXL8c+5pcT2sTL5bcyDy/YxIcTVSAK4qBCrVxvBskYNYxvZtm3Oy6Sm\nGjeQ2drHbU9AADRsnMegwRab28dKCwmBI0fg1ClYeWg5g9s47rZf538dUSeiyMg1fgPJysti7eG1\n3NLahWECIYS4DCSAi4uWl2csKrvhBuN9nz7mhtHXrjXyeni4Vl/tceNpcsscU3mrVYO+fWHFyjyO\n1VjJZBvbx4qr5VGLYJ9gNh7dCMCaw2vo2rQrDWo2cK2RQghxiUkAFxctKgpatDBWoINxrKmZhWyu\nDp+DcaFIcp0/ON/Q3PWfYNQxddZGPLNaEdTK+X3cxe8HD4+X4XMhxNVJAri4aNb5b6s+fS5dAI87\nFcf57PNsSjS/Ui4sDPbpcELqmNsGVvx+8OX7l8v2MSHEVUkCuLho1vlvq9atISvLWGhmz/nzEBdn\nbiFaiboOreauzneReD6Rs5lnTZXp3h3c2i/n3t7mAvH1za9n+/HtxKXEcSbzDCG+Ia41UgghLgMJ\n4OKi5OfDunXG/dtWSjnvha9fbyxe8/R0rb7Vh1dzc6ub6enXk81Jm02VuZB3hprNEpg4xP72seJq\ne9QmqGkQr0e8zuA2g3FT8n8TIcTVR/5lEhdlxw7w8yt7IpqzhWylh93N0FoTcSiCAQEDCG0WanoY\nfe3htfQNuA6PatVN1zUgYAALYhfI/LcQ4qolAVxclIiIksPnVs4WspVn/nvf6X14uHvQsn5LQv1D\n2ZRkLoCvPryaAQGuVRbWMgyFYlCbQa41UgghLhMJ4OKi2AvEvXrB9u3GFrPS0tJg924jyLtU1+HV\nRmBVij7+fYhMjMSinV99FnEogrCWYS7VNSBgADOGzaCxV2PXGimEEJeJBHBRbhaLsZfbVgCvX9+4\nfGT37rLPNmwwFpbVrOlafdbhcwCf2j7U86xH/Ol4h2VSs1K
JPxNPT7+eLtVVs3pNHuv1mGsNFEKI\ny0gCuCi3nTvB2xt87GyttjeMXt7579WHVzOgZVHBUH/n8+BrD68l1D8UD3cXT4sRQoirnATwKkRr\nWLbs8tVXevtYafZWopdn/nv/2f0oFG0atClMM7OQrTzz30IIURlIAK9CDh2CW2+FM2cuT33OAnFo\naNmV6BkZEB0N113nWl0RhyIY0HJAiRvBzCxkkwAuhKiqJIBXIdbe7o4dl74ui8W4Z9tRAO/SBY4e\nhXPnitI2boTgYKhVy7X6bAXiYJ9g9p3eR3pOus0y57LOsefUHno3c/G0GCGEqAQkgFchkZHGbWBR\nUZe+rpgYaNAAmjWzn6daNeM2sC1bitLKPf99aHWZleQ1qtWga9OubD221Wa59UfX08uvFzWq1XCt\nQiGEqAQkgFchmzfD6NHGELVZGRnlC/hm57FLD6OXZ/77YOpBci25tGvYruznO5gHL8/2MSGEqCwk\ngFcRublG4J440bUAvmgRPPCA6/WZDcTFF7JlZhp7w/v2dbGuQ8bwefH5bytH8+Ay/y2EqMokgFcR\nu3ZBy5ZGjzchwbhMxIzISGM4PDvbfF1auxbAN20yykRGGvPitWubrwuKDnCxxbqVTGtdIv1C9gVi\nU2Lp42/u/HMhhKhsJIBXEZGRRrD09IS2bY2gbLacUhAba76uuDgjCLdo4Tyvv78xL3/wYPnmv8Fx\nT7pFPaMRR84dKZG+/uh6evj2wLOai7elCCFEJSEBvIrYvNkI4GAsHDMzjJ6VZQT6O+5wbR78u5XR\ndB24y3R+6zB6eea/D6ceJiM3gw6NO9h8rpSyeaCLddhdCCGqKgngVURkZNHd2sHB5gJ4VBR06ADX\nX+9aAJ935H1S288wnb9PHyN4b9kC/fqZrweKet+25r+tbC1kizgsC9iEEFWbBPAq4Nw5OHLEmF8G\n8wHc2msPDnYtgCfpSM54mC8QGgrffQcdO0LduubrAXM96dIL2dJy0th1cheh/i7eliKEEJWIqQCu\nlBqilNqjlNqnlHrJxvO6SqlflFLRSqldSqkJZsuKi7d1qxGEqxdcd92tm3GYi8XJRV3WXntwsHGu\nubP8AAlJp8n1PM7+tN3kWWxcNWZDjx7GcH155r/N9KR7+vVk58mdZOcZK/E2HN1Ad9/u1Kzu4m0p\nQghRiTgN4EopN2A6MBjoDIxRSpWekHwCiNFaBwM3Ah8opaqZLCsuknUBm1XDhsYhKwcOmCvXsKHx\ns3+/87rmrd1M/fTeNKvTjL2n9ppqX61a0Gjs8wRev89UfqvE84mczz5PJ+9Ojj/foxbtG7Un+oQx\n7CDz30KIa4GZHnhvIF5rfVhrnQvMB4aXyqOBOgWv6wCntdZ5JsuKi1R8AZuVs4VsKSlw6pQxBw7m\nh9H/3BtJhzp9CPENIeqEuWH0fEs+GR2+JK/5KlP5rVYfWk3/gP4O57+tis+Dl761TAghqiIzAbwZ\ncLTY+8SCtOKmA52UUseAHcDTLpQVF8G6v7p3qeO+nc2Db94MvXqBW8HfALMr12NSIwlr24cQnxC2\nH99uqo17Tu0hLfcCUSfM5beKOBRBWECYqbzWefCM3AyiT0Rznb+Lt6UIIUQlU1GL2AYDUVprPyAE\nmKGUcvG4DlEeiYnG3HVAQMl0Zz3q0r32kBDnPXCLRXO6xmbu7mcEcLM98MikSNo0aGM64Fu50pMO\n9Q9l49GNbDy6kW4+3ajl4eJtKUIIUclUM5EnCSh+ZId/QVpxDwBvAWit9yulDgIdTJYtNGXKlMLX\nYWFhhDm6bFoARb3v0qPMznrgkZHw2GMl8zsL4H9GJeCWX5vgNr4kp7sTfSIarbXTIe7IxEgmdZ/E\nv9b8i9z8XKq7V3fyreBk2klSMlLo0qSL07wA7Rq143z2eebvnm+61y6EEJdTREQEERERFfZ5ZgL4\nFqCtUioAOA7cA4wplecwMBBYr5RqCrQHDgDnTJQtVDyAC3NKL2CzCggwLipJToYmTUo+09rogc+e\nXZTWvLlxnvrx4+Dra7uuxZsj8c03KmtSqwm1qtfiUOohWjVo5bCNm5I2MbH7RObsnENsSizdfLo5\n/15JkfRp1gc3ZW6QyE250ce/D3N2zmHpmKWmygghxOVUumM6derUi/o8p/86aq3zgSeBFUAMMF9r\nHaeUekQp9XBBtjeB65VSO4E/gBe11mfslb2oFosSbC1gA6NHHhxs+27w+HioUwd8fErmdzYPvv5w\nJMHeRZWZWciWlpNGwpkEuvl0c2nePDLRCOCuCG0WSr4ln+ubX+9SOSGEqIxMdW+01su11oFa63Za\n67cL0mZqrb8oeH1caz1Ya9214Geeo7KiYuTlGbd79epl+7m9YXF7Qd/ZPPiB7EiGBhUL4CYC8tZj\nW+natCse7h509+1uOoBvStrk8kEsYS3D6NeiH7U9ZPmFEKLqk5PYKrHYWGjWDOrXt/3c3jy4vWF3\nR/PgqWlZpHvF8LcbuhemmVnIFpkYSWgzIxB39+1uauFbviWfLUlb6N2st9O8xQ1oOYC/xv/lUhkh\nhKisJIBXYra2jxXnKIDbKudoCH3humhqZgTSuJ5XUX7fEKKOOwngSZGFV3oG+wSz4+QO8i35DsvE\nnYrDp7YPjbwaOcxni5k940IIURVIAK/E7PWkrTp2hEOHjMVsVtYbyHr0KJs/MBCOHYPz58s+W7Yz\nklYeJSsLqBdAVl4WJ9NO2qxfa82mxE2Fc9n1PevTpFYT4s/EO/xemxJdHz4XQohrjQTwSszeXLaV\nh4dx0tquYjd/RkdD+/bg5VU2v7u7cSGKrYVv209Gcl3zkpUppRwuZEs8n0i+zqdl/ZaFad19uzvt\ntUsAF0II5ySAV1JpacbZ5V27Os5XehjdWdC3t5AtSUVyZ6+yBR0tZLP2vosPa3f3cb6QzbqFTAgh\nhH0SwCupbduM4O3h4Thf6QDubNjd1jx43JEU8qqfZkjPwLL5HSxksxWIQ3xD2O7gSNXz2ec5ePYg\nXZs6+c1ECCGucRLAKylnC9isbAVwZwvfSvfAf1i7mYaZvajmXvavi6OFbJFJkWWGwkN8jPxaa5tl\ntiRtIcQ3xNRpbUIIcS2TAF5JORsKt+rWzZgDz883bh9LSSm6gcyWoCDYuxdycorSVu2LpGNd25UF\nNgrkRNoJzmWdK5Gem59L1PEoejUruUm9ae2meFX34vC5wzY/T4bPhRDCHAnglZSzoXCrevWgaVPj\n9LXNm6FnT2Oxmj1eXtCqlbFS3Sr2/CZubGe7Mnc3d4KaBrHjZMmVb7uSd9Gyfkvq1qhbpkyIr+N5\nc1nAJoQQzkkAr4SOHYPMTGjd2lx+6zC62V578XnwvHwLZzy3cM8N9gvaWshWfPtYafYWslm3nUkA\nF0II5ySAV0KbN9u+gcweawA322svPg/+x/Z9uOc2oHPLJnbz21rIVvwAl9Lsnch2KPUQ1d2r41/X\n33kjhRDiGicBvB
KBrWn/45tuuonLLruM4uJili9fHvKzv/e97/Hw\nww+TnZ3Nd77zHe66666Q77nQZDF/e/AnIuZS3stzHz9H/dl6vl397Uv2GUopFatEZFFbk0tZqO+t\n9/ycfwOImwBXSikVmgb4pXOpAjwmu9CVUkqpeKcBrpRSSsUgDXCllFIqBmmAK6WUUjFIA1wppZSK\nQRrgSimlVAyKyaVUlVJKzS+73b6oi5IsZXa79WW/Z8PSPHAR2Q78Tzwt9h8YY/424PUbgZcBt/fU\ni8aY73hfawb6gElg3BizJcRn6DxwpZRSceOSzwMXERvwXeB3gcuAXSKyNsil7xpjNnv/fMfv/CRQ\nbYzZFCq8411NTc1i38Ki0vrXLPYtLKp4rn881x20/hfLyjPwLYDLGNNijBkHfgTcHuS6UL9FiMXP\niVvx/o9Y61+z2LewqOK5/vFcd9D6XywrwboSaPM7bveeC3StiBwWkVdFZL3feQO8KSIHROSei7hX\npZRSSnnN1yC2D4EyY8yQiOwAXgKc3tc+bYw5KSKFeIL8uDHml/P0uUoppVRcijiITUSuAR4xxmz3\nHu8GTOBAtoAyTcDvGGO6A85/GzhnjPn7IGV0BJtSSqm4cjGD2Ky0wA8Aq0XEDpwEdgK7/C8QkSJj\nzGnv11vw/GLQLSLpgM0YMyAiGcAtwKPzXQmllFIq3kQMcGPMhIjcB7zBhWlkx0XkXs/L5vvAF0Tk\nq8A4MAz4djwvAn7qbV0nAv9hjHnjUlREKaWUiidRsx+4Ukoppaxb8OldIpIiIr8RkUMi8rH3uTgi\nkisib4jICRF5XUSyF/reFoqI2ETkoIjs8x7HTd3Bs7iPiHzk/Tew33suLr4HIpItIj8WkeMickxE\nro6juju9P/OD3r/7ROT+eKk/gIh8Q0SOisgREfkPEUmOl/qLyAPe//M/FpH7veeWdN1F5AciclpE\njvidC1lnEdkjIi7v/w+3RHr/BQ9wY8wosM0Yswm4EtjhfW6+G/i5MaYK+AWwZ6HvbQE9ANT6HcdT\n3SH44j7x8j34R+A1Y8w6YCNQR5zU3RhT7/2ZbwZ+BxgEfkqc1F9EVgBfBzYbYzbgeay4iziov4hc\nBvxX4Co8/+/fJiKVLP26/xDPImj+gtbZO/36TmAdsAP4nkRa29YYs2h/gHTgt8Cn8PxHVuQ9XwzU\nLea9XcI6rwLeBKqBfd5zcVF3v+9BE5AfcG7Jfw+ALKAxyPklX/cgdb4FeC+e6g+sAFqAXDzhvQ/4\nTDzUH/gC8C9+xw8B3wSOx0Hd7cARv+OgP29vsH/L77r/BK4O996LskKatwv5EHAKeNMYc8BbodMA\nxphTwPLFuLcF8A94/uH6Dz6Il7r7+C/u8yfec/HwPSgHukTkh95u5O97Z2rEQ90D3QU86/06Lupv\njPkE+B9AK9AB9Bljfk581P8osNXbfZwO3AqUEh91D7Q8RJ0DF03rIPiiaVMWJcCNMZPG04W+Ctji\n7V4JHE235EbXicjngNPGmMOEXnoWlmDdA3zaeLpRbwW+JiJbiYOfP55W12bgn731H8TzW3c81H2K\niCQBvwf82HsqLuovIjl4lqG242mNZ4jIHxAH9TfG1AF/i6f38TXgEDAR7NKFvK8oMec6L+oa5caY\nfqAG2A6cFpEiABEpBs4s4q1dKp8Gfk9E3MBzwE0i8gxwKg7qPsUYc9L7dyeeVfu2EB8//3agzRjz\nW+/xT/AEejzU3d8O4ENjTJf3OF7q/xnAbYzpNsZM4Hn+fx1xUn9jzA+NMVcZY6qBXuAEcVL3AKHq\n3IGnV8JnlfdcSIsxCr3AN+pORNKAz+J5DrIP+CPvZX+IZ3vSJcUY85fGmDJjTAWeBXF+YYz5MvAK\nS7zuPiKSLiLLvF/7Fvf5mPj4+Z8G2kTEt8zwzcAx4qDuAXbh+QXWJ17q3wpcIyKp3sFJN+MZzBoX\n9RfPctqISBnweTyPUOKh7sL0HtdQdd4H7PTOTCgHVgP7w76x92H5ghGRK4C9eH55sAH/1xjzNyKS\nBzyP5zeQFuBOY0zvgt7cAhLPHuoPGmN+L57q7v2H+VM83Ua+xX0ej5fvgYhsBP4VSALcwN1AAnFQ\nd/D8AoenjhXGmHPec3Hxs4ep5aR34ln06hDwJ0AmcVB/EXkXyMNT928YY2qW+s9eRJ7FM2A5HzgN\nfBtPr+OPCVJnEdmDZ7T+OPCAibDwmS7kopRSSsUg3adbKaWUikEa4EoppVQM0gBXSimlYpAGuFJK\nKRWDNMCVUkqpGKQBrpRSSsUgDXCllFIqBmmAK6WUUjHo/wPEe4vrSp5PsAAAAABJRU5ErkJggg==\n", "text/plain": [ - "" + "" ] }, "metadata": {}, @@ -2060,7 +2364,7 @@ }, { "cell_type": "code", - "execution_count": 57, + "execution_count": 62, "metadata": { "collapsed": false }, @@ -2078,7 +2382,7 @@ "array([3, 6], dtype=int64)" ] }, - "execution_count": 57, + "execution_count": 62, "metadata": {}, "output_type": "execute_result" } @@ -2091,7 +2395,7 @@ }, { "cell_type": "code", - "execution_count": 58, + "execution_count": 63, "metadata": { "collapsed": false }, @@ -2102,7 +2406,7 @@ "(0.3, 30)" ] }, - "execution_count": 58, + "execution_count": 63, "metadata": {}, "output_type": "execute_result" } @@ -2130,7 +2434,7 @@ }, { "cell_type": "code", - "execution_count": 59, + "execution_count": 64, "metadata": { "collapsed": true }, @@ -2151,7 +2455,7 @@ }, { "cell_type": "code", - "execution_count": 60, + "execution_count": 65, "metadata": { "collapsed": false }, @@ -2162,7 +2466,7 @@ "0.75280935132909166" ] }, - "execution_count": 60, + "execution_count": 65, "metadata": {}, "output_type": "execute_result" } @@ -2174,7 +2478,7 @@ }, { "cell_type": "code", - "execution_count": 61, + "execution_count": 66, "metadata": { "collapsed": false }, @@ -2185,7 
+2489,7 @@ "29.999189245636149" ] }, - "execution_count": 61, + "execution_count": 66, "metadata": {}, "output_type": "execute_result" } @@ -2197,7 +2501,7 @@ }, { "cell_type": "code", - "execution_count": 62, + "execution_count": 67, "metadata": { "collapsed": false }, @@ -2208,7 +2512,7 @@ "0.27039432496290688" ] }, - "execution_count": 62, + "execution_count": 67, "metadata": {}, "output_type": "execute_result" } @@ -2221,7 +2525,7 @@ }, { "cell_type": "code", - "execution_count": 63, + "execution_count": 68, "metadata": { "collapsed": false }, @@ -2232,7 +2536,7 @@ "0.40695068348471369" ] }, - "execution_count": 63, + "execution_count": 68, "metadata": {}, "output_type": "execute_result" } @@ -2245,7 +2549,7 @@ }, { "cell_type": "code", - "execution_count": 64, + "execution_count": 69, "metadata": { "collapsed": false }, @@ -2256,7 +2560,7 @@ "0.90673162370556504" ] }, - "execution_count": 64, + "execution_count": 69, "metadata": {}, "output_type": "execute_result" } @@ -2270,7 +2574,7 @@ }, { "cell_type": "code", - "execution_count": 65, + "execution_count": 70, "metadata": { "collapsed": false }, @@ -2281,7 +2585,7 @@ "0.80742957881382105" ] }, - "execution_count": 65, + "execution_count": 70, "metadata": {}, "output_type": "execute_result" } @@ -2297,7 +2601,7 @@ }, { "cell_type": "code", - "execution_count": 66, + "execution_count": 71, "metadata": { "collapsed": false }, @@ -2308,7 +2612,7 @@ "0.80125278346378259" ] }, - "execution_count": 66, + "execution_count": 71, "metadata": {}, "output_type": "execute_result" } @@ -2323,7 +2627,7 @@ }, { "cell_type": "code", - "execution_count": 67, + "execution_count": 72, "metadata": { "collapsed": true }, @@ -2357,7 +2661,7 @@ }, { "cell_type": "code", - "execution_count": 68, + "execution_count": 73, "metadata": { "collapsed": false }, @@ -2384,7 +2688,7 @@ }, { "cell_type": "code", - "execution_count": 69, + "execution_count": 74, "metadata": { "collapsed": false }, @@ -2402,7 +2706,7 @@ "0.80125278346378259" ] }, - "execution_count": 69, + "execution_count": 74, "metadata": {}, "output_type": "execute_result" } @@ -2430,7 +2734,7 @@ }, { "cell_type": "code", - "execution_count": 70, + "execution_count": 75, "metadata": { "collapsed": false }, @@ -2441,7 +2745,7 @@ "0.049999999999999975" ] }, - "execution_count": 70, + "execution_count": 75, "metadata": {}, "output_type": "execute_result" } @@ -2476,7 +2780,7 @@ }, { "cell_type": "code", - "execution_count": 71, + "execution_count": 76, "metadata": { "collapsed": false }, @@ -2487,7 +2791,7 @@ "(2.4738633753705956, 0.013366080075435313)" ] }, - "execution_count": 71, + "execution_count": 76, "metadata": {}, "output_type": "execute_result" } @@ -2501,7 +2805,7 @@ }, { "cell_type": "code", - "execution_count": 72, + "execution_count": 77, "metadata": { "collapsed": false }, @@ -2522,7 +2826,7 @@ " 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153]))" ] }, - "execution_count": 72, + "execution_count": 77, "metadata": {}, "output_type": "execute_result" } @@ -2536,7 +2840,7 @@ }, { "cell_type": "code", - "execution_count": 73, + "execution_count": 78, "metadata": { "collapsed": false }, @@ -2547,7 +2851,7 @@ "(0.5, 0.6, 153)" ] }, - "execution_count": 73, + "execution_count": 78, "metadata": {}, "output_type": "execute_result" } @@ -2558,7 +2862,7 @@ }, { "cell_type": "code", - "execution_count": 74, + "execution_count": 79, "metadata": { "collapsed": false }, @@ -2607,7 +2911,7 @@ " [ 9.80000000e+01, 5.08299924e-04, 1.00000000e+00]])" ] }, - "execution_count": 74, + 
"execution_count": 79, "metadata": {}, "output_type": "execute_result" } @@ -2622,24 +2926,251 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The power using the exact distribution is lower than using the asymptotic normal distribution.\n", - "The rejection region looks correct, so how do we verify that we calculated the power correctly?\n", + "The power using the exact distribution is lower than using the asymptotic normal distribution.\n", + "The rejection region looks correct, so how do we verify that we calculated the power correctly?\n", + "\n", + "\n", + "PASS reports the following values\n", + "\n", + "```\n", + " Exact Z-Test Z-Test Z-Test Z-Test\n", + " Target Test S(P0) S(P0)C S(P) S(P)C\n", + "n P0 P1 Alpha Power Power Power Power Power\n", + "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958\n", + "50 0.5000 0.6000 0.0500 0.23706 0.33613 0.23706 0.33613 0.23706\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": 80, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.33613256480043147,\n", + " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", + " 47, 48, 49, 50]))" + ] + }, + "execution_count": 80, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "p0, pa, nobs_ = 0.5, 0.6, 50\n", + "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "0.33613 is the same as reported by PASS for the exact power of the score test, `S(P0)`. Unfortunately for testing purposes, in this example Wald and score test report identical numbers for n=50." + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "code", + "execution_count": 81, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.33613256480043147,\n", + " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", + " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", + " 47, 48, 49, 50]))" + ] + }, + "execution_count": 81, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", + "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958" + ] + }, + { + "cell_type": "code", + "execution_count": 82, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" + ] + }, + "execution_count": 82, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "nobs_ = 10\n", + "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", + "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "This is the same as the Wald test, while the score test has much lower power in this example. It is only around 0.048 which is the same in PASS and our calculations at the provided print precision." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 83, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.048035123200000036, array([ 0, 1, 9, 10]))" + ] + }, + "execution_count": 83, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Know we know how to use it, and I added keywords to the `power_binom_proptest` above, we can drop the use of lambda functions." + ] + }, + { + "cell_type": "code", + "execution_count": 84, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" + ] + }, + "execution_count": 84, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 85, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "(0.048035123200000036, array([ 0, 1, 9, 10]))" + ] + }, + "execution_count": 85, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=1)" + ] + }, + { + "cell_type": "code", + "execution_count": 86, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "(0.17958430720000004, (2, 8))\n", + "(0.048035123199999974, (1, 9))\n" + ] + } + ], + "source": [ + "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=0))\n", + "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=0))" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "collapsed": true + }, + "outputs": [], + "source": [] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## Sensitivity to misspecification\n", "\n", + "This is just a quick experiment.\n", "\n", - "PASS reports the following values\n", "\n", - "```\n", - " Exact Z-Test Z-Test Z-Test Z-Test\n", - " Target Test S(P0) S(P0)C S(P) S(P)C\n", - "n P0 P1 Alpha Power Power Power Power Power\n", - "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958\n", - "50 0.5000 0.6000 0.0500 0.23706 0.33613 0.23706 0.33613 0.23706\n", - "```" + "We go back to the exact binomial test in the standard minlike version with power evaluated using the exact distribution. The underlying assumption is that we have a set of independent Bernoulli experiments with identical probability of an event.\n", + "\n", + "As a simple deviation we consider that we have 3 groups of observations with different true proportions. For the initial analysis we calculate the rejection rate, size and power, using Monte Carlo.\n", + "\n", + "It looks like in this example with three fixed groups we have underdispersion, and the rejection ratio is lower than with a sinlge group. That means that in this case the binomial test is even more conservative than in the case where the binomial distribution is correctly specified. 
This is a surprising because unobserved heterogeneity and mixture distribution should lead to over dispersion, but we keep the composition of the population and of the sample fixed in this experiment and consequently do not get extra variation from a changing sample composition.\n", + "\n", + "I had used equal group sizes in my intial choice of numbers for the Monte Carlo setup. That case did not show any overdispersion in the sampled proportions. This needs further investigation." ] }, { "cell_type": "code", - "execution_count": 75, + "execution_count": 87, "metadata": { "collapsed": false }, @@ -2647,41 +3178,43 @@ { "data": { "text/plain": [ - "(0.33613256480043147,\n", - " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", - " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", - " 47, 48, 49, 50]))" + "0.89742182699143069" ] }, - "execution_count": 75, + "execution_count": 87, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "p0, pa, nobs_ = 0.5, 0.6, 50\n", - "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" - ] - }, - { - "cell_type": "markdown", - "metadata": {}, - "source": [ - "0.33613 is the same as reported by PASS for the exact power of the score test, `S(P0)`. Unfortunately for testing purposes, in this example Wald and score test report identical numbers for n=50." + "smprop.binom_test(31, 60)" ] }, { "cell_type": "code", - "execution_count": null, + "execution_count": 88, "metadata": { - "collapsed": true + "collapsed": false }, - "outputs": [], - "source": [] + "outputs": [ + { + "data": { + "text/plain": [ + "(0.027340133868077938, (21, 39))" + ] + }, + "execution_count": 88, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "power_binom_proptest(smprop.binom_test, 0.5, 0.5, 60)" + ] }, { "cell_type": "code", - "execution_count": 76, + "execution_count": 89, "metadata": { "collapsed": false }, @@ -2689,63 +3222,105 @@ { "data": { "text/plain": [ - "(0.33613256480043147,\n", - " array([ 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16,\n", - " 17, 18, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46,\n", - " 47, 48, 49, 50]))" + "(29.9985, 15.327697749999999, 14.999999962499999)" ] }, - "execution_count": 76, + "execution_count": 89, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", - "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" + "# our binomial sampling process\n", + "rvs = np.random.binomial(60, 0.5, size=10000)\n", + "m = rvs.mean()\n", + "m, rvs.var(), m / 60 * (1 - m / 60) * 60" ] }, { - "cell_type": "markdown", - "metadata": {}, + "cell_type": "code", + "execution_count": 90, + "metadata": { + "collapsed": false + }, + "outputs": [], "source": [ - "10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958" + "def binom_mix_rvs(size=1):\n", + " #group fraction\n", + " # np.random.multinomial(60, [1./3] * 3, size=size)\n", + " # assume fixed population group size, instead of multinomial\n", + " rvs1 = np.random.binomial([26, 20, 14], [0.33847, 0.5, 0.8], size=(size, 3))\n", + " return rvs1.sum(1)\n" ] }, { "cell_type": "code", - "execution_count": 77, + "execution_count": 91, "metadata": { "collapsed": false }, "outputs": [ { - "data": { - "text/plain": [ - "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" - ] - }, - "execution_count": 77, - "metadata": {}, - "output_type": "execute_result" + "name": 
"stdout", + "output_type": "stream", + "text": [ + "0.0289\n" + ] } ], "source": [ - "nobs_ = 10\n", - "pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)\n", - "power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)" + "# true binomial distribution\n", + "\n", + "n_rep = 10000\n", + "res0 = np.empty(n_rep, float)\n", + "res0.fill(np.nan)\n", + "for i in range(n_rep):\n", + " xc = np.random.binomial(60, 0.5)\n", + " res0[i] = smprop.binom_test(xc, 60)\n", + "\n", + "print((res0 < 0.05).mean())" + ] + }, + { + "cell_type": "code", + "execution_count": 92, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "0.0186\n" + ] + } + ], + "source": [ + "# mixed binomial distribution\n", + "\n", + "n_rep = 10000\n", + "res0 = np.empty(n_rep, float)\n", + "res0.fill(np.nan)\n", + "for i in range(n_rep):\n", + " xc = binom_mix_rvs()\n", + " res0[i] = smprop.binom_test(xc, 60)\n", + "\n", + "print((res0 < 0.05).mean())\n" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ - "This is the same as the Wald test, while the score test has much lower power in this example. It is only around 0.048 which is the same in PASS and our calculations at the provided print precision." + "These two Monte Carlo experiments show that the rejection rate under the null hypothesis drops from 0.0276 to 0.0148. As expected, the rejection rate in the Monte Carlo corresponds closely to the exact power calculations which is 0.0273.\n", + "\n", + "Below are some checks to see whether the random sampling works as expected." ] }, { "cell_type": "code", - "execution_count": 78, + "execution_count": 93, "metadata": { "collapsed": false }, @@ -2753,28 +3328,61 @@ { "data": { "text/plain": [ - "(0.048035123200000036, array([ 0, 1, 9, 10]))" + "array([[18, 23, 19],\n", + " [25, 20, 15],\n", + " [15, 19, 26],\n", + " [22, 13, 25],\n", + " [25, 18, 17],\n", + " [21, 23, 16],\n", + " [22, 19, 19],\n", + " [16, 16, 28],\n", + " [27, 16, 17],\n", + " [15, 18, 27]])" ] }, - "execution_count": 78, + "execution_count": 93, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)" + "np.random.multinomial(60, [1./3] * 3, size=10)" ] }, { - "cell_type": "markdown", - "metadata": {}, + "cell_type": "code", + "execution_count": 94, + "metadata": { + "collapsed": false + }, + "outputs": [ + { + "data": { + "text/plain": [ + "array([[11, 5, 15],\n", + " [11, 4, 15],\n", + " [ 9, 6, 16],\n", + " [14, 3, 15],\n", + " [ 9, 8, 10],\n", + " [ 8, 4, 17],\n", + " [10, 3, 18],\n", + " [11, 7, 15],\n", + " [ 9, 4, 21],\n", + " [ 8, 5, 17]])" + ] + }, + "execution_count": 94, + "metadata": {}, + "output_type": "execute_result" + } + ], "source": [ - "Know we know how to use it, and I added keywords to the `power_binom_proptest` above, we can drop the use of lambda functions." 
+ "np.random.binomial([25, 10, 25], [0.4, 0.5, 0.6], size=(10, 3))" ] }, { "cell_type": "code", - "execution_count": 79, + "execution_count": 95, "metadata": { "collapsed": false }, @@ -2782,21 +3390,21 @@ { "data": { "text/plain": [ - "(0.17958430719999999, array([ 0, 1, 2, 8, 9, 10]))" + "array([ 7.974, 9.855, 11.926])" ] }, - "execution_count": 79, + "execution_count": 95, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=1)" + "np.random.binomial(20, [0.4, 0.5, 0.6], size=(1000, 3)).mean(0)" ] }, { "cell_type": "code", - "execution_count": 80, + "execution_count": 96, "metadata": { "collapsed": false }, @@ -2804,44 +3412,50 @@ { "data": { "text/plain": [ - "(0.048035123200000036, array([ 0, 1, 9, 10]))" + "(30.011600000000001,\n", + " 13.012865439999997,\n", + " 14.999997757333333,\n", + " 14.783116666666668)" ] }, - "execution_count": 80, + "execution_count": 96, "metadata": {}, "output_type": "execute_result" } ], "source": [ - "power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=1)" + "rvs1 = binom_mix_rvs(size=100000)\n", + "m = rvs1.mean()\n", + "m, rvs1.var(), m / 60 * (1 - m / 60) * 60, (rvs1 / 60 * (1 - rvs1 / 60) * 60).mean()" ] }, { "cell_type": "code", - "execution_count": 81, + "execution_count": 97, "metadata": { "collapsed": false }, "outputs": [ { - "name": "stdout", - "output_type": "stream", - "text": [ - "(0.17958430720000004, (2, 8))\n", - "(0.048035123199999974, (1, 9))\n" - ] + "data": { + "text/plain": [ + "30.000219999999999" + ] + }, + "execution_count": 97, + "metadata": {}, + "output_type": "execute_result" } ], "source": [ - "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=0))\n", - "print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=0))" + "(np.array([26, 20, 14]) * [0.33847, 0.5, 0.8]).sum()" ] }, { "cell_type": "code", "execution_count": null, "metadata": { - "collapsed": true + "collapsed": false }, "outputs": [], "source": [] @@ -2896,7 +3510,7 @@ }, { "cell_type": "code", - "execution_count": 82, + "execution_count": 98, "metadata": { "collapsed": false }, @@ -2926,7 +3540,7 @@ " 0.51669138801043579]" ] }, - "execution_count": 82, + "execution_count": 98, "metadata": {}, "output_type": "execute_result" } @@ -2944,7 +3558,7 @@ }, { "cell_type": "code", - "execution_count": 83, + "execution_count": 99, "metadata": { "collapsed": false }, @@ -2974,7 +3588,7 @@ " [ 0.51669139, 0.51650774, 0.63903825]])" ] }, - "execution_count": 83, + "execution_count": 99, "metadata": {}, "output_type": "execute_result" } @@ -3010,7 +3624,7 @@ }, { "cell_type": "code", - "execution_count": 84, + "execution_count": 100, "metadata": { "collapsed": false }, @@ -3021,7 +3635,7 @@ "(-0.39840953644479782, 0.69032832946419354)" ] }, - "execution_count": 84, + "execution_count": 100, "metadata": {}, "output_type": "execute_result" } @@ -3032,7 +3646,7 @@ }, { "cell_type": "code", - "execution_count": 85, + "execution_count": 101, "metadata": { "collapsed": false }, @@ -3043,7 +3657,7 @@ "(-0.56343616981901101, 0.57313791338407638)" ] }, - "execution_count": 85, + "execution_count": 101, "metadata": {}, "output_type": "execute_result" } @@ -3054,7 +3668,7 @@ }, { "cell_type": "code", - "execution_count": 86, + "execution_count": 102, "metadata": { "collapsed": false }, @@ -3065,7 +3679,7 @@ "(-0.79681907288959564, 0.42555611641912894)" ] }, - 
"execution_count": 86, + "execution_count": 102, "metadata": {}, "output_type": "execute_result" } @@ -3076,7 +3690,7 @@ }, { "cell_type": "code", - "execution_count": 87, + "execution_count": 103, "metadata": { "collapsed": false }, @@ -3087,7 +3701,7 @@ "(8.293564511085938e-17, 0.99999999999999989)" ] }, - "execution_count": 87, + "execution_count": 103, "metadata": {}, "output_type": "execute_result" } @@ -3098,7 +3712,7 @@ }, { "cell_type": "code", - "execution_count": 88, + "execution_count": 104, "metadata": { "collapsed": false }, @@ -3109,7 +3723,7 @@ "(5.864435705996961e-17, 1.0)" ] }, - "execution_count": 88, + "execution_count": 104, "metadata": {}, "output_type": "execute_result" } @@ -3129,7 +3743,7 @@ }, { "cell_type": "code", - "execution_count": 89, + "execution_count": 105, "metadata": { "collapsed": true }, @@ -3140,7 +3754,7 @@ }, { "cell_type": "code", - "execution_count": 90, + "execution_count": 106, "metadata": { "collapsed": false }, @@ -3151,7 +3765,7 @@ "(0.081984475816844427, 0.38468219084982225)" ] }, - "execution_count": 90, + "execution_count": 106, "metadata": {}, "output_type": "execute_result" } @@ -3162,7 +3776,7 @@ }, { "cell_type": "code", - "execution_count": 91, + "execution_count": 107, "metadata": { "collapsed": false }, @@ -3173,7 +3787,7 @@ "0.34470140912721514" ] }, - "execution_count": 91, + "execution_count": 107, "metadata": {}, "output_type": "execute_result" } @@ -3187,7 +3801,7 @@ }, { "cell_type": "code", - "execution_count": 92, + "execution_count": 108, "metadata": { "collapsed": false }, @@ -3198,7 +3812,7 @@ "259.154426739506" ] }, - "execution_count": 92, + "execution_count": 108, "metadata": {}, "output_type": "execute_result" } @@ -3236,7 +3850,7 @@ }, { "cell_type": "code", - "execution_count": 93, + "execution_count": 109, "metadata": { "collapsed": false }, @@ -3251,7 +3865,7 @@ " 1.6821766224528543),))" ] }, - "execution_count": 93, + "execution_count": 109, "metadata": {}, "output_type": "execute_result" } @@ -3266,7 +3880,7 @@ }, { "cell_type": "code", - "execution_count": 94, + "execution_count": 110, "metadata": { "collapsed": false }, @@ -3281,7 +3895,7 @@ " 2.3848884863189261),))" ] }, - "execution_count": 94, + "execution_count": 110, "metadata": {}, "output_type": "execute_result" } @@ -3295,7 +3909,7 @@ }, { "cell_type": "code", - "execution_count": 95, + "execution_count": 111, "metadata": { "collapsed": false }, @@ -3310,7 +3924,7 @@ " 2.4628099694678625),))" ] }, - "execution_count": 95, + "execution_count": 111, "metadata": {}, "output_type": "execute_result" } @@ -3324,7 +3938,7 @@ }, { "cell_type": "code", - "execution_count": 96, + "execution_count": 112, "metadata": { "collapsed": false }, @@ -3336,7 +3950,7 @@ " ((158.0, 181.0, 5695.1110612524499, 6528.3652241583422),))" ] }, - "execution_count": 96, + "execution_count": 112, "metadata": {}, "output_type": "execute_result" } @@ -3350,7 +3964,7 @@ }, { "cell_type": "code", - "execution_count": 97, + "execution_count": 113, "metadata": { "collapsed": false }, @@ -3365,7 +3979,7 @@ " 2.2137816599366142),))" ] }, - "execution_count": 97, + "execution_count": 113, "metadata": {}, "output_type": "execute_result" } @@ -3397,7 +4011,7 @@ }, { "cell_type": "code", - "execution_count": 98, + "execution_count": 114, "metadata": { "collapsed": false }, @@ -3408,7 +4022,7 @@ "(49.0, 51.0)" ] }, - "execution_count": 98, + "execution_count": 114, "metadata": {}, "output_type": "execute_result" } @@ -3420,7 +4034,7 @@ }, { "cell_type": "code", - "execution_count": 99, + 
"execution_count": 115, "metadata": { "collapsed": false }, @@ -3431,7 +4045,7 @@ "(12.0, 28.0)" ] }, - "execution_count": 99, + "execution_count": 115, "metadata": {}, "output_type": "execute_result" } @@ -3443,7 +4057,7 @@ }, { "cell_type": "code", - "execution_count": 100, + "execution_count": 116, "metadata": { "collapsed": false }, @@ -3454,7 +4068,7 @@ "(0.39832112950330101, 0.6016788704966991)" ] }, - "execution_count": 100, + "execution_count": 116, "metadata": {}, "output_type": "execute_result" } @@ -3465,7 +4079,7 @@ }, { "cell_type": "code", - "execution_count": 101, + "execution_count": 117, "metadata": { "collapsed": false }, @@ -3476,7 +4090,7 @@ "(78.0, 84.0)" ] }, - "execution_count": 101, + "execution_count": 117, "metadata": {}, "output_type": "execute_result" } @@ -3488,7 +4102,7 @@ }, { "cell_type": "code", - "execution_count": 102, + "execution_count": 118, "metadata": { "collapsed": false }, @@ -3499,7 +4113,7 @@ "(0.65848903119285485, ((78.0, 85.0, 1930.0, 2105.0),))" ] }, - "execution_count": 102, + "execution_count": 118, "metadata": {}, "output_type": "execute_result" } @@ -3513,7 +4127,7 @@ }, { "cell_type": "code", - "execution_count": 103, + "execution_count": 119, "metadata": { "collapsed": false }, @@ -3524,7 +4138,7 @@ "0.61042723749210825" ] }, - "execution_count": 103, + "execution_count": 119, "metadata": {}, "output_type": "execute_result" } @@ -3536,7 +4150,7 @@ }, { "cell_type": "code", - "execution_count": 104, + "execution_count": 120, "metadata": { "collapsed": false }, @@ -3547,7 +4161,7 @@ "0.71661671146632" ] }, - "execution_count": 104, + "execution_count": 120, "metadata": {}, "output_type": "execute_result" } @@ -3586,7 +4200,7 @@ }, { "cell_type": "code", - "execution_count": 105, + "execution_count": 121, "metadata": { "collapsed": true }, @@ -3607,7 +4221,7 @@ }, { "cell_type": "code", - "execution_count": 106, + "execution_count": 122, "metadata": { "collapsed": false }, @@ -3618,7 +4232,7 @@ "856.38473506679793" ] }, - "execution_count": 106, + "execution_count": 122, "metadata": {}, "output_type": "execute_result" } @@ -3643,7 +4257,7 @@ }, { "cell_type": "code", - "execution_count": 107, + "execution_count": 123, "metadata": { "collapsed": false }, @@ -3654,7 +4268,7 @@ "0.89999999999999991" ] }, - "execution_count": 107, + "execution_count": 123, "metadata": {}, "output_type": "execute_result" } @@ -3666,7 +4280,7 @@ }, { "cell_type": "code", - "execution_count": 108, + "execution_count": 124, "metadata": { "collapsed": false }, @@ -3677,7 +4291,7 @@ "(1.6448536269514729, 1.2815515655446004)" ] }, - "execution_count": 108, + "execution_count": 124, "metadata": {}, "output_type": "execute_result" } @@ -3689,7 +4303,7 @@ }, { "cell_type": "code", - "execution_count": 109, + "execution_count": 125, "metadata": { "collapsed": false }, @@ -3700,7 +4314,7 @@ "1.2815515655446004" ] }, - "execution_count": 109, + "execution_count": 125, "metadata": {}, "output_type": "execute_result" } @@ -3712,7 +4326,7 @@ }, { "cell_type": "code", - "execution_count": 110, + "execution_count": 126, "metadata": { "collapsed": false }, @@ -3723,7 +4337,7 @@ "0.89999999999999991" ] }, - "execution_count": 110, + "execution_count": 126, "metadata": {}, "output_type": "execute_result" } @@ -3734,7 +4348,7 @@ }, { "cell_type": "code", - "execution_count": 111, + "execution_count": 127, "metadata": { "collapsed": false }, @@ -3745,7 +4359,7 @@ "(array([ 30., 49.]), array([ 51., 70.]))" ] }, - "execution_count": 111, + "execution_count": 127, "metadata": {}, 
"output_type": "execute_result" } diff --git a/notebooks/proportion_one_power.py b/notebooks/proportion_one_power.py index eb62dba..6c3edfe 100644 --- a/notebooks/proportion_one_power.py +++ b/notebooks/proportion_one_power.py @@ -62,43 +62,102 @@ ci_df -# **Two sided hypothesis** +# We can check some corner case behavior to see if the function handles those correctly. It does not yet do so. beta/exact confidence interval contains a NaN if the count is all of the same kind, normal and agresti_coull return proportions that are negative or larger than one. (I opened https://github.com/statsmodels/statsmodels/issues/2742 ) # In[5]: -smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided') +count_ = 0 +confints0 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods] +count_ = 1 +confints1 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods] +count_ = nobs - 1 +confintsnm1 = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods] +count_ = nobs +confintsn = [smprop.proportion_confint(count_, nobs, alpha=0.05, method=method) for method in confint_methods] +pd.DataFrame(np.column_stack((confints0, confints1, confintsnm1, confintsn)), index=confint_methods, + columns=['0 lower', '0 upper', '1 lower', '1 upper', 'n-1 lower', 'n-1 upper', 'n lower', 'n upper']) +# **Two sided hypothesis** + # In[6]: -smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided') +smprop.binom_test(count, nobs, prop=p_null, alternative='two-sided') # In[7]: +smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided') + + +# In[8]: + smprop.proportions_ztest(count, nobs, value=p_null, alternative='two-sided', prop_var=p_null) +# **Aside: Corner case for tests** +# +# Many normal distribution based hypothesis tests have problems with observations where the count is zero. Various solutions have been proposed, one of them is to add 0.5 to all zero observations. PASS also adds a small number like 0.001 for the power calculations in this case. There is currently no option for this in my functions. + +# In[9]: + +print('x = 0') +count_ = 0 +p_null_ = 0.05 +print(smprop.binom_test(count_, nobs, prop=p_null_, alternative='two-sided')) +print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided')) +print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided', prop_var=p_null_)) +print('\nx = 1') +count_ = 0.05 +p_null_ = 0.05 +print(smprop.binom_test(count_, nobs, prop=p_null_, alternative='two-sided')) +print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided')) +print(smprop.proportions_ztest(count_, nobs, value=p_null_, alternative='two-sided', prop_var=p_null_)) + + +# In[10]: + +import statsmodels.api as sm + +res = sm.GLM([[7, 30 - 7]], [[1]], family=sm.genmod.families.Binomial(link=sm.genmod.families.links.identity)).fit() +print(res.summary()) + + +# In[11]: + +tt = res.t_test('const - %f' % p_null) +print(tt) +'HO: const = %f' % p_null, tt.pvalue, tt.conf_int() + + +# The pvalue is exactly the same as the Wald test version of `proportions_ztest`. The confidence interval is identical to `proportion_confint` with method `"normal"`. 
+ +# In[12]: + +res = sm.GLM([[7, 30 - 7]], [[1]], family=sm.genmod.families.Binomial(link=sm.genmod.families.links.identity), offset=[[p_null]]).fit() +print(res.summary()) + + # **Equivalence** -# In[8]: +# In[13]: low, upp = ci_df.loc['beta', :] smprop.binom_tost(count, nobs, low, upp) -# In[9]: +# In[14]: print('score', smprop.binom_tost(count, nobs, *ci_df.loc['wilson', :])) print('wald ', smprop.binom_tost(count, nobs, *ci_df.loc['normal', :])) -# In[10]: +# In[15]: smprop.proportions_ztost(count, nobs, *ci_df.loc['wilson', :]) -# In[11]: +# In[16]: smprop.proportions_ztost(count, nobs, *ci_df.loc['beta', :]) @@ -113,7 +172,7 @@ # # where p0 = 0.3 -# In[12]: +# In[17]: te = smprop.binom_test(count, nobs, prop=p_null, alternative='larger') tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='larger') @@ -130,7 +189,7 @@ # # where p0 = 0.3 -# In[13]: +# In[18]: te = smprop.binom_test(count, nobs, prop=p_null, alternative='smaller') tw = smprop.proportions_ztest(count, nobs, value=p_null, alternative='smaller') @@ -142,7 +201,7 @@ # We can look at null hypothesis that are further away from the observed proportion to see which hypothesis are rejected. The observed proportion is 0.23, our new null hypothesis value is 0.6. -# In[14]: +# In[19]: p_null_ = 0.6 te = smprop.binom_test(count, nobs, prop=p_null_, alternative='smaller') @@ -153,7 +212,7 @@ print('score: ', ts[1]) -# In[15]: +# In[20]: p_null_ = 0.6 te = smprop.binom_test(count, nobs, prop=p_null_, alternative='larger') @@ -201,7 +260,7 @@ # # We can also use the standard t-test in large samples if we encode the data with 0 for no event and 1 for the success event. The t-test estimates the variance from the data and does not take the relationship between mean and variance explicitly into account. However, by the law of large numbers the mean, i.e. the proportion in the current case, will be asymptotically distributed as normal which can be approximated by the t-distribution. -# In[16]: +# In[21]: import statsmodels.stats.weightstats as smsw yy = np.repeat([0, 1], [nobs - count, count]) @@ -209,37 +268,37 @@ ds.ttest_mean(0.3) -# In[17]: +# In[22]: vars(ds) -# In[18]: +# In[23]: ds.ttest_mean(0.3, alternative='larger') -# In[19]: +# In[24]: ds.ttest_mean(0.3, alternative='smaller') # In this example the p-values from the t-test are in between the asymptotic score and wald tests based on the normal distribution for all three alternatives. The t-test based toast has a p-value that is slightly larger than the normal distribution based TOST test for proportions, 0.049 versus 0.041 which are both larger than the binomial distribution based TOST, which is 0.025 when we use the latter's confidence interval for the equivalence margins. -# In[20]: +# In[25]: ds.ttost_mean(*ci_df.loc['beta', :]) # We used a full sample with individual observations in the above. However, `DescrStatsW` allows us to use weights and we can specify the sample by the frequency of each level of the observation. The results are the same as before. 
-# In[21]: +# In[26]: ds2 = smsw.DescrStatsW([0, 1], weights=[nobs - count, count]) ds2.ttest_mean(0.3, alternative='smaller') -# In[22]: +# In[27]: ds2.ttost_mean(*ci_df.loc['beta', :]) @@ -264,7 +323,7 @@ # **Rejection region** -# In[23]: +# In[28]: rej = np.array([smprop.proportions_ztest(count_, nobs, value=p_null, alternative='two-sided', prop_var=p_null)[1] for count_ in range(nobs + 1)]) @@ -273,7 +332,7 @@ rej_indicator_score = rej_indicator # keep for later use -# In[24]: +# In[29]: rej = np.array([smprop.binom_test(count_, nobs, prop=p_null, alternative='two-sided') for count_ in range(nobs + 1)]) rej_indicator = (rej < 0.05) #.astype(int) @@ -286,14 +345,14 @@ # # We can use the set of values for which the null hypothesis is rejected instead of using a boolean indicator. -# In[25]: +# In[30]: x = np.arange(nobs + 1) x_rej = x[rej_indicator] x_rej_score = x[rej_indicator_score] -# In[26]: +# In[31]: print('binom', x_rej) print('score', x_rej_score) @@ -303,7 +362,7 @@ # For the current case we use the exact binomial distribution to calculate the power. The null hypothesis in this example is a two-sided test for p = 0.3. Use p1 for the proportion at which the power or rejection probability is calculated. First we check the size of the test, i.e. p1 = p_null = 0.3 -# In[27]: +# In[32]: p1 = 0.3 stats.binom.pmf(x_rej, nobs, p1).sum() @@ -311,7 +370,7 @@ # Because we are using the exact test, the probability of rejection under the null is smaller than the required alpha = 0.05. In this example the exact probability is close to the 0.05 threshold. In contrast to this, the score test is liberal in this example and rejects with probability 0.07 instead of the required 0.05. -# In[28]: +# In[33]: stats.binom.pmf(x_rej_score, nobs, p1).sum() @@ -320,7 +379,7 @@ # # In the case of the binomial distribution with probability p_null under the null hypothesis has tail probabilities at most alpha / 2 in each tail (for equal tailed hypothesis tests). -# In[29]: +# In[34]: lowi, uppi = stats.binom.interval(0.95, nobs, p_null) lowi, uppi @@ -338,26 +397,26 @@ # Because of the discreteness of the sample space having tail probabilities equal to alpha / 2 is in general not possible. # -# In[30]: +# In[35]: low, upp = lowi, uppi -# In[31]: +# In[36]: stats.binom.ppf(0.025, nobs, p_null), stats.binom.isf(0.025, nobs, p_null) # If we reject at 4 and smaller and reject at 14 and larger, then the probability of rejection is larger than 0.025 in each tail: -# In[32]: +# In[37]: stats.binom.cdf(low, nobs, p_null), stats.binom.sf(upp - 1, nobs, p_null) # If we shrink the rejection region in each tail by one, so we reject at 3 and smaller and reject at 15 and larger, then the probability of rejection is smaller than 0.025 in each tail. The total rejection probability is at 0.026 smaller than 0.05 and shows the typical case that exact tests are conservative, i.e. reject less often than alpha, often considerably less: -# In[33]: +# In[38]: prob_low = stats.binom.cdf(low - 1, nobs, p_null) prob_upp = stats.binom.sf(upp, nobs, p_null) @@ -366,21 +425,21 @@ # In this case we can increase the lower rejection threshold by one and still stay below the total rejection probability of 0.05, although in this case the rejection probability in the lower tail is larger than 0.025. In this example the same also works on the other side by expanding only the rejection region in the upper tail. 
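# In[ ]:

# The trade-off described above can be made explicit with a small enumeration (a minimal sketch;
# the helper name `exact_region_max_size` is made up). It searches over all rejection regions of
# the form x <= lo or x >= up, keeps those with exact size at most alpha, and returns the least
# conservative one. The manual checks in the next cells are special cases of this search.

def exact_region_max_size(nobs, prop, alpha=0.05):
    """largest two-sided threshold rejection region with exact size <= alpha"""
    best_size, best_region = 0.0, None
    for lo in range(-1, nobs):                 # reject for x <= lo; lo = -1 means an empty lower tail
        p_lo = stats.binom.cdf(lo, nobs, prop)
        for up in range(lo + 2, nobs + 2):     # reject for x >= up; up = nobs + 1 means an empty upper tail
            size = p_lo + stats.binom.sf(up - 1, nobs, prop)
            if size <= alpha and size > best_size:
                best_size, best_region = size, (lo, up)
    return best_size, best_region

exact_region_max_size(nobs, p_null)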
-# In[34]: +# In[39]: prob_low = stats.binom.cdf(low, nobs, p_null) prob_upp = stats.binom.sf(upp, nobs, p_null) prob_low, prob_upp, prob_low + prob_upp -# In[35]: +# In[40]: prob_low = stats.binom.cdf(low - 1, nobs, p_null) prob_upp = stats.binom.sf(upp - 1, nobs, p_null) prob_low, prob_upp, prob_low + prob_upp -# In[36]: +# In[41]: stats.binom.cdf(upp, nobs, p_null) - stats.binom.cdf(low, nobs, p_null) @@ -388,12 +447,12 @@ # TODO: why does binom_test reject at 4? # binom_test is used from scipy.stats for the two-sided alternative. -# In[37]: +# In[42]: smprop.binom_test(3, nobs, prop=p_null, alternative='smaller'), smprop.binom_test(4, nobs, prop=p_null, alternative='smaller') -# In[38]: +# In[43]: smprop.binom_test(4, nobs, prop=p_null, alternative='two-sided') # we get the same answer as in R @@ -405,7 +464,7 @@ # # The pvalue for the centered test is based on doubling the probability of the smaller tail. Given that it does not exist, we can implement it quickly, and check against R's exactci package, which matches our results. -# In[39]: +# In[44]: def binom_test_centered(count, nobs, prop=0.5): """two-sided centered binomial test""" @@ -414,12 +473,12 @@ def binom_test_centered(count, nobs, prop=0.5): return 2 * min(prob_low, prob_upp) -# In[40]: +# In[45]: binom_test_centered(3, nobs, prop=p_null), binom_test_centered(4, nobs, prop=p_null) -# In[41]: +# In[46]: binom_test_centered(13, nobs, prop=p_null), binom_test_centered(14, nobs, prop=p_null) @@ -441,7 +500,7 @@ def binom_test_centered(count, nobs, prop=0.5): # After this more extended detour we go back to our power calculations. So assuming we know the critical values of our rejection region, we can calculate the power using the cdf and sf function of the binomial distribution. -# In[42]: +# In[47]: def power_binom_reject(low, upp, prop, nobs): """ calculate the power of a test given the rejection intervals @@ -480,7 +539,7 @@ def power_binom_reject(low, upp, prop, nobs): return prob_low + prob_upp -# In[43]: +# In[48]: for test, l, u in [('binom ', 4, 15), ('binom_central', 3, 15), ('score ', 4, 14)]: print(test, l, u, power_binom_reject(l, u, p_null, nobs)) @@ -491,7 +550,7 @@ def power_binom_reject(low, upp, prop, nobs): -# In[44]: +# In[49]: def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwds=None, item=None, use_idx=False): """calculate power for proportion test by explicit numeration of sample space @@ -543,7 +602,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # We can use this function to check the size of the two binomial tests. Both results are what we already had before and agree with the results of R packages. -# In[45]: +# In[50]: print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs)) print(power_binom_proptest(smprop.binom_test, p_null, p_null, nobs, use_idx=True)) @@ -555,7 +614,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd -# In[46]: +# In[51]: print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs)) print(power_binom_proptest(binom_test_centered, p_null, p_null, nobs, use_idx=True)) @@ -564,14 +623,14 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # We obtain the power of the test at a proportion that is different from the proportion of the null hypothesis. Using the minlike binomial test the power if the true proportion is 0.5 is 0.57, the power for the central binomial test differs only in the 5th decimal from this. 
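# In[ ]:

# The enumeration is not tied to the two-sided test: keyword arguments are forwarded to the test
# function through `kwds`, so a one-sided power can be computed the same way (a minimal sketch;
# `use_idx=True` avoids any assumption that the rejection region has two tails).

power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs, kwds={'alternative': 'larger'}, use_idx=True)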
-# In[47]: +# In[52]: print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs)) print(power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs, use_idx=True)) # 0.572262 R library MESS: power.binom.test(n = 30, p0 = 0.3, pa = 0.5) -# In[48]: +# In[53]: print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs)) print(power_binom_proptest(binom_test_centered, p_null, 0.5, nobs, use_idx=True)) @@ -580,7 +639,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # surprisingly this also works in vectorized for to calculate the power for a set of alternatives. -# In[49]: +# In[54]: p1 = np.linspace(0.1, 0.8, 15) pbminlike = power_binom_proptest(smprop.binom_test, p_null, p1, nobs) @@ -591,14 +650,14 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # to check this let's use a list comprehension and explicitly loop over all alternative proportions -# In[50]: +# In[55]: [power_binom_proptest(smprop.binom_test, p_null, p1_, nobs) for p1_ in p1] # And finally a plot. -# In[51]: +# In[56]: import matplotlib.pyplot as plt plt.figure(figsize=(8, 6)) @@ -610,7 +669,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # From the plot we can see that both binomial test have the same power for large true proportions, but the standard minlike binomial test is more powerful than the central binomial test for small true proportions. For example, if the true proportion is 0.15, then the probability of rejecting the null hypothesis are 0.52 versus 0.32. We can verify that the two R packages produce the same result -# In[52]: +# In[57]: # 0.5244758 power.binom.test(n = 30, p0 = 0.3, pa = 0.15) # 0.321667 powerBinom(n = 30, p0 = 0.3, p1 = 0.15, strict=TRUE) @@ -619,7 +678,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # ### Power as a function of nobs -# In[53]: +# In[58]: nobs_arr = np.arange(30, 100) #this doesn't work vectorized in nobs @@ -627,13 +686,13 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd pbcentral_nobs -# In[54]: +# In[59]: pbminlike_nobs = [power_binom_proptest(smprop.binom_test, p_null, 0.5, nobs_) for nobs_ in nobs_arr] pbminlike_nobs -# In[55]: +# In[60]: pbcentral_nobs_arr, rej_minlike = list(zip(*pbcentral_nobs)) pbcentral_nobs_arr @@ -641,7 +700,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd np.column_stack((nobs_arr, pbminlike_nobs_arr, pbcentral_nobs_arr)) -# In[56]: +# In[61]: plt.figure(figsize=(8, 6)) plt.plot(nobs_arr, pbminlike_nobs_arr, label='minlike') @@ -649,14 +708,14 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd plt.legend(loc='lower right') -# In[57]: +# In[62]: xx = (np.arange(10)<4) | (np.arange(10) > 6) print(xx) np.nonzero(np.diff(xx))[0] -# In[58]: +# In[63]: p_null, nobs @@ -668,7 +727,7 @@ def power_binom_proptest(test_func, p_null, prop, nobs, alpha=0.05, args=(), kwd # Lachine summarizes sample size calculations for proportions based on the normal distribution if we only consider the power in one tail. In this case we have an explicit formula for the required sample size. This is a good approximation to two sided tests if the probability to be in the small tail is negligible and useful for quick calculations. However, solving the sample size that correctly takes both tails into account can be done numerically without much computational effort. 
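# In[ ]:

# The numerical route mentioned in the last sentence can be done with a few lines of root finding
# (a minimal sketch; the helper names `power_normal_2s` and `sample_size_normal_2s` are made up).
# The power function keeps both tails of the two-sided score-type z-test under the normal
# approximation, and `brentq` solves for the sample size at which it reaches the requested power.

from scipy import optimize

def power_normal_2s(p0, pa, nobs, alpha=0.05):
    # two-sided rejection probability evaluated at the alternative proportion pa
    std0 = np.sqrt(p0 * (1 - p0))
    stda = np.sqrt(pa * (1 - pa))
    crit = stats.norm.isf(alpha / 2.)
    d = (pa - p0) * np.sqrt(nobs)
    return (stats.norm.sf((crit * std0 - d) / stda) +
            stats.norm.cdf((-crit * std0 - d) / stda))

def sample_size_normal_2s(p0, pa, power=0.8, alpha=0.05):
    # smallest real-valued nobs that reaches the requested power; round up in applications
    return optimize.brentq(lambda n: power_normal_2s(p0, pa, n, alpha) - power, 2, 10**6)

sample_size_normal_2s(0.3, 0.5, power=0.8)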
-# In[59]: +# In[64]: # from Lachine 1981 equ (3) and (4) @@ -683,33 +742,33 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): return stats.norm.cdf(crit_pow) -# In[60]: +# In[65]: pa = 0.5 power_normal_greater(pa - p_null, np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa)), 30, alpha=0.05) -# In[61]: +# In[66]: std_null, std_alt = np.sqrt(p_null * (1 - p_null)), np.sqrt(pa * (1 - pa)) sample_size_normal_greater(pa - p_null, std_null, std_alt, alpha=0.05, power=0.7528) -# In[62]: +# In[67]: p0 = 0.6 pa = 0.5 power_normal_greater(pa - p0, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 25, alpha=0.05) -# In[63]: +# In[68]: p0 = 0.5 pa = 0.4 power_normal_greater(pa - p0, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05) -# In[64]: +# In[69]: p0 = 0.3 pa = 0.5 @@ -717,7 +776,7 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): power_normal_greater(diff, np.sqrt(p0 * (1 - p0)), np.sqrt(pa * (1 - pa)), 50, alpha=0.05) -# In[65]: +# In[70]: p0 = 0.5 pa = 0.5 @@ -727,7 +786,7 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): # 0.80743 PASS manual example Chow, Shao, and Wang (2008) 2-sided S(Phat) -# In[66]: +# In[71]: p0 = 0.5 pa = 0.6 @@ -736,7 +795,7 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): # 0.80125 PASS doc example from Ryan (2013) for one-sided alternative -# In[67]: +# In[72]: # copied and adjusted from statsmodels.stats.power def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, std_alt=1): @@ -764,7 +823,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, return pow_ #, (crit* std_null - d*np.sqrt(nobs))/std_alt, (crit* std_null - d*np.sqrt(nobs))/std_alt, crit_pow -# In[68]: +# In[73]: p0 = 0.5 pa = 0.5 @@ -777,7 +836,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # close to above 0.80742957881382105, closer to pass 0.80743 -# In[69]: +# In[74]: p0 = 0.5 pa = 0.6 @@ -794,7 +853,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # check size (power at null) -# In[70]: +# In[75]: p0 = 0.6 pa = 0.6 @@ -814,7 +873,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # Next we try exact power for the already available proportion_ztest -# In[71]: +# In[76]: p0 = 0.5 pa = 0.6 @@ -822,7 +881,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, smprop.proportions_ztest(nobs_ * (pa), nobs_, value=p0, alternative='two-sided', prop_var=p0) -# In[72]: +# In[77]: #power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, use_idx=1) #this raises exception @@ -830,12 +889,12 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) #use_idx=False raises exception -# In[73]: +# In[78]: p0, pa, nobs_ -# In[74]: +# In[79]: pv = [smprop.proportions_ztest(x, nobs_, value=p0, alternative='two-sided', prop_var=p0)[1] for x in np.arange(60, 99)] pv = np.asarray(pv) @@ -856,7 +915,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # 50 0.5000 0.6000 0.0500 0.23706 0.33613 0.23706 0.33613 0.23706 # ``` -# In[75]: +# In[80]: p0, pa, nobs_ = 0.5, 0.6, 50 power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1) @@ -869,7 +928,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[76]: +# In[81]: pzt_wald = lambda x, nobs, p_null: 
smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)
power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)
@@ -877,7 +936,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1,

# 10 0.5000 0.6000 0.0500 0.04804 0.04804 0.04804 0.17958 0.17958

-# In[77]:
+# In[82]:

nobs_ = 10
pzt_wald = lambda x, nobs, p_null: smprop.proportions_ztest(x, nobs, value=p_null, prop_var=None)
power_binom_proptest(pzt_wald, p0, pa, nobs_, item=1, use_idx=1)
@@ -886,24 +945,24 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1,

# This is the same as the Wald test result, while the score test has much lower power in this example. It is only around 0.048, which agrees with PASS and with our calculation at the printed precision.

-# In[78]:
+# In[83]:

power_binom_proptest(pzt, p0, pa, nobs_, item=1, use_idx=1)


# Now that we know how to use it, and since keyword arguments were added to `power_binom_proptest` above, we can drop the lambda functions.

-# In[79]:
+# In[84]:

power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=1)


-# In[80]:
+# In[85]:

power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=1)


-# In[81]:
+# In[86]:

print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, item=1, use_idx=0))
print(power_binom_proptest(smprop.proportions_ztest, p0, pa, nobs_, kwds={'prop_var': p0}, item=1, use_idx=0))
@@ -914,6 +973,111 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1,



+# ## Sensitivity to misspecification
+#
+# This is just a quick experiment.
+#
+#
+# We go back to the exact binomial test in the standard minlike version, with power evaluated using the exact distribution. The underlying assumption is that we have a set of independent Bernoulli experiments with an identical probability of an event.
+#
+# As a simple deviation we consider that we have 3 groups of observations with different true proportions. For the initial analysis we calculate the rejection rate, that is the size and the power, using Monte Carlo simulation.
+#
+# It looks like in this example with three fixed groups we have underdispersion, and the rejection rate is lower than with a single group. That means that in this case the binomial test is even more conservative than in the case where the binomial distribution is correctly specified. This is surprising because unobserved heterogeneity and mixture distributions usually lead to overdispersion, but we keep the composition of the population and of the sample fixed in this experiment and consequently do not get extra variation from a changing sample composition.
+#
+# I had used equal group sizes in my initial choice of numbers for the Monte Carlo setup. That case did not show any overdispersion in the sampled proportions. This needs further investigation.
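+# A quick back-of-the-envelope check, added here as a sketch with the group sizes and
+# proportions taken from the simulation below, helps explain the underdispersion: with a
+# fixed group composition the total count is a sum of independent binomials, so its
+# variance is sum(n_i * p_i * (1 - p_i)). Because p * (1 - p) is concave, this is never
+# larger than n * pbar * (1 - pbar), the variance of a single binomial with the same mean.
+
+# In[ ]:
+
+group_n = np.array([26, 20, 14])
+group_p = np.array([0.33847, 0.5, 0.8])
+n_total = group_n.sum()
+p_bar = (group_n * group_p).sum() / n_total
+var_mixture = (group_n * group_p * (1 - group_p)).sum()
+var_single = n_total * p_bar * (1 - p_bar)
+var_mixture, var_single  # roughly 13.1 versus 15, i.e. underdispersion
+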
+
+# In[87]:
+
+smprop.binom_test(31, 60)
+
+
+# In[88]:
+
+power_binom_proptest(smprop.binom_test, 0.5, 0.5, 60)
+
+
+# In[89]:
+
+# our binomial sampling process
+rvs = np.random.binomial(60, 0.5, size=10000)
+m = rvs.mean()
+m, rvs.var(), m / 60 * (1 - m / 60) * 60
+
+
+# In[90]:
+
+def binom_mix_rvs(size=1):
+    #group fraction
+    # np.random.multinomial(60, [1./3] * 3, size=size)
+    # assume fixed population group size, instead of multinomial
+    rvs1 = np.random.binomial([26, 20, 14], [0.33847, 0.5, 0.8], size=(size, 3))
+    return rvs1.sum(1)
+
+
+# In[91]:
+
+# true binomial distribution
+
+n_rep = 10000
+res0 = np.empty(n_rep, float)
+res0.fill(np.nan)
+for i in range(n_rep):
+    xc = np.random.binomial(60, 0.5)
+    res0[i] = smprop.binom_test(xc, 60)
+
+print((res0 < 0.05).mean())
+
+
+# In[92]:
+
+# mixed binomial distribution
+
+n_rep = 10000
+res0 = np.empty(n_rep, float)
+res0.fill(np.nan)
+for i in range(n_rep):
+    xc = binom_mix_rvs()
+    res0[i] = smprop.binom_test(xc, 60)
+
+print((res0 < 0.05).mean())
+
+
+# These two Monte Carlo experiments show that the rejection rate under the null hypothesis drops from 0.0276 to 0.0148. As expected, the rejection rate in the Monte Carlo for the correctly specified binomial corresponds closely to the exact calculation, which gives 0.0273.
+#
+# Below are some checks to see whether the random sampling works as expected.

+# In[93]:
+
+np.random.multinomial(60, [1./3] * 3, size=10)
+
+
+# In[94]:
+
+np.random.binomial([25, 10, 25], [0.4, 0.5, 0.6], size=(10, 3))
+
+
+# In[95]:
+
+np.random.binomial(20, [0.4, 0.5, 0.6], size=(1000, 3)).mean(0)
+
+
+# In[96]:
+
+rvs1 = binom_mix_rvs(size=100000)
+m = rvs1.mean()
+m, rvs1.var(), m / 60 * (1 - m / 60) * 60, (rvs1 / 60 * (1 - rvs1 / 60) * 60).mean()
+
+
+# In[97]:
+
+(np.array([26, 20, 14]) * [0.33847, 0.5, 0.8]).sum()
+
+
+# In[ ]:
+
+
+
+
# ## Summary
#
# Now, we have almost all the necessary pieces working and verified on a few examples. The next step is to clean this up, convert it into user-friendly functions or classes, and convert the examples to unit tests.
@@ -939,14 +1103,14 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1,

# TODO: The following is not correct, because when we change the sample size the rejection region also changes.

-# In[82]:
+# In[98]:

[power_binom_reject(4, 15, p_null, nobs_) for nobs_ in range(30, 50)]


# We can also calculate this in vectorized form for the set of sample sizes and all three tests:

-# In[83]:
+# In[99]:

power_binom_reject(np.array([4, 3, 4]), np.array([15, 15, 14]), p_null, np.arange(30, 50)[:, None])
@@ -963,27 +1127,27 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1,

# ## Trying out two sample proportions; this is incorrect if nobs is a scalar instead of an array of the same length as count.
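# For reference (an added sketch, not one of the original experiments below): with count
# and nobs both passed as arrays of the same length, proportions_ztest compares the two
# sample proportions, here 6/30 versus 7/30 with a null difference of zero.

# In[ ]:

smprop.proportions_ztest(np.array([6, 7]), np.array([30, 30]), value=0, alternative='two-sided')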
-# In[84]: +# In[100]: smprop.proportions_ztest(np.array([6,7]), nobs, value=0, alternative='two-sided', prop_var=p_null) -# In[85]: +# In[101]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=1/30, alternative='two-sided', prop_var=p_null) -# In[86]: +# In[102]: smprop.proportions_ztest(np.array([6,7]), nobs, value=1/30, alternative='two-sided', prop_var=p_null) -# In[87]: +# In[103]: smprop.proportions_ztest(np.array([6,7]), nobs, value=-1/30, alternative='two-sided', prop_var=p_null) -# In[88]: +# In[104]: smprop.proportions_ztest(np.array([6,7]), nobs*np.ones(2), value=-1/30, alternative='two-sided', prop_var=p_null) @@ -993,17 +1157,17 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[89]: +# In[105]: #?smprop.proportion_confint() -# In[90]: +# In[106]: smprop.proportion_confint(count, nobs) -# In[91]: +# In[107]: from statsmodels.stats.proportion import proportion_effectsize es = proportion_effectsize(0.4, 0.5) @@ -1011,7 +1175,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, # R pwr 0.3447014091272153 -# In[92]: +# In[108]: smpow.NormalIndPower().solve_power(proportion_effectsize(0.4, 0.5), nobs1=None, alpha=0.05, ratio=0, power=0.9) @@ -1031,7 +1195,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[93]: +# In[109]: low, upp, nobs, p_alt = 0.7, 0.9, 509/2, 0.82 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -1040,7 +1204,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[94]: +# In[110]: low, upp, nobs, p_alt = 0.7, 0.9, 419/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -1048,7 +1212,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[95]: +# In[111]: low, upp, nobs, p_alt = 0.7, 0.9, 417/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='norm', @@ -1056,7 +1220,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[96]: +# In[112]: low, upp, nobs, p_alt = 0.7, 0.9, 420/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -1064,7 +1228,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[97]: +# In[113]: low, upp, nobs, p_alt = 0.7, 0.9, 414/2, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.025, dist='norm', @@ -1082,30 +1246,30 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[98]: +# In[114]: low, upp, nobs = 0.4, 0.6, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[99]: +# In[115]: value, nobs = 0.4, 50 smprop.binom_test_reject_interval(value, nobs, alpha=0.05) -# In[100]: +# In[116]: smprop.proportion_confint(50, 100, method='beta') -# In[101]: +# In[117]: low, upp, nobs = 0.7, 0.9, 100 smprop.binom_tost_reject_interval(low, upp, nobs, alpha=0.05) -# In[102]: +# In[118]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_ztost_prop(low, upp, nobs, p_alt, alpha=0.05, dist='binom', @@ -1113,13 +1277,13 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, critval_continuity=0) -# In[103]: +# In[119]: low, upp, nobs, p_alt = 0.7, 0.9, 100, 0.8 smprop.power_binom_tost(low, upp, nobs, p_alt, alpha=0.05) -# In[104]: +# In[120]: low, upp, nobs, p_alt = 0.7, 0.9, 125, 0.8 smprop.power_binom_tost(low, upp, nobs, p_alt, 
alpha=0.05) @@ -1140,7 +1304,7 @@ def normal_power(effect_size, nobs, alpha, alternative='two-sided', std_null=1, -# In[105]: +# In[121]: # from Lachine 1981 equ (3) and (4) @@ -1155,7 +1319,7 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): return stats.norm.cdf(crit_pow) -# In[106]: +# In[122]: # Note for two sample comparison we have to adjust the standard deviation for unequal sample sizes n_frac1 = 0.5 @@ -1174,30 +1338,30 @@ def power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05): nobs -# In[107]: +# In[123]: #nobs = 858 power_normal_greater(diff, std_null, std_alt, nobs, alpha=0.05) -# In[108]: +# In[124]: alpha=0.05; power=0.9 stats.norm.isf(alpha), stats.norm.isf(1 - power) -# In[109]: +# In[125]: crit_alpha = stats.norm.isf(alpha) (np.sqrt(nobs) * np.abs(diff) - crit_alpha * std_null) / std_alt -# In[110]: +# In[126]: stats.norm.cdf(_) -# In[111]: +# In[127]: smprop.binom_test_reject_interval([0.4, 0.6], [100], alpha=0.05)
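# To close the loop on the earlier remark that the two-sided sample size can be solved
# numerically: the following self-contained sketch (added here, not part of the original
# notebook) re-derives the two-sided normal-approximation power with both tails included
# and finds the sample size for 90% power by root finding.

# In[ ]:

from scipy import optimize

def power_norm_two_sided_sketch(diff, std_null, std_alt, nobs, alpha=0.05):
    """two-sided power under the normal approximation, both tails included"""
    crit = stats.norm.isf(alpha / 2)
    shift = np.abs(diff) * np.sqrt(nobs)
    upper = stats.norm.sf((crit * std_null - shift) / std_alt)
    lower = stats.norm.cdf((-crit * std_null - shift) / std_alt)
    return upper + lower

p0_, pa_, target_power = 0.3, 0.5, 0.9
sd0_, sda_ = np.sqrt(p0_ * (1 - p0_)), np.sqrt(pa_ * (1 - pa_))
nobs_root = optimize.brentq(
    lambda n: power_norm_two_sided_sketch(pa_ - p0_, sd0_, sda_, n) - target_power, 2, 1000)
# about 59.2, so 60 observations after rounding up
np.ceil(nobs_root), power_norm_two_sided_sketch(pa_ - p0_, sd0_, sda_, np.ceil(nobs_root))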