author     Charles Harris <charlesr.harris@gmail.com>  2016-06-15 13:03:33 -0600
committer  GitHub <noreply@github.com>                 2016-06-15 13:03:33 -0600
commit     96030cd8f29828fccea16f681f50a749bb836af0 (patch)
tree       c66f20602935dde9c6a7609e2dea52faaaf28190
parent     e26738a628de0fe1897f2960708ec16e4c3177f7 (diff)
parent     330291ffcba6d00b6534bcf80107f3baea5b48a4 (diff)
download   numpy-96030cd8f29828fccea16f681f50a749bb836af0.tar.gz
Merge pull request #7747 from charris/update-7672
Update 7672, BUG: Make sure we don't divide by zero
-rw-r--r--  numpy/lib/polynomial.py              3
-rw-r--r--  numpy/lib/tests/test_polynomial.py   8
2 files changed, 10 insertions(+), 1 deletion(-)
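What this change does, as a minimal sketch (the data below are the same values the new test uses; the raised error assumes the patched behavior): with cov=True, np.polyfit scales the covariance by resids / (len(x) - order - 2.0), where order = deg + 1, so three points with deg=0 previously led to a division by zero. After this patch the call raises a ValueError instead.

    import numpy as np

    # deg=0 gives order=1, so the covariance denominator is len(x) - 3.
    # Three points make it exactly zero; the patched polyfit rejects this.
    try:
        np.polyfit([0, 1, 3], [0, 1, 3], deg=0, cov=True)
    except ValueError as exc:
        print("polyfit refused the fit:", exc)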
diff --git a/numpy/lib/polynomial.py b/numpy/lib/polynomial.py
index c0a1cdaed..d96b8969f 100644
--- a/numpy/lib/polynomial.py
+++ b/numpy/lib/polynomial.py
@@ -599,6 +599,9 @@ def polyfit(x, y, deg, rcond=None, full=False, w=None, cov=False):
         # it is included here because the covariance of Multivariate Student-T
         # (which is implied by a Bayesian uncertainty analysis) includes it.
         # Plus, it gives a slightly more conservative estimate of uncertainty.
+        if len(x) <= order + 2:
+            raise ValueError("the number of data points must exceed order + 2 "
+                             "for a Bayesian estimate of the covariance matrix")
         fac = resids / (len(x) - order - 2.0)
         if y.ndim == 1:
             return c, Vbase * fac
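The check added above guards the Student-T style scaling described in the comment block: fac = resids / (len(x) - order - 2.0) with order = deg + 1. A small illustrative helper (hypothetical, not part of NumPy) that mirrors the arithmetic behind the new ValueError:

    def cov_scale_denominator(num_points, deg):
        # Mirrors polyfit's covariance scaling denominator, where order = deg + 1.
        # The scale factor is only positive when num_points > deg + 3.
        order = deg + 1
        return num_points - order - 2.0

    assert cov_scale_denominator(3, 0) == 0.0   # the case the new check rejects
    assert cov_scale_denominator(4, 0) == 1.0   # smallest valid sample for deg=0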
diff --git a/numpy/lib/tests/test_polynomial.py b/numpy/lib/tests/test_polynomial.py
index 6d2e330ec..00dffd3d3 100644
--- a/numpy/lib/tests/test_polynomial.py
+++ b/numpy/lib/tests/test_polynomial.py
@@ -81,7 +81,7 @@ poly1d([ 2.])
 import numpy as np
 from numpy.testing import (
     run_module_suite, TestCase, assert_, assert_equal, assert_array_equal,
-    assert_almost_equal, assert_array_almost_equal, rundocs
+    assert_almost_equal, assert_array_almost_equal, assert_raises, rundocs
     )
@@ -135,6 +135,12 @@ class TestDocs(TestCase):
         err = [1, -1, 1, -1, 1, -1, 1]
         weights = np.arange(8, 1, -1)**2/7.0
 
+        # Check exception when too few points for variance estimate. Note that
+        # the Bayesian estimate requires the number of data points to exceed
+        # degree + 3.
+        assert_raises(ValueError, np.polyfit,
+                      [0, 1, 3], [0, 1, 3], deg=0, cov=True)
+
         # check 1D case
         m, cov = np.polyfit(x, y+err, 2, cov=True)
         est = [3.8571, 0.2857, 1.619]
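For context on the 1D case being checked above, a hedged usage sketch (illustrative data, not the x, y, err fixture defined earlier in the test): with enough points the call returns the coefficients together with a (deg + 1, deg + 1) covariance matrix.

    import numpy as np

    # Seven points comfortably exceed deg + 3 for a quadratic fit (needs > 5).
    x = np.arange(7, dtype=float)
    y = 3.0*x**2 + 0.5*x + 1.5 + np.array([1, -1, 1, -1, 1, -1, 1], dtype=float)
    m, cov = np.polyfit(x, y, 2, cov=True)
    assert m.shape == (3,) and cov.shape == (3, 3)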