Compute: "What percentile of the world's population does your wealth fall into?"
# -*- coding: utf-8 -*-
"""
Net assets = market value of total assets - total loans
"""
import matplotlib.pyplot as plt
import numpy as np
import scipy as sp
from scipy.stats import norm
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LinearRegression
from sklearn.preprocessing import PolynomialFeatures
from sklearn import linear_model
''' Data: wealth thresholds (x) and, for each threshold, what appears to be
    the cumulative number of people whose net assets fall below it (y). '''
#x = np.arange(0, 1, 0.002)
#y = norm.rvs(0, size=500, scale=0.1)
#y = y + x**2
x = [10000, 100000, 1000000, 5000000, 10000000, 50000000]
y = [3474000000, 4528000000, 4919000000, 4950365100, 4954902500, 4954050700]
x = np.array(x).reshape(-1, 1)
y = np.array(y).reshape(-1, 1)
x_test = np.array(range(1,75000000000,10000)).reshape(-1, 1)
##''' Root mean squared error '''
##def rmse(y_test, y):
##    return np.sqrt(np.mean((y_test - y) ** 2))
##
##''' How much better than predicting the mean, in [0, 1]: 0 means no better
##    than the mean, 1 means a perfect prediction. This version follows the
##    scikit-learn documentation. '''
##def R2(y_test, y_true):
##    return 1 - ((y_test - y_true)**2).sum() / ((y_true - y_true.mean())**2).sum()
##
##
##''' This is the version from Conway & White, "Machine Learning for Hackers" '''
##def R22(y_test, y_true):
##    y_mean = np.array(y_true)
##    y_mean[:] = y_mean.mean()
##    return 1 - rmse(y_test, y_true) / rmse(y_mean, y_true)
##
#
#plt.scatter(x, y, s=5)
#degree = [3,10,40]
##y_test = []
##y_test = np.array(y_test)
#
#
#for d in degree:
#    clf = Pipeline([('poly', PolynomialFeatures(degree=d)),
#                    ('linear', LinearRegression())])
#    clf.fit(x, y)
#    y_test = clf.predict(x_test)
#
#    print(clf.named_steps['linear'].coef_)
##    print('rmse=%.2f, R2=%.2f, R22=%.2f, clf.score=%.2f' %
##          (rmse(y_test, y),
##           R2(y_test, y),
##           R22(y_test, y),
##           clf.score(x, y)))
#
#    plt.plot(x_test, y_test, linewidth=2)
#
#plt.grid()
#plt.legend(['3','10','40'], loc='upper left')
#plt.show()
#import numpy as np
#from sklearn.linear_model import LinearRegression
#from sklearn.preprocessing import PolynomialFeatures
#X_train = x
#y_train = y
#X_test = x_test
#y_test = [[8], [12], [15], [18]]
## Build a linear regression and plot with the trained model
#regressor = LinearRegression()
#regressor.fit(X_train, y_train)
#xx = np.linspace(0, 26, 100)
#yy = regressor.predict(xx.reshape(xx.shape[0], 1))
##plt = runplt()
#plt.plot(X_train, y_train, 'k.')
#plt.plot(xx, yy)
#
#quadratic_featurizer = PolynomialFeatures(degree=2)
#X_train_quadratic = quadratic_featurizer.fit_transform(X_train)
#X_test_quadratic = quadratic_featurizer.transform(X_test)
#regressor_quadratic = LinearRegression()
#regressor_quadratic.fit(X_train_quadratic, y_train)
#xx_quadratic = quadratic_featurizer.transform(xx.reshape(xx.shape[0], 1))
#plt.plot(xx, regressor_quadratic.predict(xx_quadratic), 'r-')
#plt.show()
#print(X_train)
#print(X_train_quadratic)
#print(X_test)
#print(X_test_quadratic)
#y_test = regressor_quadratic.predict(X_test)
#print('1 r-squared', regressor.score(X_test, y_test))
#print('2 r-squared', regressor_quadratic.score(X_test_quadratic, y_test))
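# The commented-out experiments above fit polynomial curves of several degrees
# (3, 10, 40 in the original) to the same data via a scikit-learn Pipeline.
# Below is a small, self-contained sketch of that idea; it only uses degrees
# 2 and 3 because raising values near 5e7 to very high powers is numerically
# ill-conditioned in float64. It is wrapped in a function and never called, so
# running this script is unaffected; call _poly_fit_demo() to try it.
def _poly_fit_demo():
    data_x = np.array([10000, 100000, 1000000,
                       5000000, 10000000, 50000000]).reshape(-1, 1)
    data_y = np.array([3474000000, 4528000000, 4919000000,
                       4950365100, 4954902500, 4954050700]).reshape(-1, 1)
    grid = np.linspace(10000, 50000000, 500).reshape(-1, 1)

    plt.scatter(data_x, data_y, s=5)
    for d in [2, 3]:
        model = Pipeline([('poly', PolynomialFeatures(degree=d)),
                          ('linear', LinearRegression())])
        model.fit(data_x, data_y)
        plt.plot(grid, model.predict(grid), linewidth=2, label='degree %d' % d)
    plt.legend(loc='lower right')
    plt.grid()
    plt.show()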
from scipy.optimize import curve_fit
from scipy.integrate import quad
def func(x, a, b, c):
    return a * np.exp(-b * x) + c
# Same data as above, flattened to 1-D for curve_fit.
x = np.array([10000, 100000, 1000000, 5000000, 10000000, 50000000])
y = np.array([3474000000, 4528000000, 4919000000, 4950365100, 4954902500, 4954050700])
xdata = x
# 4954347100 appears to be the total population in the source data, so ydata
# is the number of people whose net assets exceed each threshold in xdata.
ydata = 4954347100 - y
plt.plot(xdata, ydata, 'b-')
popt, pcov = curve_fit(func, xdata, ydata)
# popt holds the three fitted parameters a, b, c
y2 = [func(i, popt[0], popt[1], popt[2]) for i in xdata]
plt.plot(xdata, y2, 'r--')
print(popt)
print(quad(lambda x: func(x, popt[0], popt[1], popt[2]), 0, 88888))
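# What the gist title asks: given your net assets, what share of the world's
# population are you above? A minimal sketch using the fitted curve, under the
# assumptions that func(x, *popt) approximates the number of people whose net
# assets exceed x and that 4954347100 is the matching population total;
# my_assets = 200000 is a purely illustrative value.
TOTAL_PEOPLE = 4954347100
my_assets = 200000
people_above = func(my_assets, popt[0], popt[1], popt[2])
share_above = max(0.0, min(1.0, 1 - people_above / TOTAL_PEOPLE))
print('Assets of %d put you above roughly %.1f%% of people worldwide.'
      % (my_assets, share_above * 100))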