栏目分类:
子分类:
返回
名师互学网用户登录
快速导航关闭
当前搜索
当前分类
子分类
实用工具
热门搜索
名师互学网 > IT > 软件开发 > 后端开发 > Python

Logistic回归算法实现

Python 更新时间: 发布时间: IT归档 最新发布 模块sitemap 名妆网 法律咨询 聚返吧 英语巴士网 伯小乐 网商动力

Logistic回归算法实现

原理参考:【DL笔记3】一步步用python实现Logistic回归
logistic回归——PYTHON实现
第二篇原理比较易懂,代码好像有点问题。

python实现:

import numpy as np

# Logistic regression model (batch gradient descent, from scratch)
class LogisticRegression():
    """Binary logistic regression trained with full-batch gradient descent.

    Args:
        x: feature matrix, shape (n_samples, n_features).
        y: label vector, shape (n_samples,), values in {0, 1}.
    """
    def __init__(self, x, y):
        self.x = x
        self.y = y
        # One weight per feature and a scalar bias, all starting at zero.
        self.w = np.zeros(self.x.shape[1])
        self.b = 0

    def Logistic_sigmoid(self, y):
        # Non-linearity mapping any real value into (0, 1).
        # Written as 1/(1+exp(-y)) instead of exp(y)/(1+exp(y)):
        # the latter evaluates to inf/inf = nan for large positive y.
        return 1.0 / (1.0 + np.exp(-y))

    def Logistic_cost(self, p, y):
        # Binary cross-entropy loss summed over all samples.
        return np.sum(-y*np.log(p)-(1-y)*np.log(1-p))

    def Logistic_BP(self, alpha, iters):
        """Train by gradient descent.

        Args:
            alpha: learning rate.
            iters: number of full-batch update steps.
        Returns:
            (w, b): the learned weight vector and bias.
        """
        for i in range(iters):
            z = np.dot(self.x, self.w.T) + self.b
            a = self.Logistic_sigmoid(z)
            # Bug fix: the original passed the *global* `y` here (NameError
            # unless run as the accompanying script); use self.y.
            print('iters:', i, ' cost:', self.Logistic_cost(a, self.y))
            # Gradient of the cross-entropy w.r.t. z is simply (a - y).
            dz = a - self.y
            self.w -= alpha * np.dot(dz.T, self.x)
            self.b -= alpha * sum(dz)
        return self.w, self.b

    def Logistic_predict(self, x):
        # Return P(y = 1 | x) for each row of x.
        return self.Logistic_sigmoid(np.dot(x, self.w.T) + self.b)


if __name__ == '__main__':
    # Toy 1-D dataset: the two larger inputs carry label 1.
    # (Names x/y are kept at module level; the class reads them as written.)
    x = np.array([[0], [1], [2], [3]])
    y = np.array([0, 0, 1, 1])

    model = LogisticRegression(x, y)
    weights, bias = model.Logistic_BP(alpha=0.1, iters=100)
    print('最终训练得到的w和b为:', weights, ',', bias)

    # Probability that an input of 2.9 belongs to class 1.
    pre = model.Logistic_predict(np.array([[2.9]]))
    print('预测结果为:', pre)

python调包:

import numpy as np
from sklearn.linear_model import LogisticRegression

# Logistic regression model (thin wrapper around scikit-learn)
class MyLogisticRegression():
    """Wraps sklearn's LogisticRegression; fit happens in train()."""

    def __init__(self, x, y):
        # Store the training data; the classifier itself is created
        # unfitted here and fitted on demand.
        self.x = x
        self.y = y
        self.clf = LogisticRegression()

    def train(self):
        """Fit the classifier and return its (coef_, intercept_)."""
        self.clf.fit(self.x, self.y)
        return self.clf.coef_, self.clf.intercept_

    def predict(self, x):
        """Return per-class probabilities for each row of x."""
        return self.clf.predict_proba(x)
        

if __name__ == '__main__':
    # Same toy dataset; note the labels are given as a column vector here.
    x = np.array([[0], [1], [2], [3]])
    y = np.array([[0], [0], [1], [1]])

    model = MyLogisticRegression(x, y)
    coef, intercept = model.train()
    print('最终训练得到的w和b为:', coef, ',', intercept)
    print('预测结果为:', model.predict([[2.9]]))

C++实现(注:以下C++代码实际实现的是k近邻(KNN)算法,并非Logistic回归):

#include <algorithm>
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <map>
#include <vector>

// k-nearest-neighbour classifier (template arguments reconstructed: the
// original page had its angle-bracket contents stripped by HTML extraction).
class KNN
{
public:
	// x: training samples (rows of features); y: one integer class label per
	// sample; k: number of neighbours that vote; p: order of the Minkowski norm.
	KNN(std::vector<std::vector<float>> x, std::vector<int> y, int k, float p) : m_x(x), m_y(y), m_k(k), m_p(p) {};

	int predict(std::vector<std::vector<float>> x)
	{
		// Broadcast the (single) query row so it lines up with every
		// training sample.
		x.resize(m_x.size());
		for (size_t i = 0; i < x.size(); i++)
		{
			x[i] = x[0];
		}

		// Element-wise difference between the query and each training sample.
		std::vector<std::vector<float>> diff = x;
		for (size_t i = 0; i < diff.size(); i++)
		{
			for (size_t j = 0; j < diff[0].size(); j++)
			{
				diff[i][j] -= m_x[i][j];
			}
		}

		// Minkowski distance of order m_p per training sample.
		// std::abs is required: pow(negative, non-integer exponent) is NaN.
		std::vector<float> dist(diff.size(), 0);
		for (size_t i = 0; i < diff.size(); i++)
		{
			for (size_t j = 0; j < diff[0].size(); j++)
			{
				dist[i] += std::pow(std::abs(diff[i][j]), m_p);
			}
			dist[i] = std::pow(dist[i], 1.0f / m_p);
		}

		// Indices of the training samples ordered by increasing distance.
		std::vector<size_t> order(dist.size());
		for (size_t i = 0; i != order.size(); ++i) order[i] = i;
		std::sort(order.begin(), order.end(), [&dist](size_t i, size_t j) { return dist[i] < dist[j]; });

		// Count votes among the k nearest neighbours.
		std::map<int, int> count;
		for (size_t i = 0; i < static_cast<size_t>(m_k) && i < order.size(); i++)
		{
			count[m_y[order[i]]] += 1;
		}

		// Bug fix: return the label with the MOST votes. The original used
		// count.rbegin()->first, which returns the largest label *value*
		// (std::map orders by key), not the majority vote.
		int best_label = count.begin()->first;
		int best_votes = 0;
		for (const auto& kv : count)
		{
			if (kv.second > best_votes)
			{
				best_votes = kv.second;
				best_label = kv.first;
			}
		}
		return best_label;
	}

private:
	std::vector<std::vector<float>> m_x;  // training samples
	std::vector<int> m_y;                 // class labels, parallel to m_x
	int m_k;                              // number of voting neighbours
	float m_p;                            // Minkowski norm order
};


int main(int argc, char* argv[])
{
	// Four 2-D training points: the first two are class 0, the last two class 1.
	// (Template arguments reconstructed — they were stripped by HTML extraction.)
	std::vector<std::vector<float>> x = { { 0, 10 },{ 1, 8 },{ 10, 1 },{ 7, 4 } };
	std::vector<int> y = { 0, 0, 1, 1 };

	// Vote among the 3 nearest neighbours using the Euclidean (p = 2) norm.
	KNN knn = KNN(x, y, 3, 2);
	std::cout << "预测值为:" << knn.predict({ {6,2} }) << std::endl;

	system("pause");  // Windows-only pause; harmless no-op intent elsewhere
	return EXIT_SUCCESS;
}
转载请注明:文章转载自 www.mshxw.com
本文地址:https://www.mshxw.com/it/846448.html
我们一直用心在做
关于我们 文章归档 网站地图 联系我们

版权所有 (c)2021-2022 MSHXW.COM

ICP备案号:晋ICP备2021003244-6号