Python: fixing garbled text (mojibake) when reading from a database and when saving to CSV
The script below pulls the daily visit-count statistics of each business system from the database, splits them into workday and rest-day samples, takes the 5th and 95th percentiles as boundary lines, and saves those boundaries to a CSV file for later use.
The code is as follows:
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time   : 2018/6/26 16:19
# @Author : yao
# Fetch the daily visit-count statistics of each business system from the
# database, split them into workday and rest-day samples, take the 5th and
# 95th percentiles as boundary lines, and save the boundaries to a CSV file
# for later use.
import pymysql
import datetime as dt
import pandas as pd
import numpy as np

# Business system names (redacted here), one per line.
serviceNameStr = """安****
智***
沃***
电***
统******
自*****
资**
资***
集****"""
serviceNames = serviceNameStr.split("\n")
# for i in serviceNames:
#     print i


def getPercentile():
    curdate = dt.datetime.now().strftime('%Y-%m-%d %H:%M')  # not used below
    # Connect to the database; charset='utf8' keeps the Chinese text intact.
    db = pymysql.connect(host='10.126.128.110', user='', passwd='',
                         db='cmscp', port=9100, charset='utf8')
    cs = db.cursor()
    # Upper and lower bounds of every business system are collected here.
    thisFinal = []
    for serviceName in serviceNames:
        sql = ("SELECT createDate, icount FROM cmscp.t_web_logcount "
               "WHERE serviceName = '%s' AND icount != 0 "
               "ORDER BY createDate") % serviceName
        workday = []
        freeday = []
        try:
            cs.execute(sql)
            results = cs.fetchall()
            for i in results:
                # weekday() returns 0-4 for Monday-Friday.
                if i[0].weekday() in range(5):
                    workday.append(i[1])
                else:
                    freeday.append(i[1])
        except Exception as e:
            print "Error:", e
        workdaynp = np.array(workday)
        freedaynp = np.array(freeday)
        # Lower/upper bounds (5th and 95th percentiles) for workdays and rest days.
        workper5 = np.percentile(workdaynp, 5)
        workper95 = np.percentile(workdaynp, 95)
        freedayper5 = np.percentile(freedaynp, 5)
        freedayper95 = np.percentile(freedaynp, 95)
        # Values outside the bounds (outliers), printed for inspection.
        w = workdaynp[(workdaynp > workper95) | (workdaynp < workper5)]
        f = freedaynp[(freedaynp > freedayper95) | (freedaynp < freedayper5)]
        # Upper and lower bounds of this business system.
        thisSys = [serviceName, workper5, workper95, freedayper5, freedayper95]
        thisFinal.append(thisSys)
        print thisSys
        print workday
        print w
        print freeday
        print f
        print "--------------"
    name = ["serviceName", "workper5", "workper95", "freeper5", "freeper95"]
    thisFinaldf = pd.DataFrame(columns=name, data=thisFinal)
    # Printing the values fetched from the database one by one avoids the
    # garbled output you get when printing a whole list or tuple.
    for i in thisFinal:
        print i[0]
    # The encoding argument on the next line fixes the garbled Chinese text
    # in the saved CSV file.
    thisFinaldf.to_csv("C:/Users/Administrator/Desktop/thisFinal.csv", encoding="GBK")


getPercentile()
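The two fixes are easier to see outside the database loop. Below is a minimal, self-contained sketch, assuming the same Python 2 environment as the script above; the service names and output path in it are made-up examples, not real data. It shows why printing a whole list looks garbled while printing its fields one by one does not, and why to_csv needs an explicit encoding for Excel on a Chinese Windows machine. On the database side, the script above already passes charset='utf8' to pymysql.connect, which is what keeps the fetched Chinese text intact.

# -*- coding: utf-8 -*-
# Minimal sketch of the two mojibake symptoms and fixes (Python 2).
# The service names and file name are placeholders for illustration only.
import pandas as pd

rows = [[u"安全系统", 120], [u"智能系统", 300]]

# Printing the whole list shows the repr of each unicode string
# (e.g. u'\u5b89\u5168...'), which looks garbled:
print rows

# Fix 1: print the fields one by one to get the actual Chinese characters:
for row in rows:
    print row[0], row[1]

df = pd.DataFrame(rows, columns=["serviceName", "icount"])
# Fix 2: without an explicit encoding, writing these unicode strings either
# fails or produces a file that Excel on Chinese Windows shows as mojibake;
# GBK matches the system code page Excel assumes for CSV files.
df.to_csv("demo_thisFinal.csv", encoding="GBK", index=False)

GBK works here because Excel on Chinese Windows opens CSV files with the system code page; writing UTF-8 with a BOM (encoding="utf-8-sig" in Python 3 / newer pandas) is a common alternative when the data contains characters outside GBK.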