# -*- coding: UTF-8 -*-
import threading
import redis
from com.functions import *
from com.dbhelper import *
from com.dbconfig import *
import multiprocessing
import threading
class MyThread(threading.Thread):
    """Thread subclass that runs an arbitrary callable with positional args.

    Parameters
    ----------
    func : callable to invoke on ``run()``.
    args : tuple of positional arguments passed to ``func``.
    name : optional thread name (assigned verbatim, as the original did).
    """

    def __init__(self, func, args, name=''):
        threading.Thread.__init__(self)
        self.name = name
        self.func = func
        self.args = args

    def run(self):
        # BUG FIX: the original used apply(self.func, self.args), which was
        # removed in Python 3.  Argument unpacking is the exact equivalent
        # and works on Python 2 as well.
        self.func(*self.args)
def get_avg_value(keylist):
    """Return the mean score of each sub-list of (member, score) pairs.

    Parameters
    ----------
    keylist : list of ``zrange(..., withscores=True)`` results; each element
        is a non-empty list of ``(member, score)`` tuples.

    Returns
    -------
    list of averages, one per sub-list (same order as the input).

    Raises
    ------
    ZeroDivisionError if any sub-list is empty (same as the original).
    """
    averages = []
    for pairs in keylist:
        # Pull out only the score component of each (member, score) pair.
        scores = [score for _member, score in pairs]
        averages.append(sum(scores) / len(scores))
    return averages
def get_data_from_redis(para_conn_pool_redis,para_conn_mysql,para_keylist):
    """Fetch every sorted set named in para_keylist via a pipelined ZRANGE
    and accumulate the per-key average score (see get_avg_value).

    Parameters:
        para_conn_pool_redis: redis.ConnectionPool used to build the client.
        para_conn_mysql: DB helper; currently unused because the MySQL
            insert below is commented out.
        para_keylist: iterable of Redis sorted-set key names.

    Requests are batched ~500 at a time through a pipeline to limit the
    size of each round trip.  Results are only accumulated into local
    lists; nothing is returned or persisted (the INSERT is disabled).
    """
    # keylist=[]
    tmp_result_avg=[]
    # NOTE: local `re` shadows the stdlib regex module name; here it is
    # a Redis client built on the shared connection pool.
    re = redis.Redis(connection_pool=para_conn_pool_redis)
    redis_pipe=re.pipeline()
    # Parameterized INSERT for the (currently disabled) MySQL write-back.
    insert_mysql="INSERT INTO MSSql_Data_TableSpace_total(machine_name, host_name, db_name, table_name, schema_name, row_count, reserved_kb, data_kb, index_size_kb, unused_kb, collection_time) VALUES(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)"
    count=0
    keys=[]
    for item in para_keylist:
        # Queue a full-range read (all members, with scores) for this key.
        redis_pipe.zrange(item,0,-1,withscores=True)
        if count>500:
            # Flush the pipeline once ~500 commands are queued.
            count=0
            tmp_dataset=redis_pipe.execute()
            for keyvalue in tmp_dataset:
                # keyvalue is a list of (member, score) tuples; this takes
                # the FIRST member of each set as a representative label —
                # presumably the member encodes the key identity.
                # NOTE(review): raises IndexError on an empty set — confirm
                # every key is non-empty.
                keys.append(keyvalue[0][0])
            tmp_result_avg+=(get_avg_value(tmp_dataset))
        count+=1
    # Flush whatever remains queued after the loop (the final partial batch).
    tmp_dataset=redis_pipe.execute()
    for keyvalue in tmp_dataset:
        keys.append(keyvalue[0][0])
    tmp_result_avg+=(get_avg_value(tmp_dataset))
    result=[]
    # The write-back to MySQL below is intentionally disabled; this script
    # only measures the read/average phase.
    # for i in range(0,len(tmp_result_avg),5):
    # tmp=para_keylist[i].split(':')
    # tmp=tmp[:-1]
    # tmp.append(tmp_result_avg[i])
    # tmp.append(tmp_result_avg[i+1])
    # tmp.append(tmp_result_avg[i + 2])
    # tmp.append(tmp_result_avg[i + 3])
    # tmp.append(tmp_result_avg[i + 4])
    # tmp.append(keys[i])
    # result.append(tuple(tmp))
    # del tmp
    # para_conn_mysql.executemany(insert_mysql,result)
    #queue.put(tmp_result_avg)
#def func_get_avg_value(redis_keys,thread_num):
#def func_get_avg_value(redis_keys,thread_num):
def main():
    """Single-threaded baseline: average the first 50k Redis keys.

    Connects to Redis and MySQL, fetches all key names, and runs
    get_data_from_redis over the first 50,000 of them in this process.
    """
    # BUG FIX: the original line was a syntax error -- `port=` and `db=`
    # had no values.  Standard defaults are supplied here; fill in the
    # real host/password before running against a live server.
    pool = redis.ConnectionPool(host='', port=6379, password='', db=0)
    client = redis.Redis(connection_pool=pool)
    mysqlconn = DBHelper(connectshort("xxxxx"))
    keyslist = client.keys()
    keyslist.sort()
    # Cap the workload at 50k keys so the benchmark is comparable with
    # the multi-thread / multi-process variants of this script.
    keyslist = keyslist[0:50000]
    get_data_from_redis(pool, mysqlconn, keyslist)


if __name__ == '__main__':
    main()
没有合适的资源?快使用搜索试试~ 我知道了~
python 单线程多线程和多进程的比较
共3个文件
py:3个
需积分: 44 18 下载量 145 浏览量
2016-09-26
10:31:19
上传
评论 1
收藏 1KB 7Z 举报
温馨提示
比较python 单线程,多线程和多进程的处理速度情况 测试用例为从redis中读出5W条keys,每个key含有48条记录,然后对这5W个keys分别求平均
资源推荐
资源详情
资源评论
收起资源包目录
python-threading-processing.7z (3个子文件)
pyconnectionpool_multiprocessing.py 3KB
pyconnectionpool_multithreading.py 3KB
pyconnectionpool_singlethreading.py 3KB
共 3 条
- 1
资源评论
chaojixiaogou
- 粉丝: 0
- 资源: 5
上传资源 快速赚钱
- 我的内容管理 展开
- 我的资源 快来上传第一个资源
- 我的收益 登录查看自己的收益
- 我的积分 登录查看自己的积分
- 我的C币 登录后查看C币余额
- 我的收藏
- 我的下载
- 下载帮助
安全验证
文档复制为VIP权益,开通VIP直接复制
信息提交成功