This article gives a brief introduction to connecting to a ClickHouse database from Python with clickhouse_driver and writing data into ClickHouse. Two connection styles are covered: the Client class and the DB-API style connect()/cursor interface.
A convenient way to collect system information in Python is the third-party module psutil.
As the name suggests, psutil stands for "process and system utilities": it lets you implement basic system monitoring in just a line or two of code, and it works across platforms.
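As a quick illustration of how little code psutil needs (this snippet is not part of the original scripts), two calls are enough to read CPU and memory usage:

import psutil

# CPU usage in percent, sampled over one second
print(psutil.cpu_percent(interval=1))

# total and available physical memory, in bytes
mem = psutil.virtual_memory()
print(mem.total, mem.available)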
Step 1:
Step 2:
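Both scripts below write into a table named clickhouse_host_metrics777, which must already exist in ClickHouse. The article does not show its definition, so the snippet below is only a plausible sketch: the column types, the MergeTree engine and the sort key are assumptions to adapt to your own setup.

from clickhouse_driver import Client

client = Client(host='192.168.50.94', port=55666, user='default',
                password='your_password', database='default')

# One-off table creation; every column except create_at is assumed to be a String
client.execute("""
    CREATE TABLE IF NOT EXISTS clickhouse_host_metrics777
    (
        time_stamp   String,
        host_name    String,
        chart_name   String,
        metric_name  String,
        metric_value String,
        create_at    DateTime
    )
    ENGINE = MergeTree()
    ORDER BY (host_name, create_at)
""")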
from clickhouse_driver import Client
from datetime import datetime
import psutil

host_name = '192.168.50.94'
client = Client(host=host_name, database='default', user='default',
                password='your_password',  # the keyword is password (lowercase), not passWord
                send_receive_timeout=20, port=55666)

now = datetime.now()
time_stamp = now.strftime('%a %b %d %H:%M:%S CST %Y')  # e.g. Tue Apr 06 15:32:55 CST 2021, type str
create_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

disk_io = psutil.disk_io_counters()
net_io = psutil.net_io_counters()
chart_name = ["磁盘IO", "网络IO"]  # chart names: disk I/O, network I/O
metric_name1 = ["读(数量)", "写(数量)", "读(字节)", "写(字节)", "读(时间)", "写(时间)"]  # read/write counts, bytes, times
metric_name2 = ["发送字节数", "接收字节数", "发送包数", "接收包"]  # bytes/packets sent and received
metric_value1 = [disk_io.read_count, disk_io.write_count, disk_io.read_bytes,
                 disk_io.write_bytes, disk_io.read_time, disk_io.write_time]
metric_value2 = [net_io.bytes_sent, net_io.bytes_recv, net_io.packets_sent, net_io.packets_recv]

try:
    for i in chart_name:
        if i == "磁盘IO":  # compare strings with ==, not "is"
            for j in metric_name1:
                sql = "insert into clickhouse_host_metrics777(time_stamp, host_name, chart_name, metric_name, metric_value, create_at) " \
                      "values('%s','%s','%s','%s','%s','%s')" % \
                      (time_stamp, host_name, i, j, metric_value1[metric_name1.index(j)], create_at)
                client.execute(sql)
        elif i == "网络IO":
            for j in metric_name2:
                sql = "insert into clickhouse_host_metrics777(time_stamp, host_name, chart_name, metric_name, metric_value, create_at) " \
                      "values('%s','%s','%s','%s','%s','%s')" % \
                      (time_stamp, host_name, i, j, metric_value2[metric_name2.index(j)], create_at)
                client.execute(sql)
    print("Data written successfully")
except Exception as e:
    print(str(e))
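Building each INSERT with % string formatting works, but clickhouse_driver's Client can also take the statement and the rows separately, which avoids manual quoting and sends everything in one round trip. A minimal sketch of the same write in that style, reusing the variables defined above (metric_value is cast to str because the column is assumed to be a String):

rows = []
for name, value in zip(metric_name1, metric_value1):
    rows.append((time_stamp, host_name, "磁盘IO", name, str(value), create_at))
for name, value in zip(metric_name2, metric_value2):
    rows.append((time_stamp, host_name, "网络IO", name, str(value), create_at))

# one INSERT for all rows; the driver substitutes the values itself
client.execute(
    "INSERT INTO clickhouse_host_metrics777 "
    "(time_stamp, host_name, chart_name, metric_name, metric_value, create_at) VALUES",
    rows
)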
from datetime import datetime
import psutil
from clickhouse_driver import connect

host_name = '192.168.50.94'
# DSN format: clickhouse://user:password@host:port/database
conn = connect('clickhouse://default:your_password@' + host_name + ':55666/default')
cursor = conn.cursor()

now = datetime.now()
time_stamp = now.strftime('%a %b %d %H:%M:%S CST %Y')  # e.g. Tue Apr 06 15:32:55 CST 2021, type str
create_at = datetime.now().strftime('%Y-%m-%d %H:%M:%S')

disk_io = psutil.disk_io_counters()
net_io = psutil.net_io_counters()
chart_name = ["磁盘IO", "网络IO"]  # chart names: disk I/O, network I/O
metric_name1 = ["读(数量)", "写(数量)", "读(字节)", "写(字节)", "读(时间)", "写(时间)"]
metric_name2 = ["发送字节数", "接收字节数", "发送包数", "接收包"]
metric_value1 = [disk_io.read_count, disk_io.write_count, disk_io.read_bytes,
                 disk_io.write_bytes, disk_io.read_time, disk_io.write_time]
metric_value2 = [net_io.bytes_sent, net_io.bytes_recv, net_io.packets_sent, net_io.packets_recv]

try:
    for i in chart_name:
        if i == "磁盘IO":  # compare strings with ==, not "is"
            for j in metric_name1:
                sql = "insert into clickhouse_host_metrics777(time_stamp, host_name, chart_name, metric_name, metric_value, create_at) " \
                      "values('%s','%s','%s','%s','%s','%s')" % \
                      (time_stamp, host_name, i, j, metric_value1[metric_name1.index(j)], create_at)
                cursor.execute(sql)
        elif i == "网络IO":
            for j in metric_name2:
                sql = "insert into clickhouse_host_metrics777(time_stamp, host_name, chart_name, metric_name, metric_value, create_at) " \
                      "values('%s','%s','%s','%s','%s','%s')" % \
                      (time_stamp, host_name, i, j, metric_value2[metric_name2.index(j)], create_at)
                cursor.execute(sql)
    cursor.close()
    print("Data written successfully")
except Exception as e:
    print(str(e))
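The same connect()/cursor interface can also read data back using the usual DB-API pattern. A small sketch, reusing the conn object from above and the table name from the scripts:

cursor = conn.cursor()
cursor.execute(
    "select chart_name, metric_name, metric_value "
    "from clickhouse_host_metrics777 "
    "where host_name = '192.168.50.94' limit 10"
)
for chart, metric, value in cursor.fetchall():  # fetchall() returns a list of tuples
    print(chart, metric, value)
cursor.close()
conn.close()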
from clickhouse_driver import Client

# connect to ClickHouse (fill in your own connection details)
client = Client(host='your_host', port=9000, user='default',
                database='default', password='your_password')

# copy the rows queried from database1.table1 into database2.table2
# (table2 must already exist in database2 with a matching structure)
query_ck_sql = """ SELECT *
                   FROM database1.table1
                   WHERE date = today() """

try:
    # INSERT ... SELECT runs entirely inside the ClickHouse server
    client.execute(
        "insert into {official_table_db}.{official_table_name} {query_ck_sql}".format(
            official_table_db='database2',
            official_table_name='table2',
            query_ck_sql=query_ck_sql),
        types_check=True)
except Exception as e:
    print(str(e))
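Because the INSERT ... SELECT above runs entirely inside ClickHouse, no data passes through Python. A quick way to confirm the copy is to compare row counts afterwards; this sketch reuses the client from above and assumes table2 has the same date column as table1:

src_count = client.execute("SELECT count() FROM database1.table1 WHERE date = today()")[0][0]
dst_count = client.execute("SELECT count() FROM database2.table2 WHERE date = today()")[0][0]
print("source rows:", src_count, "copied rows:", dst_count)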
The above is based on my own experience; I hope it can serve as a useful reference.