YACWC
This commit is contained in:
356
db_conn.py
Executable file
356
db_conn.py
Executable file
@@ -0,0 +1,356 @@
|
||||
|
||||
|
||||
#tables_meta = ['motionlogs', 'floor2temperature', 'horizontalwheel', 'verticalwheel','heartbeats', 'activities','food_weight','weight','daily_counts','hourly_temperature','food_dispenser', 'food_forager', 'touch_surface_grid']
|
||||
import inspect
|
||||
import clickhouse_driver as ch
|
||||
from functools import partial
|
||||
import json
|
||||
|
||||
|
||||
|
||||
# %%
|
||||
|
||||
# Mapping from this project's lowercase dtype names to ClickHouse column types.
# Note: both 'float32' and 'float' map to Float32; 'int' maps to Int16.
c_to_ch_dtype_map = {
    'uint8': 'UInt8',
    'uint16': 'UInt16',
    'float32': 'Float32',
    'datetime': 'DateTime',
    'point': 'Point',
    'str': 'String',
    'float': 'Float32',
    'int': 'Int16',
}
|
||||
class db_api():
    """Small ClickHouse helper: connection handling, bulk inserts, schema DDL.

    On construction, per-measurement convenience methods are generated, e.g.
    ``insert_temperature(v, when=..., where=...)`` which writes one row into
    ``nuggets.temperature``.

    Notes:
        * ``update_host`` and ``do_loopback`` are module-level names defined
          later in this file; they exist by the time an instance is created.
        * Several query builders interpolate values directly into SQL text —
          they must only ever be fed trusted, internal input (see NOTE(review)
          comments below).
    """

    def __init__(self):
        # Default DB host; rewritten to 127.0.0.1 when running on the DB
        # server itself (see update_host / do_loopback at module level).
        host = '192.168.1.242'
        host = update_host(do_loopback, host)

        # Keep both a DB-API connection (cursor-based executemany inserts)
        # and a native clickhouse_driver Client.
        self.conn = ch.connect('clickhouse://' + host)
        self.client = ch.Client(host=host)
        self.cursor = self.conn.cursor()

        # Generate insert_<measurement>() wrappers for each known measurement.
        kff = ['altitude', 'humidity', 'pressure', 'temperature', 'weight',
               'hydroponics_ph', 'hydroponics_ec', 'hydroponics_rtd']
        for kv in kff:
            setattr(self, 'insert_' + kv,
                    partial(self.generic_when_where, first_key=kv))

    def get_query_time_filtered(self, table, time_start, time_end):
        """Return SQL selecting all rows of *table* with time_start < when < time_end, ascending.

        NOTE(review): values are interpolated directly into the SQL string —
        only call this with trusted table names and timestamps.
        """
        cquery = f"select * from {table} where when > '{str(time_start)}' and when < '{str(time_end)}' order by when asc"
        return cquery

    def expand_to_list(self, vals):
        """Convert a dict of scalars/lists into a list of row dicts.

        List values become one entry per row; scalar values are broadcast to
        every row. Row count is the longest list length (1 if no lists).
        A list shorter than the longest one fills only its first rows, as in
        the original implementation.
        """
        max_len = max((len(v) for v in vals.values() if isinstance(v, list)),
                      default=1)
        output = [dict() for _ in range(max_len)]
        for k, v in vals.items():
            if isinstance(v, list):
                for idx, val in enumerate(v):
                    output[idx][k] = val
            else:
                for row in output:
                    row[k] = v
        return output

    def get_insert_statement(self, table_name, keys):
        """Return the 'INSERT INTO <table> (<cols>) values ' prefix for executemany."""
        kvars = ', '.join(keys)
        return f'INSERT INTO {table_name} ({kvars}) values '

    def insert(self, dd, table, schema=None):
        """Bulk-insert *dd* (a dict of columns, or a list of row dicts) into *table*.

        Column order is taken from the first row's keys. A no-op on empty
        input (the original raised IndexError on an empty list).
        """
        if schema is not None:
            table = schema + '.' + table

        insert_this = self.expand_to_list(dd) if isinstance(dd, dict) else dd
        if not insert_this:
            return  # nothing to insert; avoid IndexError on insert_this[0]

        keys = insert_this[0].keys()
        insert_statement = self.get_insert_statement(table, keys)
        self.cursor.executemany(insert_statement, insert_this)

    def query(self, query, no_results=False, **kwargs):
        """Execute *query*; return fetchall() results, or None when no_results is set."""
        self.cursor.execute(query, **kwargs)
        if no_results:
            return None
        return self.cursor.fetchall()

    def generic_when_where(self, v_in, when=None, where=None, first_key=None):
        """Insert one measurement row {first_key: v_in, where, when} into nuggets.<first_key>.

        This is the implementation behind the generated insert_<measurement>()
        methods created in __init__.
        """
        table = 'nuggets.' + first_key
        # Explicit dict replaces the original locals()-snapshot hack; the
        # key order (value, where, when) is preserved.
        dd = {first_key: v_in, 'where': where, 'when': when}
        self.insert(dd, table)

    def get_table_create_statement(self, schema, table_name, dtypes,
                                   insert_when=False, nullable=set()):
        """Build a CREATE TABLE statement (ReplacingMergeTree, ordered by `when`).

        dtypes maps field name -> project dtype name (see c_to_ch_dtype_map);
        fields listed in *nullable* are wrapped in Nullable(...).  When
        insert_when is True a leading `when` DateTime64(3) column is added.
        NOTE(review): the default ``nullable=set()`` is a shared mutable
        default — safe only because it is never mutated here.
        """
        entries = list()
        if insert_when:
            entries.append('`when` DateTime64(3)')

        for field_name, data_type in dtypes.items():
            ch_dtype = c_to_ch_dtype_map[data_type]
            if field_name in nullable:
                ch_dtype = 'Nullable(' + ch_dtype + ')'
            entries.append(f' `{field_name}` {ch_dtype}')

        dtype_text = ',\n'.join(entries)
        create_header = f'create table {schema}.{table_name} '
        create_footer = f' \nENGINE = ReplacingMergeTree\nORDER BY when\nSETTINGS index_granularity = 8192'
        create_statement = create_header + '\n(\n' + dtype_text + '\n)\n' + create_footer
        return create_statement

    def check_if_table_exists(self, schema, table_name):
        """Return True when exactly one table <schema>.<table_name> exists.

        NOTE(review): schema/table_name are interpolated into SQL — trusted
        input only.
        """
        query = f'''SELECT count(*) as num_tables
        FROM information_schema.tables where table_schema == '{schema}' and table_name == '{table_name}' '''
        return self.query(query)[0][0] == 1

    def get_table_contents(self, table, make_map=False, make_inv_map=False):
        """Return all rows of *table*.

        make_map: return {col0: col1} instead of the row list.
        make_inv_map: return {col1: col0} instead (checked after make_map).
        """
        query = f'select * from {table}'
        self.cursor.execute(query)
        results = self.cursor.fetchall()

        if make_map:
            return {row[0]: row[1] for row in results}
        if make_inv_map:
            return {row[1]: row[0] for row in results}
        return results
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.engine import create_engine
|
||||
from sqlalchemy.schema import MetaData
|
||||
from sqlalchemy.orm import Session
|
||||
import os
|
||||
import json
|
||||
import socket
|
||||
# True when running on the DB server itself (hostname 'tree'): in that case
# update_host() rewrites the LAN address to 127.0.0.1.
do_loopback = socket.gethostname() == 'tree'
|
||||
|
||||
def update_host(do_loopback, ip_addr):
    """Return the address to connect to: 127.0.0.1 when loopback is requested
    and *ip_addr* is the known DB server address, otherwise *ip_addr* unchanged."""
    loopback_applies = do_loopback and ip_addr == '192.168.1.242'
    if loopback_applies:
        return '127.0.0.1'
    return ip_addr
|
||||
|
||||
def connect( user='postgres', password='', db='nuggets', host='192.168.1.242', port=5432, echo = False):
    """Connect to a Postgres database via SQLAlchemy and reflect its schema.

    Returns a dict: 's' -> Session, 't' -> {table_name: Table},
    'c' -> engine (with a monkey-patched .execute, see execute_sub below),
    'm' -> the reflected MetaData.
    """

    # Possibly rewrite the host to loopback when running on the DB server.
    host = update_host(do_loopback, host)

    # Local imports duplicate the module-level ones; kept for interactive use.
    from sqlalchemy.engine import create_engine
    from sqlalchemy.schema import MetaData
    from sqlalchemy.orm import Session
    import os
    import json

    # Dead block: convenient for pasting the body into a REPL with these
    # defaults bound; never executes.
    if False:

        user='postgres'
        password=''
        db='nuggets'
        host='192.168.1.242'
        port=5432
        echo = False

    url = 'postgresql://{}:{}@{}:{}/{}'
    url = url.format(user, password, host, port, db)

    # application_name makes this client identifiable in pg_stat_activity.
    conn = create_engine(url, connect_args={"application_name":"python_commoncode"}, echo = echo, future=True)
    # Hard-coded schema lists per known database.
    # NOTE(review): `schemas` is unbound (NameError below) for any other db
    # value — confirm only 'nuggets'/'winslow' are ever passed.
    if db == 'nuggets':
        schemas =['public','health','videos']
    elif db == 'winslow':
        schemas=['body','notes','sensors','video']

    metadata = MetaData()
    for schema in schemas:
        metadata.reflect(conn, schema,views=True)

    session = Session(conn)

    # Strip the schema prefix so tables are addressable by bare name.
    # NOTE(review): same-named tables in different schemas silently collide.
    tables = {k.split('.')[-1]:v for k,v in metadata.tables.items()}

    from sqlalchemy import text

    def execute_sub(query_in, has_data = None):
        """Replacement for engine.execute: runs *query_in* on a fresh
        connection, best-effort commits, and returns the result.

        NOTE(review): when has_data is given, the query is executed here AND
        again by one of the branches below (compiled or text) — presumably
        unintended double execution; confirm before relying on it.
        """

        if has_data is not None:
            with conn.connect() as cox:
                result = cox.execute(query_in, has_data)
                # Best-effort commit; bare except deliberately swallows
                # failures (e.g. read-only statements).
                try:
                    cox.commit()
                except:
                    pass

        # SQLAlchemy expression objects (have .compile) vs raw SQL strings.
        if hasattr(query_in, 'compile'):

            compiled = query_in.compile(dialect=postgresql.dialect())

            with conn.connect() as cox:
                result = cox.execute(query_in, compiled.params)
                try:
                    cox.commit()
                except:
                    pass

        else:
            # Raw string: wrap in text() as required by the 2.0-style API.
            with conn.connect() as cox:
                result = cox.execute(text(query_in))
                try:
                    cox.commit()
                except:
                    pass

        return result

    # Monkey-patch the engine so callers can keep using conn.execute(...).
    setattr(conn, 'execute',execute_sub)

    # Provenance recording: capture the backend PID and the Python call stack
    # that opened this connection (the file dump is currently disabled).
    # with conn.connect() as ce:
    if True:
        ce = conn
        pid = ce.execute('select pg_backend_pid()').fetchall()[0][0]
        if not os.path.exists('/dev/shm/pg/'):
            try:
                os.mkdir('/dev/shm/pg')
            except:
                pass
        # os.chmod('/dev/shm/pg',0o777)

        # fname = '/dev/shm/pg/'+str(pid)

        details = list()
        for x in inspect.stack():
            details.append({'filename':x.filename,'function':x.function,'lineno':x.lineno})

        # with open(fname,'w') as ff:
        #     json.dump(details, ff, indent=4, sort_keys = False)
        # os.chmod(fname,0o777)
    # %%

    return {'s':session, 't':tables, 'c':conn, 'm':metadata}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def old_connect( user='postgres', password='', db='nuggets', host='192.168.1.242', port=5432):
    """Legacy Postgres connector (pre-2.0 SQLAlchemy style); superseded by connect().

    Reflects every table on the search_path plus materialized views, and
    returns a dict: 's' Session, 't' {name: Table}, 'c' engine, 'm' MetaData,
    'b' automap base.
    """

    # Dead block: REPL defaults; never executes.
    if False:
        user = 'postgres'
        password=''
        db='winslow'
        host='192.168.1.242'
        port=5432
        import time

    from sqlalchemy import and_, func, Table, MetaData, create_engine, inspect
    from sqlalchemy.orm import Session, load_only
    from sqlalchemy.ext.automap import automap_base
    from sqlalchemy.pool import NullPool

    url = 'postgresql://{}:{}@{}:{}/{}'
    url = url.format(user, password, host, port, db)

    # NullPool: no connection pooling — every checkout opens a new connection.
    conn = create_engine(url, client_encoding='utf8',poolclass=NullPool, future=True)

    def get_tables_in_schema(schema_name):
        """Return the table names in *schema_name* (string-built SQL; trusted input only)."""

        output = conn.execute("SELECT table_name FROM information_schema.tables WHERE table_schema='"+schema_name+"'").fetchall()
        return [x[0] for x in output]

    # Schemas come from the session's search_path, e.g. '"$user", public'.
    schemas = [x.strip() for x in conn.execute('show search_path').fetchall()[0][0].split(',')]

    tables_meta = list()
    for schema in schemas:
        tables_meta.extend(get_tables_in_schema(schema))

    # information_schema.tables excludes materialized views; add them here.
    materialized_tables = [x[0] for x in conn.execute('select matviewname from pg_matviews')]
    tables_meta.extend(materialized_tables)

    # NOTE(review): MetaData(bind=...) and Table(autoload_with=...) are the
    # deprecated pre-2.0 API — this whole function predates connect().
    meta = MetaData(bind=conn)#, reflect = True)
    meta.reflect(conn)

    base = automap_base()
    base.prepare(conn, reflect=True)

    session = Session(conn)
    tables = dict()

    for table in tables_meta:
        # Some names (e.g. views without keys) fail reflection; log and skip.
        try:
            tables[table] = Table(table, meta, #MetaData(),
                autoload=False, autoload_with=conn)
        except:
            print(table, 'broke')
            pass

    return {'s':session, 't':tables, 'c':conn, 'm':meta, 'b':base}
|
||||
|
||||
|
||||
|
||||
Reference in New Issue
Block a user