import json
import os
import re
from datetime import datetime, timezone
from hashlib import md5
from urllib.parse import urlparse

import numpy as np
import pandas as pd
from bokeh.io import output_file, show
from bokeh.layouts import column
from bokeh.models import ColumnDataSource, LinearAxis, Range1d
from bokeh.models.widgets import DataTable, FileInput, TableColumn
from bokeh.plotting import curdoc, figure
from cloudant.client import CouchDB
from requests.adapters import HTTPAdapter

from config import cfg
from pccDccTools import dsiVector, dsiMultiVector, DsiASCICOnversion, dsiJSONEncoder, dsiDCC
#TODO review reftype mapper since api change in dsiVector done due to tableTool development
class refTypeMapper:
    def __init__(self, refTypsMappingJSON):
        with open(refTypsMappingJSON, "r", encoding='utf-8') as f:
            self.RefTypeMapping = json.load(f)
        # build the reverse lookup: language -> {localized name: reftype}
        self.refTypesForNames = {}
        for refType in self.RefTypeMapping:
            for lang, name in self.RefTypeMapping[refType]['names'].items():
                self.refTypesForNames.setdefault(lang, {})[name] = refType
    def getNamesForReftype(self, reftype, language=None):
if language is not None:
return self.RefTypeMapping[reftype]['names'][language]
else:
return self.RefTypeMapping[reftype]['names']
    def getReftypeFromName(self, name, lang=None, returnLang=False):
        # if no language is given, search every language for the name
        langs = [lang] if lang is not None else list(self.refTypesForNames)
        for lang in langs:
            if name in self.refTypesForNames.get(lang, {}):
                if returnLang:
                    return self.refTypesForNames[lang][name], lang
                return self.refTypesForNames[lang][name]
        raise KeyError(str(name) + " was not found in any language")
def __getitem__(self, item):
return self.RefTypeMapping[item]
def getReftypeDict(self, item):
return {item: self[item]}
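# Usage sketch for refTypeMapper; the JSON layout is an assumption inferred from
# the lookups above (each reftype carries per-language display names), e.g.
#   {"vib_frequency": {"names": {"en": "Frequency", "de": "Frequenz"}}}
# so that
#   typeMapper.getNamesForReftype('vib_frequency', 'en')  -> 'Frequency'
#   typeMapper.getReftypeFromName('Frequenz', lang='de')  -> 'vib_frequency'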
typeMapper = refTypeMapper('vibRefTypesAndNames.json')
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(obj, np.ndarray):
return obj.tolist()
return json.JSONEncoder.default(self, obj)
class CDBAcces:
def __init__(self):
self.session_id = ""
        self.db = None  # set in self.connect_DB
self.cb_disabled = False # enable callbacks by default (only fitting-CB)
# the main data structure (dict) that hold the data base entry
# self.data = amp_data() # the currently used data (from file or selected in table)
self.QueryData = [] # Array of data sets received from Query
def connect_DB(self):
        if 'REMOVE_PROXY' in cfg:
            for k in list(os.environ):  # copy the keys, we mutate os.environ while iterating
                if "proxy" in k.lower():
                    os.environ.pop(k)
        if cfg.get('DB_UID'):  # if we have a database username we have to include it in the DB_URL
            CDB_SERVER_URL = self.combined_URL(cfg["DB_URL"], cfg['DB_UID'], cfg['DB_UPW'])
        else:
            CDB_SERVER_URL = cfg["DB_URL"]
# connect to the data base
try:
couch = CouchDB(
cfg['DB_UID'],
cfg['DB_UPW'],
url=CDB_SERVER_URL,
connect=True,
# adapter=myadapter(),
adapter=HTTPAdapter(),
encoder=dsiJSONEncoder # for JSON-serializing np.array
)
        except Exception:
            raise Exception("No CouchDB-Server running at %s" % CDB_SERVER_URL)
        print("successfully set up connection to CouchDB at %s" % CDB_SERVER_URL)
"""
# check if DB is existing, if not, create it
if not cfg['DB_NAME'] in couch.all_dbs():
self.db = couch.create_database(self.config['DB_NAME'])
if not self.db.exists():
raise Exception(
"Failure in creating db %s on server %s"
% (cfg['DB_NAME'], CDB_SERVER_URL)
)
else:
self.db = couch[cfg['DB_NAME']]
if not self.db.exists():
raise Exception(
"Failure in opening existing db %s on server %s"
% (self.config.cdb_dbname, self.config.cdb_server_url)
)
"""
try:
self.db = couch[cfg['DB_NAME']]
except KeyError as ke:
print(ke)
print("Database " + str(cfg['DB_NAME']) + " does not exisit please create it ")
    # (a block of the original listing is missing here; the GUI widgets used below
    #  -- DivWarnExists, ForceSubmitBtn, UUIDInp, TableSource -- are set up in it)
# submit the data{} structure to CouchDB data base
# identification is provided by the MD5-hash of the summary data file (base64 encoded string)
def submit(self, forced=False):
        assert (
            len(self.data["SummaryFile"]) > 50
        ), "No valid Summary File"  # make sure we don't have fake/empty data
# prepare the hashing
hash = md5()
hash.update(
self.data["SummaryFile"].encode("utf-8")
) # set the data for hashing
        ID = hash.hexdigest()  # get the hex string of the hash
record = {"_id": ID} # initialise the record-dict with the MD5-hash-id
        if ID in self.db:  # check whether the document already exists
            doc = self.db[ID]
print("document with ID=%s exists" % ID)
self.data["_rev"] = doc["_rev"]
self.data["_id"] = ID
# print("data already in data base") # do not submit but complain
self.DivWarnExists.visible = True
self.ForceSubmitBtn.visible = True
if forced: # forced submit button was pressed
# record.update(self.data) # append the data to the (still) blank record
for k, v in self.data.items():
doc[k] = v
doc.save()
# self.db.put(record) # overwrite existing DB-record
self.UUIDInp.value = ID
self.DivWarnExists.visible = False
self.ForceSubmitBtn.visible = False
else: # record does not yet exist
print("document with ID=%s does not yet exist" % ID)
record.update(self.data) # append the data to the (still) blank record dict
record['_id'] = ID # _id was overwritten with "" by the previous record.update(...)
doc = self.db.create_document(record)
doc_exists = ID in self.db
if doc_exists:
print('wrote %s to CouchDB' % ID)
self.UUIDInp.value = ID
else:
print("Failed to write to database")
print(self.db.server)
print(self.db)
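    # Because the _id is the MD5 of the summary file, submitting the same file
    # twice deterministically hits the "document exists" branch above. Sketch of
    # the derivation:
    #   md5(summary_file_text.encode("utf-8")).hexdigest()  -> 32-char hex _id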
# check whether a field-string is a valid date (for csv-data in ParseSummary() )
def is_valid_date(self, field):
try:
dmy = datetime.strptime(field, "%d.%m.%Y")
self.data["Date"] = datetime.strftime(dmy, "%Y-%m-%d")
return True
        except ValueError:
            return False
# combine the URL-parts for the data base access
    def combined_URL(self, url, name, pw):
        parts = urlparse(url)
        return f"{parts.scheme}://{name}:{pw}@{parts.netloc}/{parts.path}"
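    # Example (hypothetical values):
    #   combined_URL("http://db.example.org:5984", "alice", "secret")
    #   -> "http://alice:secret@db.example.org:5984/"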
# Query the data base with the info from the form
def queryDB(self):
if not self.cb_disabled:
# ---- set-up the query-json-string
criteria = []
for k, v in self.data.items():
if k in (
"FrequencyResponse",
"SummaryFile",
"DetailsFile",
"_rev",
"_id",
"SetupSetting",
"DutSetting",
):
pass
elif isinstance(v, str) and (v != ""): # for string-type fields
cond = {"$regex": v}
criteria.append({k: cond})
elif isinstance(v, (float, int)) and v != 0: # for numerical fields
cond = {"$gt": 0.9 * v, "$lt": 1.1 * v}
criteria.append({k: cond})
selector = {"$and": criteria}
docs = self.db.get_query_result(selector) # apply query
if docs: # if data received, build the result table's ColumnDataSource
self.QueryData = []
names = []
dates = []
S_nom = []
Fmin = []
Fmax = []
for d in docs: # this is for the query-result-table in the GUI
names.append(d["SummaryFileName"])
dates.append(d["Date"])
if d['DataType'] == 'Charge-Amp':
S_nom.append(1.0e-9 * d["SNom"])
else:
S_nom.append(d["SNom"])
Fmin.append(d["Fmin"])
Fmax.append(d["Fmax"])
self.QueryData.append(d)
self.TableSource.data = dict(
name=names, date=dates, S_nom=S_nom, f_low=Fmin, f_high=Fmax
)
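    # Example: with self.data = {"DeviceType": "8305", "SNom": 0.12} (hypothetical
    # values) the loop above produces the Mango selector
    #   {"$and": [{"DeviceType": {"$regex": "8305"}},
    #             {"SNom": {"$gt": 0.108, "$lt": 0.132}}]}
    # i.e. strings are matched as regular expressions and numbers within +/-10 %.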
def createUser(self, name, type="user", roles=[], password='defaultPassowrd'):
userDocument = {
'_id': 'org.couchdb.user:' + name,
'name': name,
'type': type,
'roles': roles,
'password': password
}
self.db.create_document(userDocument)
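    # Note: CouchDB keeps real user accounts in the special `_users` database, so
    # the document above only acts as a login if self.db points there. A minimal
    # sketch of targeting `_users` explicitly (assuming the connected `couch` client):
    #   couch['_users'].create_document(userDocument)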
class SensorCalibrationData:
def __init__(self, xlsFileName, deviceType: str, serialNo: str, owner: str, operator: str, type: str, date=None):
self.xlsFileName = xlsFileName
#wb_obj = openpyxl.load_workbook(xlsFileName,encodings='utf-8')
        # read the result sheet twice: once for the data (units row skipped) and once for the units row
        self.df = pd.read_excel(xlsFileName, 'ERGEBNISSE', engine='openpyxl', skiprows=[1])
        self.dfUnits = pd.read_excel(xlsFileName, 'ERGEBNISSE', engine='openpyxl', nrows=1)
        # (an earlier workaround round-tripped both DataFrames through a UTF-8 CSV
        #  export/import to fix Excel encoding issues; reading directly with the
        #  openpyxl engine makes that unnecessary)
self.dfUnits = self.dfUnits.fillna("None")
self.dfKeydict = dict(
zip(['Freq', 'Amp', 'Sensetivity', 'Sens_uncer', 'Mag_device', 'Phase', 'Phase_uncer', 'Phase_Device'],
self.df.keys()))
self.errorDf = pd.DataFrame()
self.errorDf[self.dfKeydict['Freq']] = self.df[self.dfKeydict['Freq']]
self.errorDf['mag_error'] = self.df[self.dfKeydict['Sens_uncer']] / 100 * self.df[self.dfKeydict['Sensetivity']]
self.errorDf['mag_error_low'] = self.df[self.dfKeydict['Sensetivity']] - self.errorDf['mag_error']
self.errorDf['mag_error_high'] = self.df[self.dfKeydict['Sensetivity']] + self.errorDf['mag_error']
self.errorDf['phase_error_low'] = self.df[self.dfKeydict['Phase']] - self.df[self.dfKeydict['Phase_uncer']]
self.errorDf['phase_error_high'] = self.df[self.dfKeydict['Phase']] + self.df[self.dfKeydict['Phase_uncer']]
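        # Worked example: Sensetivity = 10.0 with Sens_uncer = 1.5 (%) gives
        # mag_error = 1.5 / 100 * 10.0 = 0.15 and the band [9.85, 10.15], while
        # the phase band uses the already-absolute Phase_uncer (Phase +/- Phase_uncer).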
self.dsiMultivector = self.toDsiMultiVector()
self.dsiDCC = dsiDCC(self.dsiMultivector, deviceType, serialNo, owner, operator, type, date)
def createDict(self, deviceType='Not Set', serialNo="Not Set", owner="Not Set", date=None, operator="NotSet"):
        if date is None:
date = datetime.fromtimestamp(os.stat(self.xlsFileName).st_mtime).isoformat(timespec='microseconds')
freqResponseDict1D = {"Frequency": self.df[self.dfKeydict['Freq']].tolist(),
"ExcitationAmp": self.df[self.dfKeydict['Amp']].tolist(),
"Magnitude": self.df[self.dfKeydict['Sensetivity']].tolist(),
"StdevMag": self.errorDf['mag_error'].tolist(),
"RelStdevMagPerCent": self.df[self.dfKeydict['Sens_uncer']].tolist(),
"PhaseInDeg": self.df[self.dfKeydict['Phase']].tolist(),
"StdevPhaseInDeg": self.df[self.dfKeydict['Phase_uncer']].tolist(),
"Mag_Device": self.df[self.dfKeydict['Mag_device']].tolist(),
"Phase_Device": self.df[self.dfKeydict['Phase_Device']].tolist(),
"units": self.dfUnits.to_dict(orient='index')[0]
}
self.dict = {"_id": re.sub(r'\W+', '', str(date)) + re.sub(r'\W+', '', deviceType),
"DataType": "Acceleration sensor Transferfunction 1D",
"DeviceType": deviceType,
"SerialNo": serialNo,
"Owner": owner,
"Date": str(date),
"Operator": operator,
"FrequencyResponse": freqResponseDict1D
}
print(self.dict)
return self.dict
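    # Example of the generated _id: date '2022-07-08T12:00:00' and deviceType
    # 'Brueel & Kjaer 8305-001' collapse (all non-word characters stripped) to
    # '20220708T120000BrueelKjaer8305001'.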
def toDsiMultiVector(self):
freqDsiVector = dsiVector(self.df[self.dfKeydict['Freq']].to_numpy(), None, self.dfKeydict['Freq'],
DsiASCICOnversion[self.dfUnits[self.dfKeydict['Freq']][0]], uncerType=None,
refTypeAndNames=typeMapper.getReftypeDict(
typeMapper.getReftypeFromName(self.dfKeydict['Freq'])))
magDsiVector = dsiVector(self.df[self.dfKeydict['Sensetivity']].to_numpy(),
self.df[self.dfKeydict['Sens_uncer']].to_numpy(), self.dfKeydict['Sensetivity'],
DsiASCICOnversion[self.dfUnits[self.dfKeydict['Sensetivity']][0]],
uncerType='relPercent', refTypeAndNames=typeMapper.getReftypeDict(
typeMapper.getReftypeFromName(self.dfKeydict['Sensetivity'])))
phaseDsiVector = dsiVector(self.df[self.dfKeydict['Phase']].to_numpy(),
self.df[self.dfKeydict['Phase_uncer']].to_numpy(), self.dfKeydict['Phase'],
DsiASCICOnversion[self.dfUnits[self.dfKeydict['Phase']][0]], uncerType='relPercent',
refTypeAndNames=typeMapper.getReftypeDict(
typeMapper.getReftypeFromName(self.dfKeydict['Phase'])))
ampDsiVector = dsiVector(self.df[self.dfKeydict['Amp']].to_numpy(), None, self.dfKeydict['Amp'],
DsiASCICOnversion[self.dfUnits[self.dfKeydict['Amp']][0]], uncerType=None,
refTypeAndNames=typeMapper.getReftypeDict(
typeMapper.getReftypeFromName(self.dfKeydict['Amp'])))
return dsiMultiVector(freqDsiVector, [magDsiVector, phaseDsiVector, ampDsiVector],
interpolationTypes={self.dfKeydict['Phase']: ('scipy', 'linear'),
self.dfKeydict['Sensetivity']: ('scipy', 'linear'),
self.dfKeydict['Amp']: ('scipy', 'linear')})
    def uploadDataAndAttachemntsToCouchDB(self, cdb):
        self.dbEntry = cdb.create_document(self.dsiDCC.toDict())
        with open(self.xlsFileName, "rb") as fh:
            self.dbEntry.put_attachment('Measurement XLSX',
                                        'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
                                        fh.read())
        return self.dbEntry  # returned so callers can keep a handle on the document
    def uploadDUMYDCCXML(self, dumyXMLFileName, dataBaseEntry=None):
        if dataBaseEntry is None:
            if not hasattr(self, 'dbEntry'):
                raise RuntimeError(
                    "You need to upload the data from this class to the database with "
                    "uploadDataAndAttachemntsToCouchDB before uploading a dummy XML, or specify "
                    "the database entry to attach the XML to with the dataBaseEntry keyword.")
            dataBaseEntry = self.dbEntry
        with open(dumyXMLFileName, "rb") as fh:
            dataBaseEntry.put_attachment('DCC_Dummy', 'application/xml', fh.read())
    def uploadAttachmentFile(self, FileName, dataBaseEntry=None, aplicationType='application/pdf',
                             attachmentName='calibrationCertificateAnalog'):
        if dataBaseEntry is None:
            if not hasattr(self, 'dbEntry'):
                raise RuntimeError(
                    "You need to upload the data from this class to the database with "
                    "uploadDataAndAttachemntsToCouchDB before uploading an attachment, or specify "
                    "the database entry to attach the file to with the dataBaseEntry keyword.")
            dataBaseEntry = self.dbEntry
        with open(FileName, "rb") as fh:
            dataBaseEntry.put_attachment(attachmentName, aplicationType, fh.read())
xlsFileName = '1_7calibrationanddccsampledata/sinCal/20220708_8305/20220708_8305_SN1842876_Auswertung.xlsx'
calPDFFileName = '1_7calibrationanddccsampledata/sinCal/20220708_8305/20220708_8305_SN1842876_CalSchein.pdf'
# (xlsFileName, deviceType: str, serialNo: str, owner: str, operator: str, type: str, date=None):
callData = SensorCalibrationData(xlsFileName, 'Brueel & Kjaer 8305-001', "1842876", "Accelerator Meas GmbH",
"D. Nordmann", "Acceleration sensor Transferfunction 1D", None)
calDatadsiMultiVector = callData.toDsiMultiVector()
def upload_fit_data(attr, old, new):
print("fit data upload succeeded")
print(FI.value)
FI = FileInput(accept=".csv,.json,.txt")
FI.on_change('value', upload_fit_data)
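# Note: Bokeh's FileInput delivers the selected file base64-encoded in `value`;
# a callback that needs the text would decode it first, e.g.
#   import base64
#   text = base64.b64decode(new).decode('utf-8')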
# set output to static HTML file
output_file(filename="custom_filename.html", title="Static HTML file")
# This is important! Save curdoc() to make sure all threads
# see the same document.
doc = curdoc()
# Make the plot
tfPlot = figure(
height=500,
width=1000,
x_axis_label=str(callData.dfKeydict['Freq'] + ' in ' + callData.dfUnits[callData.dfKeydict['Freq']][0]),
y_axis_label=str(
callData.dfKeydict['Sensetivity'] + ' in ' + callData.dfUnits[callData.dfKeydict['Sensetivity']][0]),
y_range=(
callData.df[callData.dfKeydict['Sensetivity']].min(), callData.df[callData.dfKeydict['Sensetivity']].max()),
)
tfPlot.circle(
source=callData.df,
x=callData.dfKeydict['Freq'],
y=callData.dfKeydict['Sensetivity'],
)
# Add error bars
tfPlot.segment(
source=callData.errorDf,
x0=callData.dfKeydict['Freq'],
y0='mag_error_low',
x1=callData.dfKeydict['Freq'],
y1='mag_error_high',
line_width=2
)
tfPlot.extra_y_ranges = {"y2": Range1d(
start=callData.df[callData.dfKeydict['Phase']].min() - callData.df[callData.dfKeydict['Phase_uncer']].max(),
end=callData.df[callData.dfKeydict['Phase']].max() + callData.df[callData.dfKeydict['Phase_uncer']].max())}
tfPlot.add_layout(LinearAxis(y_range_name="y2",
axis_label=str(callData.dfKeydict['Phase'] + ' in ' +
callData.dfUnits[callData.dfKeydict['Phase']][0])), 'right', )
tfPlot.circle(source=callData.df, x=callData.dfKeydict['Freq'], y=callData.dfKeydict['Phase'], color="red",
y_range_name="y2")
tfPlot.segment(
source=callData.errorDf,
x0=callData.dfKeydict['Freq'],
y0='phase_error_low',
x1=callData.dfKeydict['Freq'],
y1='phase_error_high',
line_width=2,
color='red',
y_range_name="y2"
)
data_table = DataTable(
columns=[TableColumn(field=Ci, title=Ci) for Ci in callData.df.columns],
source=ColumnDataSource(callData.df),
height=500,
width=1000
)
show(column(FI, tfPlot, data_table))
# calDataDict=callData.createDict(deviceType='Brueel & Kjaer 8305-001',serialNo= "1502245",owner ="Customer Company",operator="D. Nordmann",date= None)
loadedDummydccJson = json.load(open('1_7calibrationanddccsampledata/sinCal/20220708_8305/20220708_8305_SN1842876_DCCDummy.json', 'r', encoding='utf-8'))
# dccJSOnDict is not defined anywhere in the listing as shown (presumably built in
# an elided section); assuming it is the serialized DCC data dict:
dccJSOnDict = callData.dsiDCC.toDict()
loadedDummydccJson['dcc:digitalCalibrationCertificate']['dcc:measurementResults']['dcc:measurementResult'][
    'dcc:results']['dcc:result']['dcc:data'] = dccJSOnDict
jsonstr = json.dumps(loadedDummydccJson, indent=2)
with open('1_7calibrationanddccsampledata/sinCal/20220708_8305/20220708_8305_SN1842876_DCCDummy_2.json','w',encoding='utf-8') as file:
file.write(jsonstr)
CDBA = CDBAcces()  # instantiate the CouchDB accessor defined above
CDBA.connect_DB()
caldataDBEntry = callData.uploadDataAndAttachemntsToCouchDB(CDBA.db)
dccDict = caldataDBEntry.copy()
callData.uploadDUMYDCCXML(
'1_7calibrationanddccsampledata/sinCal/20220708_8305/20220708_8305_SN1842876_DCCDummy.xml')
# Check that the document exists in the database
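# A minimal sketch of such a check, assuming caldataDBEntry is the document
# returned above:
#   if caldataDBEntry['_id'] in CDBA.db:
#       print('document %s exists in the database' % caldataDBEntry['_id'])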