get first paper and add meta-review
parent
887bcd13b7
commit
cc34bfe953
20 changed files with 8175 additions and 0 deletions
74
trust/EMSOFT24/mmlab_checker.py
Normal file
@@ -0,0 +1,74 @@
import numpy as np
from datetime import datetime, timedelta
from glob import glob

# PARAMETERS
#trace_time_length = timedelta(hours=4)
sampling_rate = 20  # Hz -- left uncommented because checker_long_high below relies on it
#chunk_time_length = timedelta(minutes=10)  # chunk of time to get one check value

#print(f"Considering {trace_time_length/chunk_time_length} chunks of {chunk_time_length} over a {trace_time_length} trace.")

data_folder = "./data"
data_selector = "/ARTHUR-103/*.npy"

# load data
data_filenames = sorted(glob(data_folder + data_selector))
print(f"Listed {len(data_filenames)} traces.")


# define the policy checking function
def checker_long_high(labels):
    """Check a policy on a trace (array of state labels).

    Produce a single ternary value: 1=OK, 0=Unsure, -1=Not OK.
    Policy: no continuous High load (label=2) for more than 3 minutes.
    """
    req_L = int(timedelta(minutes=3).total_seconds() * sampling_rate)
    # inefficient non-numpy implementation, temporary solution
    for i in range(labels.shape[0]):
        if labels[i] == 2 or labels[i] == -1:
            # measure the run of High (2) or unknown (-1) labels starting at i
            k = 1
            while i + k < labels.shape[0] and (labels[i + k] == 2 or labels[i + k] == -1):
                k += 1
            if k == req_L:
                # the run reaches the 3-minute limit: Unsure if unknown labels
                # are involved, otherwise a clear violation
                if -1 in labels[i:i + k]:
                    return 0
                else:
                    return -1
    return 1


def checker_off(trace):
    """Check each fixed-length window of a raw trace against a threshold."""
    chunk_sample_length = 30   # window length in samples
    thresh = 1000              # threshold on the raw trace values
    N = trace.shape[0]
    nbr_windows = int(N / chunk_sample_length)
    windows = trace[:chunk_sample_length * nbr_windows].reshape(nbr_windows, chunk_sample_length)
    res = np.zeros(windows.shape[0], dtype=int)
    for i in range(windows.shape[0]):
        if np.any(windows[i] < thresh):
            res[i] = -1
        elif np.all(windows[i] >= thresh):
            res[i] = 1
        else:
            pass  # value is already 0 by default
    return res


total_counts = {"-1": 0, "0": 0, "1": 0}
all_res = []
for f in data_filenames:
    trace = np.load(f)
    res = checker_off(trace)
    print(res.shape)
    unique, counts = np.unique(res, return_counts=True)
    counts = dict(zip(unique, counts))
    for key in counts.keys():
        total_counts[str(key)] += counts[key]

    all_res.append(res.reshape(1, -1))

    # note: total_counts is cumulative over the traces processed so far
    print(f"{f.split('/')[-1]}: {res.shape[0]} evaluations, {total_counts}")

final_res = np.concatenate(all_res, axis=0)
print(final_res.shape)
np.save(f"generated/{data_filenames[0].split('/')[-1].split('--')[0]}.npy", final_res)
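As a sanity check, a minimal sketch of how the two checkers could be exercised on synthetic data, assuming it is appended at the bottom of mmlab_checker.py so that numpy, sampling_rate and both checker functions are in scope (the values, lengths and label pattern below are invented for illustration, not taken from the ARTHUR-103 recordings):

# --- illustration only: synthetic traces, not part of the dataset ---
# raw trace: 10 minutes at sampling_rate, values around 1200 with one dip below the 1000 threshold
fake_trace = np.full(10 * 60 * sampling_rate, 1200.0)
fake_trace[600:660] = 800.0
print(checker_off(fake_trace))         # -1 on the windows covering the dip, 1 elsewhere

# label trace: 4 continuous minutes of High load (label 2) in an otherwise Low trace
fake_labels = np.zeros(10 * 60 * sampling_rate, dtype=int)
fake_labels[1000:1000 + 4 * 60 * sampling_rate] = 2
print(checker_long_high(fake_labels))  # -1: the continuous High run exceeds the 3-minute policy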