Commit 908704e
Parent(s): 93b494a
update

Files changed:
- app.py (+12 -9)
- app_utils.py (+7 -7)
app.py
CHANGED
@@ -302,13 +302,16 @@ with gr.Blocks() as demo:
         stage1_id = uuid.uuid4().hex
         os.mkdir(rootpath+'/'+stage1_id+'/')
 
+        inputname = os.path.basename(str(in_loc))
+        outputname = os.path.splitext(inputname)[0]+'_mapping_result.csv'
+
         stage1_info = {
             "filePath" : rootpath+'/'+stage1_id+'/',
             "fileNames" : {
                 "inputLocation" : in_loc,
                 "inputMontage" : rootpath+'/'+stage1_id+'/input_montage.png',
                 "mappedMontage" : rootpath+'/'+stage1_id+'/mapped_montage.png',
-                "outputResult" : rootpath+'/'+stage1_id+'/
+                "outputResult" : rootpath+'/'+stage1_id+'/'+outputname
             },
             "state" : "step1-initializing",
             "step2" : {

@@ -799,14 +802,14 @@ with gr.Blocks() as demo:
         stage2_id = uuid.uuid4().hex
         os.mkdir(rootpath+'/'+stage2_id+'/')
 
-
-
+        inputname = os.path.basename(str(in_data))
+        outputname = modelname+'_'+os.path.splitext(inputname)[0]+'.csv'
 
         stage2_info = {
             "filePath" : rootpath+'/'+stage2_id+'/',
             "fileNames" : {
                 "inputData" : in_data,
-                "outputData" : rootpath+'/'+stage2_id+'/'+
+                "outputData" : rootpath+'/'+stage2_id+'/'+outputname
             },
             "state" : "running",
             "sampleRate" : int(samplerate)

@@ -822,8 +825,8 @@ with gr.Blocks() as demo:
         mapping_results = stage1_info["mappingResults"]
         samplerate = stage2_info["sampleRate"]
         filepath = stage2_info["filePath"]
-
-
+        inputname = stage2_info["fileNames"]["inputData"]
+        outputname = stage2_info["fileNames"]["outputData"]
 
         break_flag = False
         for i in range(batch_num):

@@ -838,13 +841,13 @@ with gr.Blocks() as demo:
                 os.mkdir(filepath+'temp_data/')
 
                 # step1: Reorder input data
-                data_shape = app_utils.reorder_data(new_idx, fill_flags,
+                data_shape = app_utils.reorder_data(new_idx, fill_flags, inputname, filepath+'temp_data/'+m_filename)
                 # step2: Data preprocessing
                 total_file_num = utils.preprocessing(filepath+'temp_data/', m_filename, samplerate)
                 # step3: Signal reconstruction
                 utils.reconstruct(modelname, total_file_num, filepath+'temp_data/', d_filename, samplerate)
                 # step4: Restore original order
-                app_utils.restore_order(i, data_shape, new_idx, fill_flags, filepath+'temp_data/'+d_filename,
+                app_utils.restore_order(i, data_shape, new_idx, fill_flags, filepath+'temp_data/'+d_filename, outputname)
             except FileNotFoundError:
                 print('break!!')
                 break_flag = True

@@ -861,7 +864,7 @@ with gr.Blocks() as demo:
             run_btn : gr.Button(visible=True),
             cancel_btn : gr.Button(visible=False),
             batch_md : gr.Markdown(visible=False),
-            out_data_file : gr.File(
+            out_data_file : gr.File(outputname, visible=True)}
 
     run_btn.click(
         fn = reset_stage2,
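A minimal sketch of the naming scheme these app.py hunks introduce: instead of a fixed output path, the result file name is now derived from the uploaded input's basename (plus, for stage 2, the selected model name). The paths and model name below are hypothetical placeholders, not values from the app.

import os

# stage 1: channel-mapping result name derived from the uploaded location file
in_loc = '/tmp/uploads/montage_64ch.loc'                     # hypothetical upload path
inputname = os.path.basename(str(in_loc))                    # 'montage_64ch.loc'
outputname = os.path.splitext(inputname)[0] + '_mapping_result.csv'
# -> 'montage_64ch_mapping_result.csv'

# stage 2: denoised-data name prefixed with the chosen model
in_data = '/tmp/uploads/session01.csv'                       # hypothetical upload path
modelname = 'IC-U-Net'                                       # hypothetical model choice
outputname = modelname + '_' + os.path.splitext(os.path.basename(str(in_data)))[0] + '.csv'
# -> 'IC-U-Net_session01.csv'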
app_utils.py
CHANGED
@@ -10,9 +10,9 @@ from scipy.interpolate import Rbf
 from scipy.optimize import linear_sum_assignment
 from sklearn.neighbors import NearestNeighbors
 
-def reorder_data(idx_order, fill_flags, filename, new_filename):
+def reorder_data(idx_order, fill_flags, inputname, m_filename):
     # read the input data
-    raw_data = utils.read_train_data(
+    raw_data = utils.read_train_data(inputname)
     #print(raw_data.shape)
     new_data = np.zeros((30, raw_data.shape[1]))
 

@@ -26,23 +26,23 @@ def reorder_data(idx_order, fill_flags, filename, new_filename):
         tmp_data = [raw_data[j, :] for j in idx_set]
         new_data[i, :] = np.mean(tmp_data, axis=0)
 
-    utils.save_data(new_data,
+    utils.save_data(new_data, m_filename)
     return raw_data.shape
 
-def restore_order(batch_cnt, raw_data_shape, idx_order, fill_flags,
+def restore_order(batch_cnt, raw_data_shape, idx_order, fill_flags, d_filename, outputname):
     # read the denoised data
-    d_data = utils.read_train_data(
+    d_data = utils.read_train_data(d_filename)
     if batch_cnt == 0:
         new_data = np.zeros((raw_data_shape[0], d_data.shape[1]))
         #print(new_data.shape)
     else:
-        new_data = utils.read_train_data(
+        new_data = utils.read_train_data(outputname)
 
     for i, (idx_set, flag) in enumerate(zip(idx_order, fill_flags)):
         if flag == False: # ignore if this channel was filled using "fillmode"
             new_data[idx_set[0], :] = d_data[i, :]
 
-    utils.save_data(new_data,
+    utils.save_data(new_data, outputname)
     return
 
 def get_matched(tpl_order, tpl_dict):
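To make the new reorder_data / restore_order signatures concrete, here is a self-contained numpy sketch of the round trip they implement: input channels are averaged into the template order for denoising, then the denoised rows are written back to their original positions, skipping channels that were only filled in via "fillmode". The shapes, idx_order and fill_flags below are hypothetical; in the app the arrays are read and written through utils.read_train_data / utils.save_data using the filenames now passed in explicitly.

import numpy as np

raw_data = np.random.randn(4, 100)          # hypothetical input: 4 channels x 100 samples
idx_order = [[0], [1, 2], [3], [0]]         # hypothetical mapping: one index set per template channel
fill_flags = [False, False, False, True]    # last template channel was filled via "fillmode"

# reorder: build the template-ordered array, averaging input channels mapped to the same slot
new_data = np.zeros((len(idx_order), raw_data.shape[1]))
for i, idx_set in enumerate(idx_order):
    new_data[i, :] = np.mean([raw_data[j, :] for j in idx_set], axis=0)

d_data = new_data                           # stand-in for the model's denoised output

# restore: write denoised rows back to their original channel positions,
# ignoring channels that only exist to satisfy the model's fixed layout
restored = np.zeros_like(raw_data)
for i, (idx_set, flag) in enumerate(zip(idx_order, fill_flags)):
    if not flag:
        restored[idx_set[0], :] = d_data[i, :]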