Datasets:

ArXiv:
DOI:
License:
zhuwq0 committed on
Commit
eca3c1b
·
1 Parent(s): 4390ac2
Files changed (2) hide show
  1. merge_hdf5.py +56 -0
  2. upload.py +11 -0
merge_hdf5.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # %%
2
+ import os
3
+
4
+ import h5py
5
+ import matplotlib.pyplot as plt
6
+ from tqdm import tqdm
7
+
8
+ # %%
9
+ h5_dir = "waveform_h5"
10
+ h5_out = "waveform.h5"
11
+ h5_train = "waveform_train.h5"
12
+ h5_test = "waveform_test.h5"
13
+
14
+ h5_files = sorted(os.listdir(h5_dir))
15
+ train_files = h5_files[:-1]
16
+ test_files = h5_files[-1:]
17
+ print(f"train files: {train_files}")
18
+ print(f"test files: {test_files}")
19
+
20
+ # %%
21
+ with h5py.File(h5_out, "w") as fp:
22
+ # external linked file
23
+ for h5_file in h5_files:
24
+ with h5py.File(os.path.join(h5_dir, h5_file), "r") as f:
25
+ for event in tqdm(f.keys(), desc=h5_file, total=len(f.keys())):
26
+ if event not in fp:
27
+ fp[event] = h5py.ExternalLink(os.path.join(h5_dir, h5_file), event)
28
+ else:
29
+ print(f"{event} already exists")
30
+ continue
31
+
32
+ # %%
33
+ with h5py.File(h5_train, "w") as fp:
34
+ # external linked file
35
+ for h5_file in train_files:
36
+ with h5py.File(os.path.join(h5_dir, h5_file), "r") as f:
37
+ for event in tqdm(f.keys(), desc=h5_file, total=len(f.keys())):
38
+ if event not in fp:
39
+ fp[event] = h5py.ExternalLink(os.path.join(h5_dir, h5_file), event)
40
+ else:
41
+ print(f"{event} already exists")
42
+ continue
43
+
44
+ # %%
45
+ with h5py.File(h5_test, "w") as fp:
46
+ # external linked file
47
+ for h5_file in test_files:
48
+ with h5py.File(os.path.join(h5_dir, h5_file), "r") as f:
49
+ for event in tqdm(f.keys(), desc=h5_file, total=len(f.keys())):
50
+ if event not in fp:
51
+ fp[event] = h5py.ExternalLink(os.path.join(h5_dir, h5_file), event)
52
+ else:
53
+ print(f"{event} already exists")
54
+ continue
55
+
56
+ # %%
upload.py ADDED
@@ -0,0 +1,11 @@
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from huggingface_hub import HfApi
2
+
3
+ api = HfApi()
4
+
5
+ # Upload all the content from the local folder to your remote Space.
6
+ # By default, files are uploaded at the root of the repo
7
+ api.upload_folder(
8
+ folder_path="./",
9
+ repo_id="AI4EPS/quakeflow_nc",
10
+ repo_type="space",
11
+ )