# Build a FAISS similarity-search index from previously extracted feature files.
#
# Usage: <script> <exp_dir> <index_algorithm>
#   exp_dir          experiment directory; features are read from
#                    <exp_dir>/extracted and the index is written into exp_dir
#   index_algorithm  "Auto" or "KMeans" enables MiniBatchKMeans downsampling
#                    for very large feature sets; any other value skips it

exp_dir = str(sys.argv[1])
index_algorithm = str(sys.argv[2])

feature_dir = os.path.join(exp_dir, "extracted")
model_name = os.path.basename(exp_dir)

if not os.path.exists(feature_dir):
    print(
        f"Feature to generate index file not found at {feature_dir}. Did you run preprocessing and feature extraction steps?"
    )
    sys.exit(1)

index_filename_added = f"{model_name}.index"
index_filepath_added = os.path.join(exp_dir, index_filename_added)

# Only build the index when it does not already exist; an existing index is
# left untouched.
if not os.path.exists(index_filepath_added):
    npys = []
    print(f"Generating index for '{model_name}', this may take a while...")

    # Load every extracted feature file, sorted for deterministic ordering.
    # NOTE(review): assumes each entry in feature_dir is a loadable NumPy
    # file — a stray non-.npy file would make np.load raise. Confirm the
    # extraction step guarantees this.
    for name in sorted(os.listdir(feature_dir)):
        file_path = os.path.join(feature_dir, name)
        phone = np.load(file_path)
        npys.append(phone)

    # Guard the np.concatenate below, which raises on an empty sequence.
    if not npys:
        print(
            f"Feature files in {feature_dir} could not be loaded correctly. Did you run preprocessing and feature extraction steps?"
        )
        sys.exit(1)

    big_npy = np.concatenate(npys, axis=0)

    # Shuffle rows so downstream training/downsampling sees an unbiased sample.
    big_npy_idx = np.arange(big_npy.shape[0])
    np.random.shuffle(big_npy_idx)
    big_npy = big_npy[big_npy_idx]

    # For very large feature sets (> 200k vectors), optionally compress to
    # 10k k-means centroids before training the IVF index.
    if big_npy.shape[0] > 2e5 and index_algorithm in ("Auto", "KMeans"):
        big_npy = (
            MiniBatchKMeans(
                n_clusters=10000,
                verbose=True,
                batch_size=256 * cpu_count(),
                compute_labels=False,
                init="random",
            )
            .fit(big_npy)
            .cluster_centers_
        )

    # Number of IVF cells: 16*sqrt(N) heuristic, capped so that every cell is
    # trained on at least ~39 points.
    n_ivf = min(int(16 * np.sqrt(big_npy.shape[0])), big_npy.shape[0] // 39)

    # index_added
    # NOTE(review): feature dimensionality is hard-coded to 768 — presumably
    # the extractor's output width; confirm against the extraction step.
    index_added = faiss.index_factory(768, f"IVF{n_ivf},Flat")
    index_ivf_added = faiss.extract_index_ivf(index_added)
    index_ivf_added.nprobe = 1
    index_added.train(big_npy)

    # Add vectors in fixed-size batches to bound peak memory during insertion.
    batch_size_add = 8192
    for i in range(0, big_npy.shape[0], batch_size_add):
        index_added.add(big_npy[i : i + batch_size_add])

    faiss.write_index(index_added, index_filepath_added)
    print(f"Saved index file '{index_filepath_added}'")