|
# Allow TF32 matmuls on Ampere+ GPUs (exposed since torch 1.12).
# Compare (major, minor) as a tuple: the previous `major >= 1 and minor > 11`
# check was false for every torch 2.x release (e.g. 2.0 has minor == 0) and
# silently left TF32 disabled there.
if tuple(int(v) for v in torch.__version__.split(".")[:2]) >= (1, 12):
    torch.backends.cuda.matmul.allow_tf32 = True
|
49 | 49 |
|
def seed_torch(seed=1029):
    """Seed all RNGs (Python, NumPy, torch CPU/CUDA) and force deterministic
    kernels for reproducible inference.

    Args:
        seed: integer seed applied to every random number generator.
    """
    random.seed(seed)
    # NOTE(review): setting PYTHONHASHSEED at runtime only affects child
    # processes, not hashing in the current interpreter — kept for subprocesses.
    os.environ['PYTHONHASHSEED'] = str(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    torch.cuda.manual_seed(seed)
    torch.cuda.manual_seed_all(seed)  # seed every visible GPU, not just current
    # cuDNN autotuning ("benchmark") selects algorithms non-deterministically.
    torch.backends.cudnn.benchmark = False
    torch.backends.cudnn.deterministic = True
    # use_deterministic_algorithms(True) makes cuBLAS ops raise a RuntimeError
    # on CUDA >= 10.2 unless CUBLAS_WORKSPACE_CONFIG is set; set a default but
    # never clobber a value the user already exported.
    os.environ.setdefault('CUBLAS_WORKSPACE_CONFIG', ':4096:8')
    torch.use_deterministic_algorithms(True)
50 | 61 | @contextlib.contextmanager
|
51 | 62 | def temp_fasta_file(fasta_str: str):
|
52 | 63 | with tempfile.NamedTemporaryFile('w', suffix='.fasta') as fasta_file:
|
@@ -215,9 +226,11 @@ def inference_multimer_model(args):
|
215 | 226 | )
|
216 | 227 |
|
217 | 228 | output_dir_base = args.output_dir
|
| 229 | + |
218 | 230 | random_seed = args.data_random_seed
|
219 | 231 | if random_seed is None:
|
220 | 232 | random_seed = random.randrange(sys.maxsize)
|
| 233 | + # seed_torch(seed=1029) |
221 | 234 |
|
222 | 235 | feature_processor = feature_pipeline.FeaturePipeline(
|
223 | 236 | config.data
|
@@ -347,9 +360,12 @@ def inference_monomer_model(args):
|
347 | 360 | data_processor = data_pipeline.DataPipeline(template_featurizer=template_featurizer,)
|
348 | 361 |
|
349 | 362 | output_dir_base = args.output_dir
|
| 363 | + |
350 | 364 | random_seed = args.data_random_seed
|
351 | 365 | if random_seed is None:
|
352 | 366 | random_seed = random.randrange(sys.maxsize)
|
| 367 | + # seed_torch(seed=1029) |
| 368 | + |
353 | 369 | feature_processor = feature_pipeline.FeaturePipeline(config.data)
|
354 | 370 | if not os.path.exists(output_dir_base):
|
355 | 371 | os.makedirs(output_dir_base)
|
|
0 commit comments