@@ -630,11 +630,13 @@ def __init__(self, fname):
         else:
             self.metric = MinkowskiMetric()
         coords = list(CoordinateDict[coordinates].values())[::-1][-dimension:]
-
+
         for s in self.file.keys():
             if any([k.startswith("X") for k in self.file[s].keys()]):
                 # cell-centered coords
-                cc_coords = {c: self.file[s][f"X{i+1}"] for i, c in enumerate(coords[::-1])}
+                cc_coords = {
+                    c: self.file[s][f"X{i+1}"] for i, c in enumerate(coords[::-1])
+                }
                 # cell edges
                 cell_1 = {
                     f"{c}_1": (
@@ -664,7 +666,7 @@ def __init__(self, fname):
                 )

         self.dataset = xr.Dataset()
-
+
         # -------------------------------- load fields ------------------------------- #
         fields = None
         f_outsteps = []
@@ -678,6 +680,10 @@ def __init__(self, fname):
                 f_times.append(self.file[s]["Time"][()])
                 f_steps.append(self.file[s]["Step"][()])

+        f_outsteps = sorted(f_outsteps, key=lambda x: int(x.replace("Step", "")))
+        f_steps = sorted(f_steps)
+        f_times = np.array(sorted(f_times), dtype=np.float64)
+
         for k in self.file.attrs.keys():
             if (
                 type(self.file.attrs[k]) == bytes
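The added `sorted(..., key=...)` calls address the fact that HDF5 group names of the form `Step<N>` come back in lexicographic order, so the numeric suffix must be extracted before sorting; times and steps are sorted alongside so they stay aligned with the output steps. A small standalone illustration of the difference (the group names here are invented examples):

```python
import numpy as np

# Invented group names of the form "Step<N>"; a plain string sort misorders them.
f_outsteps = ["Step10", "Step2", "Step0"]
f_times = [1.0, 0.2, 0.0]

print(sorted(f_outsteps))
# ['Step0', 'Step10', 'Step2']  <- lexicographic, wrong

print(sorted(f_outsteps, key=lambda x: int(x.replace("Step", ""))))
# ['Step0', 'Step2', 'Step10']  <- numeric order, matching the sorted times

print(np.array(sorted(f_times), dtype=np.float64))
# [0.  0.2 1. ]
```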
@@ -722,7 +728,7 @@ def __init__(self, fname):
                     },
                 )
                 self.dataset[k_] = x
-
+
         # ------------------------------ load particles ------------------------------ #
         particles = None
         p_outsteps = []
@@ -735,19 +741,27 @@ def __init__(self, fname):
                 p_outsteps.append(s)
                 p_times.append(self.file[s]["Time"][()])
                 p_steps.append(self.file[s]["Step"][()])
-
+
+        p_outsteps = sorted(p_outsteps, key=lambda x: int(x.replace("Step", "")))
+        p_steps = sorted(p_steps)
+        p_times = np.array(sorted(p_times), dtype=np.float64)
+
         self._particles = {}
-

         if len(p_outsteps) > 0:
             species = np.unique(
-                [int(pq.split("_")[1]) for pq in self.file[p_outsteps[0]].keys() if pq.startswith("p")]
+                [
+                    int(pq.split("_")[1])
+                    for pq in self.file[p_outsteps[0]].keys()
+                    if pq.startswith("p")
+                ]
             )

             def list_to_ragged(arr):
                 max_len = np.max([len(a) for a in arr])
                 return map(
-                    lambda a: np.concatenate([a, np.full(max_len - len(a), np.nan)]), arr
+                    lambda a: np.concatenate([a, np.full(max_len - len(a), np.nan)]),
+                    arr,
                 )

             for s in species:
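The nested `list_to_ragged` helper pads each per-step particle array with NaN up to the length of the longest one, so the per-step arrays can be stacked into a rectangular `(t, id)` array for `xr.DataArray`. A self-contained sketch of that behaviour with toy data:

```python
import numpy as np
import dask.array as da


def list_to_ragged(arr):
    # Pad every 1D array with NaN so all of them reach the longest length.
    max_len = np.max([len(a) for a in arr])
    return map(
        lambda a: np.concatenate([a, np.full(max_len - len(a), np.nan)]),
        arr,
    )


# Toy per-output-step particle quantities; the particle count changes per step.
per_step = [np.array([1.0, 2.0]), np.array([3.0, 4.0, 5.0]), np.array([6.0])]
stacked = da.from_array(list(list_to_ragged(per_step)))
print(stacked.shape)  # (3, 3)
print(stacked.compute())
# [[ 1.  2. nan]
#  [ 3.  4.  5.]
#  [ 6. nan nan]]
```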
@@ -773,11 +787,16 @@ def list_to_ragged(arr):
                         if "p" + q in self.file[step_k].keys():
                             prtl_data[q_].append(self.file[step_k]["p" + q])
                         else:
-                            prtl_data[q_].append(np.full_like(prtl_data[q_][-1], np.nan))
+                            prtl_data[q_].append(
+                                np.full_like(prtl_data[q_][-1], np.nan)
+                            )
                     prtl_data[q_] = list_to_ragged(prtl_data[q_])
                     prtl_data[q_] = da.from_array(list(prtl_data[q_]))
                     prtl_data[q_] = xr.DataArray(
-                        prtl_data[q_], dims=["t", "id"], name=q_, coords={"t": p_times, "s": ("t", p_steps)}
+                        prtl_data[q_],
+                        dims=["t", "id"],
+                        name=q_,
+                        coords={"t": p_times, "s": ("t", p_steps)},
                     )
                 if coordinates == "sph":
                     prtl_data["x"] = (
@@ -794,7 +813,7 @@ def list_to_ragged(arr):
                         prtl_data[PrtlDict[coordinates]["X2"]]
                     )
                 self._particles[s] = xr.Dataset(prtl_data)
-
+
         # ------------------------------- load spectra ------------------------------- #
         spectra = None
         s_outsteps = []
@@ -807,13 +826,21 @@ def list_to_ragged(arr):
                 s_outsteps.append(s)
                 s_times.append(self.file[s]["Time"][()])
                 s_steps.append(self.file[s]["Step"][()])
-
+
+        s_outsteps = sorted(s_outsteps, key=lambda x: int(x.replace("Step", "")))
+        s_steps = sorted(s_steps)
+        s_times = np.array(sorted(s_times), dtype=np.float64)
+
         self._spectra = xr.Dataset()
         log_bins = self.file.attrs["output.spectra.log_bins"]

         if len(s_outsteps) > 0:
             species = np.unique(
-                [int(pq.split("_")[1]) for pq in self.file[s_outsteps[0]].keys() if pq.startswith("sN")]
+                [
+                    int(pq.split("_")[1])
+                    for pq in self.file[s_outsteps[0]].keys()
+                    if pq.startswith("sN")
+                ]
             )
             e_bins = self.file[s_outsteps[0]]["sEbn"]
             if log_bins:
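The reformatted comprehension above pulls species indices out of group keys of the form `sN_<index>`; the particle block earlier uses the same pattern with the `p` prefix. A quick illustration with invented keys:

```python
import numpy as np

# Invented per-step spectra keys: an energy-bin entry plus "sN_<species>" entries.
keys = ["sEbn", "sN_1", "sN_2", "sN_1"]
species = np.unique([int(pq.split("_")[1]) for pq in keys if pq.startswith("sN")])
print(species)  # [1 2]
```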
@@ -839,8 +866,6 @@ def list_to_ragged(arr):
                 )
                 self._spectra[f"n_{sp}"] = x

-
-
     def __del__(self):
         self.file.close()

@@ -855,15 +880,11 @@ def __enter__(self):

     def __exit__(self, exc_type, exc_value, traceback):
         self.file.close()
-        self.close()
-        for _, v in self._particles.items():
-            del v
-        del self

     @property
     def particles(self):
         return self._particles
-
+
     @property
     def spectra(self):
         return self._spectra
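With the manual cleanup removed from `__exit__`, closing the HDF5 file is all the context manager does on exit. A usage sketch, assuming the reader class is named `Data` and that `__enter__` returns `self` (neither is visible in this diff), with a placeholder filename:

```python
# Hypothetical usage; "Data" and "output.h5" are placeholders for illustration.
with Data("output.h5") as d:
    flds = d.dataset      # fields as an xarray Dataset
    prtls = d.particles   # dict of per-species xarray Datasets
    specs = d.spectra     # xarray Dataset of spectra
# the underlying HDF5 file is closed by __exit__ here
```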