Cosmetics
Former-commit-id: f7a02a39ae67af4fb835892be4ff8356f3e91c9d
parent 3407b5e47f
commit 43702a00c8
@@ -75,7 +75,8 @@ for i in unitcell_indices:
 parser.add_argument('--frequency_multiplier', action='store', type=float, default=1., help='Multiplies the frequencies in the TMatrix file by a given factor.')
 # TODO enable more flexible per-sublattice specification
 pargs=parser.parse_args()
-print(pargs)
+if pargs.verbose:
+    print(pargs, file = sys.stderr)
 
 maxlayer=pargs.maxlayer
 eVfreq = pargs.eVfreq
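For reference, a minimal sketch of the verbose-gated stderr logging pattern used in this hunk, assuming a --verbose flag defined alongside the other arguments (the flag's exact definition is not shown in the diff; only pargs.verbose is):

    import argparse
    import sys

    parser = argparse.ArgumentParser()
    parser.add_argument('--verbose', action='store_true', help='Print diagnostic information to stderr.')
    # ... other arguments as in the script ...
    pargs = parser.parse_args()

    if pargs.verbose:
        # Diagnostics go to stderr so they do not pollute the data written to stdout.
        print(pargs, file=sys.stderr)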
@@ -104,7 +105,8 @@ for oparg in pargs.ops:
         ops.append(((opm.group(2),) if opm.group(2) else unitcell_indices, opm.group(1), oparg[1]))
     else:
         raise # should not happen
-print(ops)
+if(verbose):
+    print(ops, file = sys.stderr)
 
 
 # -----------------finished basic CLI parsing (except for op arguments) ------------------
@@ -129,12 +131,9 @@ if pargs.lMax:
 lMax = pargs.lMax if pargs.lMax else lMaxTM
 my, ny = qpms.get_mn_y(lMax)
 nelem = len(my)
-print(TMatrices_orig.shape)
 if pargs.lMax: #force commandline specified lMax
     TMatrices_orig = TMatrices_orig[...,0:nelem,:,0:nelem]
-print(TMatrices_orig.shape)
 TMatrices = np.array(np.broadcast_to(TMatrices_orig[:,nx,:,:,:,:],(len(freqs_orig),unitcell_size,2,nelem,2,nelem)) )
-print(TMatrices.shape)
 xfl = qpms.xflip_tyty(lMax)
 yfl = qpms.yflip_tyty(lMax)
 zfl = qpms.zflip_tyty(lMax)
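The truncation above follows the usual vector spherical wave indexing in which a cutoff lMax gives nelem = lMax*(lMax+2) multipole indices per polarization; a sketch of the same slicing on a dummy array (the shapes and lMax values below are illustrative only, not taken from the script):

    import numpy as np

    lMaxTM = 3                     # lMax available in the T-matrix file (illustrative)
    lMax = 2                       # smaller cutoff forced from the command line (illustrative)
    nelem = lMax * (lMax + 2)      # number of (l, m) indices for l = 1..lMax

    # Dummy T-matrix array with the layout used above:
    # (frequency, particle, polarization, multipole, polarization, multipole)
    nelem_orig = lMaxTM * (lMaxTM + 2)
    TMatrices_orig = np.zeros((10, 4, 2, nelem_orig, 2, nelem_orig))

    # Keep only the first nelem multipole indices in both the "in" and "out" slots.
    TMatrices_trunc = TMatrices_orig[..., 0:nelem, :, 0:nelem]
    print(TMatrices_trunc.shape)   # (10, 4, 2, 8, 2, 8)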
@@ -221,7 +220,6 @@ for op in ops:
     else:
         raise #unknown operation; should not happen
 
-print(TMatrices.shape)
 TMatrices_interp = interpolate.interp1d(freqs_orig*interpfreqfactor, TMatrices, axis=0, kind='linear',fill_value="extrapolate")
 
 
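interpolate.interp1d above builds one interpolator over the frequency axis of the whole T-matrix array, which can then be evaluated at an arbitrary frequency. A minimal sketch on dummy data (shapes and the interpfreqfactor value are illustrative; scipy.interpolate.interp1d is used exactly as in the diff):

    import numpy as np
    from scipy import interpolate

    freqs_orig = np.linspace(1.0, 2.0, 10)          # tabulated frequencies (illustrative)
    TMatrices = np.random.rand(10, 4, 2, 8, 2, 8)   # (frequency, particle, pol, multipole, pol, multipole)
    interpfreqfactor = 1.0                          # unit conversion factor, as in the script

    # Linear interpolation along axis 0 (frequency); extrapolates outside the tabulated range.
    TMatrices_interp = interpolate.interp1d(freqs_orig * interpfreqfactor, TMatrices,
                                            axis=0, kind='linear', fill_value="extrapolate")

    freq = 1.55
    TMatrices_om = TMatrices_interp(freq)           # shape (4, 2, 8, 2, 8)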
@@ -241,12 +239,11 @@ kz = np.sqrt(k_0 - (kx ** 2 + ky ** 2))
 
 klist_full = np.stack((kx,ky,kz), axis=-1).reshape((-1,3))
 TMatrices_om = TMatrices_interp(freq)
-print(TMatrices_om.shape)
 
 chunkn = math.ceil(klist_full.size / 3 / chunklen)
 
 if verbose:
-    print('Evaluating %d k-points in %d chunks' % (klist_full.size / 3, chunkn), file = sys.stderr)
+    print('Evaluating %d k-points' % (klist_full.size / 3) + (' in %d chunks' % chunkn if chunkn > 1 else ''), file = sys.stderr)
     sys.stderr.flush()
 
 try:
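chunkn above is the number of batches of at most chunklen k-points (klist_full stores one row of three components per k-point, hence the division by 3). A sketch of the corresponding chunked iteration, assuming the k-points are simply processed one slice at a time (all values below are illustrative):

    import math
    import numpy as np

    chunklen = 1000                    # maximum number of k-points per chunk (illustrative)
    klist_full = np.zeros((2500, 3))   # (n_kpoints, 3) array of k-vectors (illustrative)

    chunkn = math.ceil(klist_full.size / 3 / chunklen)  # klist_full.size == 3 * n_kpoints

    for ci in range(chunkn):
        klist_chunk = klist_full[ci * chunklen : (ci + 1) * chunklen]
        # ... evaluate the scattering problem for this chunk of k-points ...
        print(ci, klist_chunk.shape)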
@@ -209,13 +209,17 @@ class Scattering_2D_zsym(Scattering):
             if not self.prepared_TE:
                 if self.interaction_matrix_TE is None:
                     self.build_interaction_matrix(0, verbose)
+                sbtime = _time_b(verbose, step = 'Calculating LU decomposition of the interaction matrix, TE part')
                 self.lupiv_TE = scipy.linalg.lu_factor(self.interaction_matrix_TE, overwrite_a = not keep_interaction_matrix)
+                _time_e(sbtime, verbose, step = 'Calculating LU decomposition of the interaction matrix, TE part')
                 self.prepared_TE = True
         if (TE_or_TM == 1): #TM
             if not self.prepared_TM:
                 if self.interaction_matrix_TM is None:
                     self.build_interaction_matrix(1, verbose)
+                sbtime = _time_b(verbose, step = 'Calculating LU decomposition of the interaction matrix, TM part')
                 self.lupiv_TM = scipy.linalg.lu_factor(self.interaction_matrix_TM, overwrite_a = not keep_interaction_matrix)
+                _time_e(sbtime, verbose, step = 'Calculating LU decomposition of the interaction matrix, TM part')
                 self.prepared_TM = True
         _time_e(btime, verbose)
 
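_time_b/_time_e are the module's own timing helpers (visible in the diff); the underlying point is that scipy.linalg.lu_factor is computed once per polarization and the factorization can then be reused for many right-hand sides via lu_solve. A standalone sketch of that pattern, with plain time.time() in place of the helpers and a random matrix standing in for the interaction matrix:

    import time
    import numpy as np
    import scipy.linalg

    n = 500
    interaction_matrix = np.random.rand(n, n) + np.eye(n)   # stand-in for the TE/TM interaction matrix

    t0 = time.time()
    # overwrite_a=True lets LAPACK reuse the matrix storage when the original is no longer needed,
    # mirroring overwrite_a = not keep_interaction_matrix above.
    lupiv = scipy.linalg.lu_factor(interaction_matrix, overwrite_a=True)
    print('LU decomposition took %g s' % (time.time() - t0))

    # The factorization can now be reused for any number of right-hand sides.
    rhs = np.random.rand(n)
    solution = scipy.linalg.lu_solve(lupiv, rhs)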
setup.py
@@ -34,7 +34,7 @@ qpms_c = Extension('qpms_c',
             )
 
 setup(name='qpms',
-      version = "0.2.12",
+      version = "0.2.13",
       packages=['qpms'],
       # setup_requires=['setuptools_cython'],
       install_requires=['cython>=0.21','quaternion','spherical_functions','py_gmm'],