format java code in marytts-signalproc

incantation:
$ mvn com.googlecode.maven-java-formatter-plugin:maven-java-formatter-plugin:format -pl marytts-signalproc
Ingmar Steiner 2014-12-19 13:14:32 +01:00
Parent: 04063bbd6d
Commit: 78b677d3ab
403 changed files: 59011 additions and 63198 deletions

View file

@@ -1,199 +1,184 @@
package Jama;
/** Cholesky Decomposition.
<P>
For a symmetric, positive definite matrix A, the Cholesky decomposition
is an lower triangular matrix L so that A = L*L'.
<P>
If the matrix is not symmetric or positive definite, the constructor
returns a partial decomposition and sets an internal flag that may
be queried by the isSPD() method.
*/
/**
* Cholesky Decomposition.
* <P>
* For a symmetric, positive definite matrix A, the Cholesky decomposition is an lower triangular matrix L so that A = L*L'.
* <P>
* If the matrix is not symmetric or positive definite, the constructor returns a partial decomposition and sets an internal flag
* that may be queried by the isSPD() method.
*/
public class CholeskyDecomposition implements java.io.Serializable {
/* ------------------------
Class variables
* ------------------------ */
/*
* ------------------------ Class variables ------------------------
*/
/** Array for internal storage of decomposition.
@serial internal array storage.
*/
private double[][] L;
/**
* Array for internal storage of decomposition.
*
* @serial internal array storage.
*/
private double[][] L;
/** Row and column dimension (square matrix).
@serial matrix dimension.
*/
private int n;
/**
* Row and column dimension (square matrix).
*
* @serial matrix dimension.
*/
private int n;
/** Symmetric and positive definite flag.
@serial is symmetric and positive definite flag.
*/
private boolean isspd;
/**
* Symmetric and positive definite flag.
*
* @serial is symmetric and positive definite flag.
*/
private boolean isspd;
/* ------------------------
Constructor
* ------------------------ */
/*
* ------------------------ Constructor ------------------------
*/
/** Cholesky algorithm for symmetric and positive definite matrix.
@param A Square, symmetric matrix.
@return Structure to access L and isspd flag.
*/
/**
* Cholesky algorithm for symmetric and positive definite matrix.
*
* @param A
* Square, symmetric matrix.
* @return Structure to access L and isspd flag.
*/
public CholeskyDecomposition (Matrix Arg) {
public CholeskyDecomposition(Matrix Arg) {
// Initialize.
double[][] A = Arg.getArray();
n = Arg.getRowDimension();
L = new double[n][n];
isspd = (Arg.getColumnDimension() == n);
// Main loop.
for (int j = 0; j < n; j++) {
double[] Lrowj = L[j];
double d = 0.0;
for (int k = 0; k < j; k++) {
double[] Lrowk = L[k];
double s = 0.0;
for (int i = 0; i < k; i++) {
s += Lrowk[i] * Lrowj[i];
}
Lrowj[k] = s = (A[j][k] - s) / L[k][k];
d = d + s * s;
isspd = isspd & (A[k][j] == A[j][k]);
}
d = A[j][j] - d;
isspd = isspd & (d > 0.0);
L[j][j] = Math.sqrt(Math.max(d, 0.0));
for (int k = j + 1; k < n; k++) {
L[j][k] = 0.0;
}
}
}
// Initialize.
double[][] A = Arg.getArray();
n = Arg.getRowDimension();
L = new double[n][n];
isspd = (Arg.getColumnDimension() == n);
// Main loop.
for (int j = 0; j < n; j++) {
double[] Lrowj = L[j];
double d = 0.0;
for (int k = 0; k < j; k++) {
double[] Lrowk = L[k];
double s = 0.0;
for (int i = 0; i < k; i++) {
s += Lrowk[i]*Lrowj[i];
}
Lrowj[k] = s = (A[j][k] - s)/L[k][k];
d = d + s*s;
isspd = isspd & (A[k][j] == A[j][k]);
}
d = A[j][j] - d;
isspd = isspd & (d > 0.0);
L[j][j] = Math.sqrt(Math.max(d,0.0));
for (int k = j+1; k < n; k++) {
L[j][k] = 0.0;
}
}
}
/*
* ------------------------ Temporary, experimental code. ------------------------ *\
*
* \** Right Triangular Cholesky Decomposition. <P> For a symmetric, positive definite matrix A, the Right Cholesky
* decomposition is an upper triangular matrix R so that A = R'*R. This constructor computes R with the Fortran inspired
* column oriented algorithm used in LINPACK and MATLAB. In Java, we suspect a row oriented, lower triangular decomposition is
* faster. We have temporarily included this constructor here until timing experiments confirm this suspicion.\
*
* \** Array for internal storage of right triangular decomposition. **\ private transient double[][] R;
*
* \** Cholesky algorithm for symmetric and positive definite matrix.
*
* @param A Square, symmetric matrix.
*
* @param rightflag Actual value ignored.
*
* @return Structure to access R and isspd flag.\
*
* public CholeskyDecomposition (Matrix Arg, int rightflag) { // Initialize. double[][] A = Arg.getArray(); n =
* Arg.getColumnDimension(); R = new double[n][n]; isspd = (Arg.getColumnDimension() == n); // Main loop. for (int j = 0; j <
* n; j++) { double d = 0.0; for (int k = 0; k < j; k++) { double s = A[k][j]; for (int i = 0; i < k; i++) { s = s -
* R[i][k]*R[i][j]; } R[k][j] = s = s/R[k][k]; d = d + s*s; isspd = isspd & (A[k][j] == A[j][k]); } d = A[j][j] - d; isspd =
* isspd & (d > 0.0); R[j][j] = Math.sqrt(Math.max(d,0.0)); for (int k = j+1; k < n; k++) { R[k][j] = 0.0; } } }
*
* \** Return upper triangular factor.
*
* @return R\
*
* public Matrix getR () { return new Matrix(R,n,n); }
*
* \* ------------------------ End of temporary code. ------------------------
*/
/* ------------------------
Temporary, experimental code.
* ------------------------ *\
/*
* ------------------------ Public Methods ------------------------
*/
\** Right Triangular Cholesky Decomposition.
<P>
For a symmetric, positive definite matrix A, the Right Cholesky
decomposition is an upper triangular matrix R so that A = R'*R.
This constructor computes R with the Fortran inspired column oriented
algorithm used in LINPACK and MATLAB. In Java, we suspect a row oriented,
lower triangular decomposition is faster. We have temporarily included
this constructor here until timing experiments confirm this suspicion.
*\
/**
* Is the matrix symmetric and positive definite?
*
* @return true if A is symmetric and positive definite.
*/
\** Array for internal storage of right triangular decomposition. **\
private transient double[][] R;
public boolean isSPD() {
return isspd;
}
\** Cholesky algorithm for symmetric and positive definite matrix.
@param A Square, symmetric matrix.
@param rightflag Actual value ignored.
@return Structure to access R and isspd flag.
*\
/**
* Return triangular factor.
*
* @return L
*/
public CholeskyDecomposition (Matrix Arg, int rightflag) {
// Initialize.
double[][] A = Arg.getArray();
n = Arg.getColumnDimension();
R = new double[n][n];
isspd = (Arg.getColumnDimension() == n);
// Main loop.
for (int j = 0; j < n; j++) {
double d = 0.0;
for (int k = 0; k < j; k++) {
double s = A[k][j];
for (int i = 0; i < k; i++) {
s = s - R[i][k]*R[i][j];
}
R[k][j] = s = s/R[k][k];
d = d + s*s;
isspd = isspd & (A[k][j] == A[j][k]);
}
d = A[j][j] - d;
isspd = isspd & (d > 0.0);
R[j][j] = Math.sqrt(Math.max(d,0.0));
for (int k = j+1; k < n; k++) {
R[k][j] = 0.0;
}
}
}
public Matrix getL() {
return new Matrix(L, n, n);
}
\** Return upper triangular factor.
@return R
*\
/**
* Solve A*X = B
*
* @param B
* A Matrix with as many rows as A and any number of columns.
* @return X so that L*L'*X = B
* @exception IllegalArgumentException
* Matrix row dimensions must agree.
* @exception RuntimeException
* Matrix is not symmetric positive definite.
*/
public Matrix getR () {
return new Matrix(R,n,n);
}
public Matrix solve(Matrix B) {
if (B.getRowDimension() != n) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!isspd) {
throw new RuntimeException("Matrix is not symmetric positive definite.");
}
\* ------------------------
End of temporary code.
* ------------------------ */
// Copy right hand side.
double[][] X = B.getArrayCopy();
int nx = B.getColumnDimension();
/* ------------------------
Public Methods
* ------------------------ */
// Solve L*Y = B;
for (int k = 0; k < n; k++) {
for (int j = 0; j < nx; j++) {
for (int i = 0; i < k; i++) {
X[k][j] -= X[i][j] * L[k][i];
}
X[k][j] /= L[k][k];
}
}
/** Is the matrix symmetric and positive definite?
@return true if A is symmetric and positive definite.
*/
// Solve L'*X = Y;
for (int k = n - 1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
for (int i = k + 1; i < n; i++) {
X[k][j] -= X[i][j] * L[i][k];
}
X[k][j] /= L[k][k];
}
}
public boolean isSPD () {
return isspd;
}
/** Return triangular factor.
@return L
*/
public Matrix getL () {
return new Matrix(L,n,n);
}
/** Solve A*X = B
@param B A Matrix with as many rows as A and any number of columns.
@return X so that L*L'*X = B
@exception IllegalArgumentException Matrix row dimensions must agree.
@exception RuntimeException Matrix is not symmetric positive definite.
*/
public Matrix solve (Matrix B) {
if (B.getRowDimension() != n) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!isspd) {
throw new RuntimeException("Matrix is not symmetric positive definite.");
}
// Copy right hand side.
double[][] X = B.getArrayCopy();
int nx = B.getColumnDimension();
// Solve L*Y = B;
for (int k = 0; k < n; k++) {
for (int j = 0; j < nx; j++) {
for (int i = 0; i < k ; i++) {
X[k][j] -= X[i][j]*L[k][i];
}
X[k][j] /= L[k][k];
}
}
// Solve L'*X = Y;
for (int k = n-1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
for (int i = k+1; i < n ; i++) {
X[k][j] -= X[i][j]*L[i][k];
}
X[k][j] /= L[k][k];
}
}
return new Matrix(X,n,nx);
}
return new Matrix(X, n, nx);
}
}
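
As a point of reference, and not part of this commit, here is a minimal usage sketch of the reformatted class above; the CholeskyDemo class name and the matrix values are invented for illustration.

import Jama.CholeskyDecomposition;
import Jama.Matrix;

public class CholeskyDemo {
	public static void main(String[] args) {
		// A small symmetric positive definite matrix (values chosen ad hoc).
		Matrix A = new Matrix(new double[][] { { 4, 2 }, { 2, 3 } });
		CholeskyDecomposition chol = new CholeskyDecomposition(A);
		System.out.println("isSPD    = " + chol.isSPD());
		// Solve A*x = b and report the residual norm, which should be close to zero.
		Matrix b = new Matrix(new double[][] { { 1 }, { 2 } });
		Matrix x = chol.solve(b);
		System.out.println("residual = " + A.times(x).minus(b).normF());
	}
}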

Diff not shown because of its large size.

View file

@@ -1,311 +1,306 @@
package Jama;
/** LU Decomposition.
<P>
For an m-by-n matrix A with m >= n, the LU decomposition is an m-by-n
unit lower triangular matrix L, an n-by-n upper triangular matrix U,
and a permutation vector piv of length m so that A(piv,:) = L*U.
If m < n, then L is m-by-m and U is m-by-n.
<P>
The LU decompostion with pivoting always exists, even if the matrix is
singular, so the constructor will never fail. The primary use of the
LU decomposition is in the solution of square systems of simultaneous
linear equations. This will fail if isNonsingular() returns false.
*/
/**
* LU Decomposition.
* <P>
* For an m-by-n matrix A with m >= n, the LU decomposition is an m-by-n unit lower triangular matrix L, an n-by-n upper
* triangular matrix U, and a permutation vector piv of length m so that A(piv,:) = L*U. If m < n, then L is m-by-m and U is
* m-by-n.
* <P>
* The LU decompostion with pivoting always exists, even if the matrix is singular, so the constructor will never fail. The
* primary use of the LU decomposition is in the solution of square systems of simultaneous linear equations. This will fail if
* isNonsingular() returns false.
*/
public class LUDecomposition implements java.io.Serializable {
/* ------------------------
Class variables
* ------------------------ */
/*
* ------------------------ Class variables ------------------------
*/
/** Array for internal storage of decomposition.
@serial internal array storage.
*/
private double[][] LU;
/**
* Array for internal storage of decomposition.
*
* @serial internal array storage.
*/
private double[][] LU;
/** Row and column dimensions, and pivot sign.
@serial column dimension.
@serial row dimension.
@serial pivot sign.
*/
private int m, n, pivsign;
/**
* Row and column dimensions, and pivot sign.
*
* @serial column dimension.
* @serial row dimension.
* @serial pivot sign.
*/
private int m, n, pivsign;
/** Internal storage of pivot vector.
@serial pivot vector.
*/
private int[] piv;
/**
* Internal storage of pivot vector.
*
* @serial pivot vector.
*/
private int[] piv;
/* ------------------------
Constructor
* ------------------------ */
/*
* ------------------------ Constructor ------------------------
*/
/** LU Decomposition
@param A Rectangular matrix
@return Structure to access L, U and piv.
*/
/**
* LU Decomposition
*
* @param A
* Rectangular matrix
* @return Structure to access L, U and piv.
*/
public LUDecomposition (Matrix A) {
public LUDecomposition(Matrix A) {
// Use a "left-looking", dot-product, Crout/Doolittle algorithm.
// Use a "left-looking", dot-product, Crout/Doolittle algorithm.
LU = A.getArrayCopy();
m = A.getRowDimension();
n = A.getColumnDimension();
piv = new int[m];
for (int i = 0; i < m; i++) {
piv[i] = i;
}
pivsign = 1;
double[] LUrowi;
double[] LUcolj = new double[m];
LU = A.getArrayCopy();
m = A.getRowDimension();
n = A.getColumnDimension();
piv = new int[m];
for (int i = 0; i < m; i++) {
piv[i] = i;
}
pivsign = 1;
double[] LUrowi;
double[] LUcolj = new double[m];
// Outer loop.
// Outer loop.
for (int j = 0; j < n; j++) {
for (int j = 0; j < n; j++) {
// Make a copy of the j-th column to localize references.
// Make a copy of the j-th column to localize references.
for (int i = 0; i < m; i++) {
LUcolj[i] = LU[i][j];
}
for (int i = 0; i < m; i++) {
LUcolj[i] = LU[i][j];
}
// Apply previous transformations.
// Apply previous transformations.
for (int i = 0; i < m; i++) {
LUrowi = LU[i];
for (int i = 0; i < m; i++) {
LUrowi = LU[i];
// Most of the time is spent in the following dot product.
// Most of the time is spent in the following dot product.
int kmax = Math.min(i,j);
double s = 0.0;
for (int k = 0; k < kmax; k++) {
s += LUrowi[k]*LUcolj[k];
}
int kmax = Math.min(i, j);
double s = 0.0;
for (int k = 0; k < kmax; k++) {
s += LUrowi[k] * LUcolj[k];
}
LUrowi[j] = LUcolj[i] -= s;
}
// Find pivot and exchange if necessary.
LUrowi[j] = LUcolj[i] -= s;
}
int p = j;
for (int i = j+1; i < m; i++) {
if (Math.abs(LUcolj[i]) > Math.abs(LUcolj[p])) {
p = i;
}
}
if (p != j) {
for (int k = 0; k < n; k++) {
double t = LU[p][k]; LU[p][k] = LU[j][k]; LU[j][k] = t;
}
int k = piv[p]; piv[p] = piv[j]; piv[j] = k;
pivsign = -pivsign;
}
// Find pivot and exchange if necessary.
// Compute multipliers.
if (j < m & LU[j][j] != 0.0) {
for (int i = j+1; i < m; i++) {
LU[i][j] /= LU[j][j];
}
}
}
}
int p = j;
for (int i = j + 1; i < m; i++) {
if (Math.abs(LUcolj[i]) > Math.abs(LUcolj[p])) {
p = i;
}
}
if (p != j) {
for (int k = 0; k < n; k++) {
double t = LU[p][k];
LU[p][k] = LU[j][k];
LU[j][k] = t;
}
int k = piv[p];
piv[p] = piv[j];
piv[j] = k;
pivsign = -pivsign;
}
/* ------------------------
Temporary, experimental code.
------------------------ *\
// Compute multipliers.
\** LU Decomposition, computed by Gaussian elimination.
<P>
This constructor computes L and U with the "daxpy"-based elimination
algorithm used in LINPACK and MATLAB. In Java, we suspect the dot-product,
Crout algorithm will be faster. We have temporarily included this
constructor until timing experiments confirm this suspicion.
<P>
@param A Rectangular matrix
@param linpackflag Use Gaussian elimination. Actual value ignored.
@return Structure to access L, U and piv.
*\
if (j < m & LU[j][j] != 0.0) {
for (int i = j + 1; i < m; i++) {
LU[i][j] /= LU[j][j];
}
}
}
}
public LUDecomposition (Matrix A, int linpackflag) {
// Initialize.
LU = A.getArrayCopy();
m = A.getRowDimension();
n = A.getColumnDimension();
piv = new int[m];
for (int i = 0; i < m; i++) {
piv[i] = i;
}
pivsign = 1;
// Main loop.
for (int k = 0; k < n; k++) {
// Find pivot.
int p = k;
for (int i = k+1; i < m; i++) {
if (Math.abs(LU[i][k]) > Math.abs(LU[p][k])) {
p = i;
}
}
// Exchange if necessary.
if (p != k) {
for (int j = 0; j < n; j++) {
double t = LU[p][j]; LU[p][j] = LU[k][j]; LU[k][j] = t;
}
int t = piv[p]; piv[p] = piv[k]; piv[k] = t;
pivsign = -pivsign;
}
// Compute multipliers and eliminate k-th column.
if (LU[k][k] != 0.0) {
for (int i = k+1; i < m; i++) {
LU[i][k] /= LU[k][k];
for (int j = k+1; j < n; j++) {
LU[i][j] -= LU[i][k]*LU[k][j];
}
}
}
}
}
/*
* ------------------------ Temporary, experimental code. ------------------------ *\
*
* \** LU Decomposition, computed by Gaussian elimination. <P> This constructor computes L and U with the "daxpy"-based
* elimination algorithm used in LINPACK and MATLAB. In Java, we suspect the dot-product, Crout algorithm will be faster. We
* have temporarily included this constructor until timing experiments confirm this suspicion. <P>
*
* @param A Rectangular matrix
*
* @param linpackflag Use Gaussian elimination. Actual value ignored.
*
* @return Structure to access L, U and piv.\
*
* public LUDecomposition (Matrix A, int linpackflag) { // Initialize. LU = A.getArrayCopy(); m = A.getRowDimension(); n =
* A.getColumnDimension(); piv = new int[m]; for (int i = 0; i < m; i++) { piv[i] = i; } pivsign = 1; // Main loop. for (int k
* = 0; k < n; k++) { // Find pivot. int p = k; for (int i = k+1; i < m; i++) { if (Math.abs(LU[i][k]) > Math.abs(LU[p][k])) {
* p = i; } } // Exchange if necessary. if (p != k) { for (int j = 0; j < n; j++) { double t = LU[p][j]; LU[p][j] = LU[k][j];
* LU[k][j] = t; } int t = piv[p]; piv[p] = piv[k]; piv[k] = t; pivsign = -pivsign; } // Compute multipliers and eliminate
* k-th column. if (LU[k][k] != 0.0) { for (int i = k+1; i < m; i++) { LU[i][k] /= LU[k][k]; for (int j = k+1; j < n; j++) {
* LU[i][j] -= LU[i][k]*LU[k][j]; } } } } }
*
* \* ------------------------ End of temporary code. ------------------------
*/
\* ------------------------
End of temporary code.
* ------------------------ */
/*
* ------------------------ Public Methods ------------------------
*/
/* ------------------------
Public Methods
* ------------------------ */
/**
* Is the matrix nonsingular?
*
* @return true if U, and hence A, is nonsingular.
*/
/** Is the matrix nonsingular?
@return true if U, and hence A, is nonsingular.
*/
public boolean isNonsingular() {
for (int j = 0; j < n; j++) {
if (LU[j][j] == 0)
return false;
}
return true;
}
public boolean isNonsingular () {
for (int j = 0; j < n; j++) {
if (LU[j][j] == 0)
return false;
}
return true;
}
/**
* Return lower triangular factor
*
* @return L
*/
/** Return lower triangular factor
@return L
*/
public Matrix getL() {
Matrix X = new Matrix(m, n);
double[][] L = X.getArray();
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
if (i > j) {
L[i][j] = LU[i][j];
} else if (i == j) {
L[i][j] = 1.0;
} else {
L[i][j] = 0.0;
}
}
}
return X;
}
public Matrix getL () {
Matrix X = new Matrix(m,n);
double[][] L = X.getArray();
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
if (i > j) {
L[i][j] = LU[i][j];
} else if (i == j) {
L[i][j] = 1.0;
} else {
L[i][j] = 0.0;
}
}
}
return X;
}
/**
* Return upper triangular factor
*
* @return U
*/
/** Return upper triangular factor
@return U
*/
public Matrix getU() {
Matrix X = new Matrix(n, n);
double[][] U = X.getArray();
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (i <= j) {
U[i][j] = LU[i][j];
} else {
U[i][j] = 0.0;
}
}
}
return X;
}
public Matrix getU () {
Matrix X = new Matrix(n,n);
double[][] U = X.getArray();
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (i <= j) {
U[i][j] = LU[i][j];
} else {
U[i][j] = 0.0;
}
}
}
return X;
}
/**
* Return pivot permutation vector
*
* @return piv
*/
/** Return pivot permutation vector
@return piv
*/
public int[] getPivot() {
int[] p = new int[m];
for (int i = 0; i < m; i++) {
p[i] = piv[i];
}
return p;
}
public int[] getPivot () {
int[] p = new int[m];
for (int i = 0; i < m; i++) {
p[i] = piv[i];
}
return p;
}
/**
* Return pivot permutation vector as a one-dimensional double array
*
* @return (double) piv
*/
/** Return pivot permutation vector as a one-dimensional double array
@return (double) piv
*/
public double[] getDoublePivot() {
double[] vals = new double[m];
for (int i = 0; i < m; i++) {
vals[i] = (double) piv[i];
}
return vals;
}
public double[] getDoublePivot () {
double[] vals = new double[m];
for (int i = 0; i < m; i++) {
vals[i] = (double) piv[i];
}
return vals;
}
/**
* Determinant
*
* @return det(A)
* @exception IllegalArgumentException
* Matrix must be square
*/
/** Determinant
@return det(A)
@exception IllegalArgumentException Matrix must be square
*/
public double det() {
if (m != n) {
throw new IllegalArgumentException("Matrix must be square.");
}
double d = (double) pivsign;
for (int j = 0; j < n; j++) {
d *= LU[j][j];
}
return d;
}
public double det () {
if (m != n) {
throw new IllegalArgumentException("Matrix must be square.");
}
double d = (double) pivsign;
for (int j = 0; j < n; j++) {
d *= LU[j][j];
}
return d;
}
/**
* Solve A*X = B
*
* @param B
* A Matrix with as many rows as A and any number of columns.
* @return X so that L*U*X = B(piv,:)
* @exception IllegalArgumentException
* Matrix row dimensions must agree.
* @exception RuntimeException
* Matrix is singular.
*/
/** Solve A*X = B
@param B A Matrix with as many rows as A and any number of columns.
@return X so that L*U*X = B(piv,:)
@exception IllegalArgumentException Matrix row dimensions must agree.
@exception RuntimeException Matrix is singular.
*/
public Matrix solve(Matrix B) {
if (B.getRowDimension() != m) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!this.isNonsingular()) {
throw new RuntimeException("Matrix is singular.");
}
public Matrix solve (Matrix B) {
if (B.getRowDimension() != m) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!this.isNonsingular()) {
throw new RuntimeException("Matrix is singular.");
}
// Copy right hand side with pivoting
int nx = B.getColumnDimension();
Matrix Xmat = B.getMatrix(piv, 0, nx - 1);
double[][] X = Xmat.getArray();
// Copy right hand side with pivoting
int nx = B.getColumnDimension();
Matrix Xmat = B.getMatrix(piv,0,nx-1);
double[][] X = Xmat.getArray();
// Solve L*Y = B(piv,:)
for (int k = 0; k < n; k++) {
for (int i = k+1; i < n; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j]*LU[i][k];
}
}
}
// Solve U*X = Y;
for (int k = n-1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
X[k][j] /= LU[k][k];
}
for (int i = 0; i < k; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j]*LU[i][k];
}
}
}
return Xmat;
}
// Solve L*Y = B(piv,:)
for (int k = 0; k < n; k++) {
for (int i = k + 1; i < n; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j] * LU[i][k];
}
}
}
// Solve U*X = Y;
for (int k = n - 1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
X[k][j] /= LU[k][k];
}
for (int i = 0; i < k; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j] * LU[i][k];
}
}
}
return Xmat;
}
}
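
In the same spirit (not part of the diff), a minimal sketch of driving the reformatted LUDecomposition; LUDemo and the test matrix are made up, and Matrix.random is used only to get an arbitrary right-hand side.

import Jama.LUDecomposition;
import Jama.Matrix;

public class LUDemo {
	public static void main(String[] args) {
		// An arbitrary nonsingular 3x3 matrix (values chosen ad hoc).
		Matrix A = new Matrix(new double[][] { { 2, 1, 1 }, { 4, -6, 0 }, { -2, 7, 2 } });
		LUDecomposition lu = new LUDecomposition(A);
		System.out.println("nonsingular = " + lu.isNonsingular());
		System.out.println("det(A)      = " + lu.det());
		// Solve A*X = B for a two-column right-hand side and check the residual.
		Matrix B = Matrix.random(3, 2);
		Matrix X = lu.solve(B);
		System.out.println("residual    = " + A.times(X).minus(B).normF());
	}
}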

Diff not shown because of its large size.

View file

@@ -1,218 +1,239 @@
package Jama;
import Jama.util.Maths;
/** QR Decomposition.
<P>
For an m-by-n matrix A with m >= n, the QR decomposition is an m-by-n
orthogonal matrix Q and an n-by-n upper triangular matrix R so that
A = Q*R.
<P>
The QR decompostion always exists, even if the matrix does not have
full rank, so the constructor will never fail. The primary use of the
QR decomposition is in the least squares solution of nonsquare systems
of simultaneous linear equations. This will fail if isFullRank()
returns false.
*/
/**
* QR Decomposition.
* <P>
* For an m-by-n matrix A with m >= n, the QR decomposition is an m-by-n orthogonal matrix Q and an n-by-n upper triangular matrix
* R so that A = Q*R.
* <P>
* The QR decompostion always exists, even if the matrix does not have full rank, so the constructor will never fail. The primary
* use of the QR decomposition is in the least squares solution of nonsquare systems of simultaneous linear equations. This will
* fail if isFullRank() returns false.
*/
public class QRDecomposition implements java.io.Serializable {
/* ------------------------
Class variables
* ------------------------ */
/*
* ------------------------ Class variables ------------------------
*/
/** Array for internal storage of decomposition.
@serial internal array storage.
*/
private double[][] QR;
/**
* Array for internal storage of decomposition.
*
* @serial internal array storage.
*/
private double[][] QR;
/** Row and column dimensions.
@serial column dimension.
@serial row dimension.
*/
private int m, n;
/**
* Row and column dimensions.
*
* @serial column dimension.
* @serial row dimension.
*/
private int m, n;
/** Array for internal storage of diagonal of R.
@serial diagonal of R.
*/
private double[] Rdiag;
/**
* Array for internal storage of diagonal of R.
*
* @serial diagonal of R.
*/
private double[] Rdiag;
/* ------------------------
Constructor
* ------------------------ */
/*
* ------------------------ Constructor ------------------------
*/
/** QR Decomposition, computed by Householder reflections.
@param A Rectangular matrix
@return Structure to access R and the Householder vectors and compute Q.
*/
/**
* QR Decomposition, computed by Householder reflections.
*
* @param A
* Rectangular matrix
* @return Structure to access R and the Householder vectors and compute Q.
*/
public QRDecomposition (Matrix A) {
// Initialize.
QR = A.getArrayCopy();
m = A.getRowDimension();
n = A.getColumnDimension();
Rdiag = new double[n];
public QRDecomposition(Matrix A) {
// Initialize.
QR = A.getArrayCopy();
m = A.getRowDimension();
n = A.getColumnDimension();
Rdiag = new double[n];
// Main loop.
for (int k = 0; k < n; k++) {
// Compute 2-norm of k-th column without under/overflow.
double nrm = 0;
for (int i = k; i < m; i++) {
nrm = Maths.hypot(nrm,QR[i][k]);
}
// Main loop.
for (int k = 0; k < n; k++) {
// Compute 2-norm of k-th column without under/overflow.
double nrm = 0;
for (int i = k; i < m; i++) {
nrm = Maths.hypot(nrm, QR[i][k]);
}
if (nrm != 0.0) {
// Form k-th Householder vector.
if (QR[k][k] < 0) {
nrm = -nrm;
}
for (int i = k; i < m; i++) {
QR[i][k] /= nrm;
}
QR[k][k] += 1.0;
if (nrm != 0.0) {
// Form k-th Householder vector.
if (QR[k][k] < 0) {
nrm = -nrm;
}
for (int i = k; i < m; i++) {
QR[i][k] /= nrm;
}
QR[k][k] += 1.0;
// Apply transformation to remaining columns.
for (int j = k+1; j < n; j++) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k]*QR[i][j];
}
s = -s/QR[k][k];
for (int i = k; i < m; i++) {
QR[i][j] += s*QR[i][k];
}
}
}
Rdiag[k] = -nrm;
}
}
// Apply transformation to remaining columns.
for (int j = k + 1; j < n; j++) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k] * QR[i][j];
}
s = -s / QR[k][k];
for (int i = k; i < m; i++) {
QR[i][j] += s * QR[i][k];
}
}
}
Rdiag[k] = -nrm;
}
}
/* ------------------------
Public Methods
* ------------------------ */
/*
* ------------------------ Public Methods ------------------------
*/
/** Is the matrix full rank?
@return true if R, and hence A, has full rank.
*/
/**
* Is the matrix full rank?
*
* @return true if R, and hence A, has full rank.
*/
public boolean isFullRank () {
for (int j = 0; j < n; j++) {
if (Rdiag[j] == 0)
return false;
}
return true;
}
public boolean isFullRank() {
for (int j = 0; j < n; j++) {
if (Rdiag[j] == 0)
return false;
}
return true;
}
/** Return the Householder vectors
@return Lower trapezoidal matrix whose columns define the reflections
*/
/**
* Return the Householder vectors
*
* @return Lower trapezoidal matrix whose columns define the reflections
*/
public Matrix getH () {
Matrix X = new Matrix(m,n);
double[][] H = X.getArray();
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
if (i >= j) {
H[i][j] = QR[i][j];
} else {
H[i][j] = 0.0;
}
}
}
return X;
}
public Matrix getH() {
Matrix X = new Matrix(m, n);
double[][] H = X.getArray();
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
if (i >= j) {
H[i][j] = QR[i][j];
} else {
H[i][j] = 0.0;
}
}
}
return X;
}
/** Return the upper triangular factor
@return R
*/
/**
* Return the upper triangular factor
*
* @return R
*/
public Matrix getR () {
Matrix X = new Matrix(n,n);
double[][] R = X.getArray();
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (i < j) {
R[i][j] = QR[i][j];
} else if (i == j) {
R[i][j] = Rdiag[i];
} else {
R[i][j] = 0.0;
}
}
}
return X;
}
public Matrix getR() {
Matrix X = new Matrix(n, n);
double[][] R = X.getArray();
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
if (i < j) {
R[i][j] = QR[i][j];
} else if (i == j) {
R[i][j] = Rdiag[i];
} else {
R[i][j] = 0.0;
}
}
}
return X;
}
/** Generate and return the (economy-sized) orthogonal factor
@return Q
*/
/**
* Generate and return the (economy-sized) orthogonal factor
*
* @return Q
*/
public Matrix getQ () {
Matrix X = new Matrix(m,n);
double[][] Q = X.getArray();
for (int k = n-1; k >= 0; k--) {
for (int i = 0; i < m; i++) {
Q[i][k] = 0.0;
}
Q[k][k] = 1.0;
for (int j = k; j < n; j++) {
if (QR[k][k] != 0) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k]*Q[i][j];
}
s = -s/QR[k][k];
for (int i = k; i < m; i++) {
Q[i][j] += s*QR[i][k];
}
}
}
}
return X;
}
public Matrix getQ() {
Matrix X = new Matrix(m, n);
double[][] Q = X.getArray();
for (int k = n - 1; k >= 0; k--) {
for (int i = 0; i < m; i++) {
Q[i][k] = 0.0;
}
Q[k][k] = 1.0;
for (int j = k; j < n; j++) {
if (QR[k][k] != 0) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k] * Q[i][j];
}
s = -s / QR[k][k];
for (int i = k; i < m; i++) {
Q[i][j] += s * QR[i][k];
}
}
}
}
return X;
}
/** Least squares solution of A*X = B
@param B A Matrix with as many rows as A and any number of columns.
@return X that minimizes the two norm of Q*R*X-B.
@exception IllegalArgumentException Matrix row dimensions must agree.
@exception RuntimeException Matrix is rank deficient.
*/
/**
* Least squares solution of A*X = B
*
* @param B
* A Matrix with as many rows as A and any number of columns.
* @return X that minimizes the two norm of Q*R*X-B.
* @exception IllegalArgumentException
* Matrix row dimensions must agree.
* @exception RuntimeException
* Matrix is rank deficient.
*/
public Matrix solve (Matrix B) {
if (B.getRowDimension() != m) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!this.isFullRank()) {
throw new RuntimeException("Matrix is rank deficient.");
}
// Copy right hand side
int nx = B.getColumnDimension();
double[][] X = B.getArrayCopy();
public Matrix solve(Matrix B) {
if (B.getRowDimension() != m) {
throw new IllegalArgumentException("Matrix row dimensions must agree.");
}
if (!this.isFullRank()) {
throw new RuntimeException("Matrix is rank deficient.");
}
// Compute Y = transpose(Q)*B
for (int k = 0; k < n; k++) {
for (int j = 0; j < nx; j++) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k]*X[i][j];
}
s = -s/QR[k][k];
for (int i = k; i < m; i++) {
X[i][j] += s*QR[i][k];
}
}
}
// Solve R*X = Y;
for (int k = n-1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
X[k][j] /= Rdiag[k];
}
for (int i = 0; i < k; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j]*QR[i][k];
}
}
}
return (new Matrix(X,n,nx).getMatrix(0,n-1,0,nx-1));
}
// Copy right hand side
int nx = B.getColumnDimension();
double[][] X = B.getArrayCopy();
// Compute Y = transpose(Q)*B
for (int k = 0; k < n; k++) {
for (int j = 0; j < nx; j++) {
double s = 0.0;
for (int i = k; i < m; i++) {
s += QR[i][k] * X[i][j];
}
s = -s / QR[k][k];
for (int i = k; i < m; i++) {
X[i][j] += s * QR[i][k];
}
}
}
// Solve R*X = Y;
for (int k = n - 1; k >= 0; k--) {
for (int j = 0; j < nx; j++) {
X[k][j] /= Rdiag[k];
}
for (int i = 0; i < k; i++) {
for (int j = 0; j < nx; j++) {
X[i][j] -= X[k][j] * QR[i][k];
}
}
}
return (new Matrix(X, n, nx).getMatrix(0, n - 1, 0, nx - 1));
}
}
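
As the javadoc above notes, the primary use of the QR decomposition is the least-squares solution of an overdetermined system; a hedged sketch follows (QRDemo and the dimensions are arbitrary).

import Jama.Matrix;
import Jama.QRDecomposition;

public class QRDemo {
	public static void main(String[] args) {
		// Overdetermined system: more rows than columns, random entries.
		Matrix A = Matrix.random(6, 3);
		Matrix b = Matrix.random(6, 1);
		QRDecomposition qr = new QRDecomposition(A);
		System.out.println("full rank     = " + qr.isFullRank());
		// x minimizes the two-norm of A*x - b.
		Matrix x = qr.solve(b);
		System.out.println("residual norm = " + A.times(x).minus(b).normF());
	}
}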

Diff not shown because of its large size.

View file

@@ -1,162 +1,275 @@
package Jama.examples;
import java.util.Date; import Jama.EigenvalueDecomposition; import Jama.LUDecomposition; import Jama.Matrix; import Jama.QRDecomposition;
/** Example of use of Matrix Class, featuring magic squares. **/
public class MagicSquareExample {
/** Generate magic square test matrix. **/
public static Matrix magic (int n) {
double[][] M = new double[n][n];
// Odd order
if ((n % 2) == 1) {
int a = (n+1)/2;
int b = (n+1);
for (int j = 0; j < n; j++) {
for (int i = 0; i < n; i++) {
M[i][j] = n*((i+j+a) % n) + ((i+2*j+b) % n) + 1;
}
}
// Doubly Even Order
} else if ((n % 4) == 0) {
for (int j = 0; j < n; j++) {
for (int i = 0; i < n; i++) {
if (((i+1)/2)%2 == ((j+1)/2)%2) {
M[i][j] = n*n-n*i-j;
} else {
M[i][j] = n*i+j+1;
}
}
}
// Singly Even Order
} else {
int p = n/2;
int k = (n-2)/4;
Matrix A = magic(p);
for (int j = 0; j < p; j++) {
for (int i = 0; i < p; i++) {
double aij = A.get(i,j);
M[i][j] = aij;
M[i][j+p] = aij + 2*p*p;
M[i+p][j] = aij + 3*p*p;
M[i+p][j+p] = aij + p*p;
}
}
for (int i = 0; i < p; i++) {
for (int j = 0; j < k; j++) {
double t = M[i][j]; M[i][j] = M[i+p][j]; M[i+p][j] = t;
}
for (int j = n-k+1; j < n; j++) {
double t = M[i][j]; M[i][j] = M[i+p][j]; M[i+p][j] = t;
}
}
double t = M[k][0]; M[k][0] = M[k+p][0]; M[k+p][0] = t;
t = M[k][k]; M[k][k] = M[k+p][k]; M[k+p][k] = t;
}
return new Matrix(M);
}
/** Shorten spelling of print. **/
private static void print (String s) {
System.out.print(s);
}
/** Format double with Fw.d. **/
public static String fixedWidthDoubletoString (double x, int w, int d) {
java.text.DecimalFormat fmt = new java.text.DecimalFormat();
fmt.setMaximumFractionDigits(d);
fmt.setMinimumFractionDigits(d);
fmt.setGroupingUsed(false);
String s = fmt.format(x);
while (s.length() < w) {
s = " " + s;
}
return s;
}
/** Format integer with Iw. **/
public static String fixedWidthIntegertoString (int n, int w) {
String s = Integer.toString(n);
while (s.length() < w) {
s = " " + s;
}
return s;
}
public static void main (String argv[]) {
/*
| Tests LU, QR, SVD and symmetric Eig decompositions.
|
| n = order of magic square.
| trace = diagonal sum, should be the magic sum, (n^3 + n)/2.
| max_eig = maximum eigenvalue of (A + A')/2, should equal trace.
| rank = linear algebraic rank,
| should equal n if n is odd, be less than n if n is even.
| cond = L_2 condition number, ratio of singular values.
| lu_res = test of LU factorization, norm1(L*U-A(p,:))/(n*eps).
| qr_res = test of QR factorization, norm1(Q*R-A)/(n*eps).
*/
print("\n Test of Matrix Class, using magic squares.\n");
print(" See MagicSquareExample.main() for an explanation.\n");
print("\n n trace max_eig rank cond lu_res qr_res\n\n");
Date start_time = new Date();
double eps = Math.pow(2.0,-52.0);
for (int n = 3; n <= 32; n++) {
print(fixedWidthIntegertoString(n,7));
Matrix M = magic(n);
int t = (int) M.trace();
print(fixedWidthIntegertoString(t,10));
EigenvalueDecomposition E =
new EigenvalueDecomposition(M.plus(M.transpose()).times(0.5));
double[] d = E.getRealEigenvalues();
print(fixedWidthDoubletoString(d[n-1],14,3));
int r = M.rank();
print(fixedWidthIntegertoString(r,7));
double c = M.cond();
print(c < 1/eps ? fixedWidthDoubletoString(c,12,3) :
" Inf");
LUDecomposition LU = new LUDecomposition(M);
Matrix L = LU.getL();
Matrix U = LU.getU();
int[] p = LU.getPivot();
Matrix R = L.times(U).minus(M.getMatrix(p,0,n-1));
double res = R.norm1()/(n*eps);
print(fixedWidthDoubletoString(res,12,3));
QRDecomposition QR = new QRDecomposition(M);
Matrix Q = QR.getQ();
R = QR.getR();
R = Q.times(R).minus(M);
res = R.norm1()/(n*eps);
print(fixedWidthDoubletoString(res,12,3));
print("\n");
}
Date stop_time = new Date();
double etime = (stop_time.getTime() - start_time.getTime())/1000.;
print("\nElapsed Time = " +
fixedWidthDoubletoString(etime,12,3) + " seconds\n");
print("Adios\n");
}
}
package Jama.examples;
import java.util.Date;
import Jama.EigenvalueDecomposition;
import Jama.LUDecomposition;
import Jama.Matrix;
import Jama.QRDecomposition;
/** Example of use of Matrix Class, featuring magic squares. **/
public class MagicSquareExample {
/** Generate magic square test matrix. **/
public static Matrix magic(int n) {
double[][] M = new double[n][n];
// Odd order
if ((n % 2) == 1) {
int a = (n + 1) / 2;
int b = (n + 1);
for (int j = 0; j < n; j++) {
for (int i = 0; i < n; i++) {
M[i][j] = n * ((i + j + a) % n) + ((i + 2 * j + b) % n) + 1;
}
}
// Doubly Even Order
} else if ((n % 4) == 0) {
for (int j = 0; j < n; j++) {
for (int i = 0; i < n; i++) {
if (((i + 1) / 2) % 2 == ((j + 1) / 2) % 2) {
M[i][j] = n * n - n * i - j;
} else {
M[i][j] = n * i + j + 1;
}
}
}
// Singly Even Order
} else {
int p = n / 2;
int k = (n - 2) / 4;
Matrix A = magic(p);
for (int j = 0; j < p; j++) {
for (int i = 0; i < p; i++) {
double aij = A.get(i, j);
M[i][j] = aij;
M[i][j + p] = aij + 2 * p * p;
M[i + p][j] = aij + 3 * p * p;
M[i + p][j + p] = aij + p * p;
}
}
for (int i = 0; i < p; i++) {
for (int j = 0; j < k; j++) {
double t = M[i][j];
M[i][j] = M[i + p][j];
M[i + p][j] = t;
}
for (int j = n - k + 1; j < n; j++) {
double t = M[i][j];
M[i][j] = M[i + p][j];
M[i + p][j] = t;
}
}
double t = M[k][0];
M[k][0] = M[k + p][0];
M[k + p][0] = t;
t = M[k][k];
M[k][k] = M[k + p][k];
M[k + p][k] = t;
}
return new Matrix(M);
}
/** Shorten spelling of print. **/
private static void print(String s) {
System.out.print(s);
}
/** Format double with Fw.d. **/
public static String fixedWidthDoubletoString(double x, int w, int d) {
java.text.DecimalFormat fmt = new java.text.DecimalFormat();
fmt.setMaximumFractionDigits(d);
fmt.setMinimumFractionDigits(d);
fmt.setGroupingUsed(false);
String s = fmt.format(x);
while (s.length() < w) {
s = " " + s;
}
return s;
}
/** Format integer with Iw. **/
public static String fixedWidthIntegertoString(int n, int w) {
String s = Integer.toString(n);
while (s.length() < w) {
s = " " + s;
}
return s;
}
public static void main(String argv[]) {
/*
*
* | Tests LU, QR, SVD and symmetric Eig decompositions.
*
* |
*
* | n = order of magic square.
*
* | trace = diagonal sum, should be the magic sum, (n^3 + n)/2.
*
* | max_eig = maximum eigenvalue of (A + A')/2, should equal trace.
*
* | rank = linear algebraic rank,
*
* | should equal n if n is odd, be less than n if n is even.
*
* | cond = L_2 condition number, ratio of singular values.
*
* | lu_res = test of LU factorization, norm1(L*U-A(p,:))/(n*eps).
*
* | qr_res = test of QR factorization, norm1(Q*R-A)/(n*eps).
*/
print("\n Test of Matrix Class, using magic squares.\n");
print(" See MagicSquareExample.main() for an explanation.\n");
print("\n n trace max_eig rank cond lu_res qr_res\n\n");
Date start_time = new Date();
double eps = Math.pow(2.0, -52.0);
for (int n = 3; n <= 32; n++) {
print(fixedWidthIntegertoString(n, 7));
Matrix M = magic(n);
int t = (int) M.trace();
print(fixedWidthIntegertoString(t, 10));
EigenvalueDecomposition E =
new EigenvalueDecomposition(M.plus(M.transpose()).times(0.5));
double[] d = E.getRealEigenvalues();
print(fixedWidthDoubletoString(d[n - 1], 14, 3));
int r = M.rank();
print(fixedWidthIntegertoString(r, 7));
double c = M.cond();
print(c < 1 / eps ? fixedWidthDoubletoString(c, 12, 3) :
" Inf");
LUDecomposition LU = new LUDecomposition(M);
Matrix L = LU.getL();
Matrix U = LU.getU();
int[] p = LU.getPivot();
Matrix R = L.times(U).minus(M.getMatrix(p, 0, n - 1));
double res = R.norm1() / (n * eps);
print(fixedWidthDoubletoString(res, 12, 3));
QRDecomposition QR = new QRDecomposition(M);
Matrix Q = QR.getQ();
R = QR.getR();
R = Q.times(R).minus(M);
res = R.norm1() / (n * eps);
print(fixedWidthDoubletoString(res, 12, 3));
print("\n");
}
Date stop_time = new Date();
double etime = (stop_time.getTime() - start_time.getTime()) / 1000.;
print("\nElapsed Time = " +
fixedWidthDoubletoString(etime, 12, 3) + " seconds\n");
print("Adios\n");
}
}

Diff not shown because of its large size.

View file

@@ -1,20 +1,33 @@
package Jama.util;
public class Maths {
/** sqrt(a^2 + b^2) without under/overflow. **/
public static double hypot(double a, double b) {
double r;
if (Math.abs(a) > Math.abs(b)) {
r = b/a;
r = Math.abs(a)*Math.sqrt(1+r*r);
} else if (b != 0) {
r = a/b;
r = Math.abs(b)*Math.sqrt(1+r*r);
} else {
r = 0.0;
}
return r;
}
}
package Jama.util;
public class Maths {
/** sqrt(a^2 + b^2) without under/overflow. **/
public static double hypot(double a, double b) {
double r;
if (Math.abs(a) > Math.abs(b)) {
r = b / a;
r = Math.abs(a) * Math.sqrt(1 + r * r);
} else if (b != 0) {
r = a / b;
r = Math.abs(b) * Math.sqrt(1 + r * r);
} else {
r = 0.0;
}
return r;
}
}
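
The scaled formula above exists to avoid intermediate under/overflow; a small sketch of the difference against the naive formula (HypotDemo is an invented name).

import Jama.util.Maths;

public class HypotDemo {
	public static void main(String[] args) {
		double a = 3e200, b = 4e200;
		// The naive formula overflows: a*a and b*b exceed Double.MAX_VALUE.
		System.out.println("naive  = " + Math.sqrt(a * a + b * b)); // Infinity
		// The scaled formula stays in range and returns 5e200.
		System.out.println("scaled = " + Maths.hypot(a, b));
	}
}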

View file

@@ -1,75 +1,70 @@
package Jampack;
/**
Block contains a static method for partitioning a matrix.
into a block matrix.
* Block contains a static method for partitioning a matrix. into a block matrix.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Block {
public class Block{
/**
* This method takes a Zmat A and two arrays ii and jj of length m and n and produces an (m-1)x(n-1) block matrix
* Zmat[m-1][n-1], whose (i,j)-element is A.get(ii[i], ii[i+1]-1, jj[j], jj[j+1]-1). Throws a JampackException if
*
* @param A
* The matrix to be partitioned
* @param ii
* [] The row indices of the partition
* @param jj
* [] The column indices of the partition
* @return The block Zmat
* @exception JampackException
* Thrown if the submatrices are not conformable.
*/
public static Zmat[][] o(Zmat A, int ii[], int jj[]) throws JampackException {
/**
This method takes a Zmat A and two arrays ii and jj of length m and
n and produces an (m-1)x(n-1) block matrix Zmat[m-1][n-1], whose
(i,j)-element is A.get(ii[i], ii[i+1]-1, jj[j], jj[j+1]-1).
Throws a JampackException if
int i, j;
@param A The matrix to be partitioned
@param ii[] The row indices of the partition
@param jj[] The column indices of the partition
@return The block Zmat
@exception JampackException
Thrown if the submatrices are not conformable.
*/
public static Zmat[][] o(Zmat A, int ii[], int jj[])
throws JampackException
{
A.getProperties();
int i, j;
int m = ii.length;
int n = jj.length;
A.getProperties();
/* Check the row indices */
int m = ii.length;
int n = jj.length;
if (ii[0] < A.bx || ii[m - 1] > A.rx + 1) {
throw new JampackException("Illegal row array.");
}
for (i = 1; i < m; i++) {
if (ii[i - 1] >= ii[i]) {
throw new JampackException("Illegal row array.");
}
}
/* Check the row indices */
/* Check the column indices */
if (ii[0] < A.bx || ii[m-1]>A.rx+1){
throw new JampackException
("Illegal row array.");
}
for (i=1; i<m; i++){
if (ii[i-1]>=ii[i]){
throw new JampackException
("Illegal row array.");
}
}
if (jj[0] < A.bx || jj[n - 1] > A.cx + 1) {
throw new JampackException("Illegal column array.");
}
for (j = 1; j < n; j++) {
if (jj[j - 1] >= jj[j]) {
throw new JampackException("Illegal column array.");
}
}
/* Check the column indices */
/*
* Create and fill the block matrix with the parition of A.
*/
if (jj[0] < A.bx || jj[n-1]>A.cx+1){
throw new JampackException
("Illegal column array.");
}
for (j=1; j<n; j++){
if (jj[j-1]>=jj[j]){
throw new JampackException
("Illegal column array.");
}
}
Zmat B[][] = new Zmat[m - 1][n - 1];
/* Create and fill the block matrix with the parition
of A. */
Zmat B[][] = new Zmat[m-1][n-1];
for (i=0; i<m-1; i++){
for (j=0; j<n-1; j++){
B[i][j] = A.get(ii[i], ii[i+1]-1, jj[j], jj[j+1]-1);
}
}
return B;
}
for (i = 0; i < m - 1; i++) {
for (j = 0; j < n - 1; j++) {
B[i][j] = A.get(ii[i], ii[i + 1] - 1, jj[j], jj[j + 1] - 1);
}
}
return B;
}
}

View file

@@ -1,31 +1,28 @@
package Jampack;
class BlockTest{
class BlockTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(1);
Parameters.setBaseIndex(1);
Zmat A = new Zmat(10,9);
Zmat A = new Zmat(10, 9);
int bx = A.bx;
int rx = A.rx;
int cx = A.cx;
int bx = A.bx;
int rx = A.rx;
int cx = A.cx;
for (int i=bx; i<=rx; i++){
for (int j=bx; j<=cx; j++){
A.put(i, j, new Z(i,j));
}
}
for (int i = bx; i <= rx; i++) {
for (int j = bx; j <= cx; j++) {
A.put(i, j, new Z(i, j));
}
}
int ii[] = { A.bx, 5, 6, A.rx + 1 };
int jj[] = { A.bx, 3, 4, A.cx + 1 };
int ii[] = {A.bx, 5, 6, A.rx+1};
int jj[] = {A.bx, 3, 4, A.cx+1};
Zmat B[][] = Block.o(A, new int[] { A.bx, 5, 6, A.rx + 1 }, new int[] { A.bx, 4, 7, A.cx + 1 });
Zmat B[][] = Block.o(A, new int[] {A.bx, 5, 6, A.rx+1},
new int[] {A.bx, 4, 7, A.cx+1});
Print.o(Norm.fro(Minus.o(A, Merge.o(B))));
}
Print.o(Norm.fro(Minus.o(A, Merge.o(B))));
}
}

View file

@@ -1,111 +1,104 @@
package Jampack;
/**
Eig implements the eigenvalue-vector decomposition of
of a square matrix. Specifically given a diagonalizable
matrix A, there is a matrix nonsingular matrix X such that
<pre>
* D = X<sup>-1</sup> AX
</pre>
is diagonal. The columns of X are eigenvectors of A corresponding
to the diagonal elements of D. Eig implements X as a Zmat and
D as a Zdiagmat.
<p>
Warning: if A is defective rounding error will allow Eig to
compute a set of eigevectors. However, the matrix X will
be ill conditioned.
@version Pre-alpha
@author G. W. Stewart
*/
* Eig implements the eigenvalue-vector decomposition of of a square matrix. Specifically given a diagonalizable matrix A, there
* is a matrix nonsingular matrix X such that
*
* <pre>
* D = X<sup>-1</sup> AX
* </pre>
*
* is diagonal. The columns of X are eigenvectors of A corresponding to the diagonal elements of D. Eig implements X as a Zmat and
* D as a Zdiagmat.
* <p>
* Warning: if A is defective rounding error will allow Eig to compute a set of eigevectors. However, the matrix X will be ill
* conditioned.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
public class Eig{
public class Eig {
/** The matrix of eigevectors */
public Zmat X;
/** The matrix of eigevectors */
public Zmat X;
/** The diagonal matrix of eigenvalues */
public Zdiagmat D;
/** The diagonal matrix of eigenvalues */
public Zdiagmat D;
/**
Creates an eigenvalue-vector decomposition of a square matrix A.
/**
* Creates an eigenvalue-vector decomposition of a square matrix A.
*
* @param A
* The matrix whose decomposition is to be computed
* @exception JampackException
* Thrown if A is not square. <br>
* Passed from below.
*/
@param A The matrix whose decomposition is to be
computed
@exception JampackException
Thrown if A is not square. <br>
Passed from below.
*/
public Eig(Zmat A) throws JampackException {
public Eig(Zmat A)
throws JampackException{
int i, j, k;
double norm, scale;
Z z, d;
int i, j, k;
double norm, scale;
Z z, d;
A.getProperties();
A.getProperties();
if (A.nr != A.nc) {
throw new JampackException("Matrix not square.");
}
if (A.nr != A.nc){
throw new JampackException
("Matrix not square.");
}
int n = A.nr;
int n = A.nr;
/* Compute the Schur decomposition of $A$ and set up T and D. */
/* Compute the Schur decomposition of $A$ and set up T and D. */
Schur S = new Schur(A);
Schur S = new Schur(A);
Zmat T = S.T;
D = new Zdiagmat(T);
Zmat T = S.T;
norm = Norm.fro(A);
D = new Zdiagmat(T);
X = new Zmat(n, n);
norm = Norm.fro(A);
/* Compute the eigevectors of T */
X = new Zmat(n, n);
for (k = n - 1; k >= 0; k--) {
/* Compute the eigevectors of T */
d = T.get0(k, k);
for (k=n-1; k>=0; k--){
X.re[k][k] = 1.0;
X.im[k][k] = 0.0;
d = T.get0(k, k);
for (i = k - 1; i >= 0; i--) {
X.re[k][k] = 1.0;
X.im[k][k] = 0.0;
X.re[i][k] = -T.re[i][k];
X.im[i][k] = -T.im[i][k];
for (i=k-1; i>=0; i--){
for (j = i + 1; j < k; j++) {
X.re[i][k] = -T.re[i][k];
X.im[i][k] = -T.im[i][k];
X.re[i][k] = X.re[i][k] - T.re[i][j] * X.re[j][k] + T.im[i][j] * X.im[j][k];
X.im[i][k] = X.im[i][k] - T.re[i][j] * X.im[j][k] - T.im[i][j] * X.re[j][k];
}
for(j=i+1; j<k; j++){
z = T.get0(i, i);
z.Minus(z, d);
if (z.re == 0.0 && z.im == 0.0) { // perturb zero diagonal
z.re = 1.0e-16 * norm; // to avoid division by zero
}
z.Div(X.get0(i, k), z);
X.put0(i, k, z);
}
X.re[i][k] = X.re[i][k] - T.re[i][j]*X.re[j][k]
+ T.im[i][j]*X.im[j][k];
X.im[i][k] = X.im[i][k] - T.re[i][j]*X.im[j][k]
- T.im[i][j]*X.re[j][k];
}
/* Scale the vector so its norm is one. */
z = T.get0(i,i);
z.Minus(z, d);
if (z.re==0.0 && z.im==0.0){ // perturb zero diagonal
z.re = 1.0e-16*norm; // to avoid division by zero
}
z.Div(X.get0(i,k), z);
X.put0(i, k, z);
}
/* Scale the vector so its norm is one. */
scale = 1.0/Norm.fro(X, X.bx, X.rx, X.bx+k, X.bx+k);
for (i=0; i<X.nr; i++){
X.re[i][k] = scale*X.re[i][k];
X.im[i][k] = scale*X.im[i][k];
}
}
X = Times.o(S.U, X);
}
scale = 1.0 / Norm.fro(X, X.bx, X.rx, X.bx + k, X.bx + k);
for (i = 0; i < X.nr; i++) {
X.re[i][k] = scale * X.re[i][k];
X.im[i][k] = scale * X.im[i][k];
}
}
X = Times.o(S.U, X);
}
}

View file

@@ -1,26 +1,25 @@
package Jampack;
class EigTest{
class EigTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(1);
Parameters.setBaseIndex(1);
int n = 5;
Z z = new Z();
Zmat A = new Zmat(n,n);
for (int i=A.bx; i<=A.rx; i++){
for (int j=A.bx; j<=A.cx; j++){
A.put(i, j, new Z(i+j, i-2*j));
}
A.put(i,i, new Z(2*n, 2*n));
}
int n = 5;
Z z = new Z();
Zmat A = new Zmat(n, n);
Eig B = new Eig(A);
Print.o(B.D);
Print.o(new Zdiagmat(Times.o(H.o(B.X),B.X)));
Print.o(Norm.fro(Minus.o(Times.o(A, B.X), Times.o(B.X, B.D))));
}
for (int i = A.bx; i <= A.rx; i++) {
for (int j = A.bx; j <= A.cx; j++) {
A.put(i, j, new Z(i + j, i - 2 * j));
}
A.put(i, i, new Z(2 * n, 2 * n));
}
Eig B = new Eig(A);
Print.o(B.D);
Print.o(new Zdiagmat(Times.o(H.o(B.X), B.X)));
Print.o(Norm.fro(Minus.o(Times.o(A, B.X), Times.o(B.X, B.D))));
}
}

View file

@@ -1,36 +1,36 @@
package Jampack;
/**
Eye generates a matrix whose diagonal elements are one
and whose off diagonal elements are zero.
*/
* Eye generates a matrix whose diagonal elements are one and whose off diagonal elements are zero.
*/
public class Eye{
/**
Generates an identity matrix of order <tt>n</tt>.
@param <tt>n</tt> The order of the matrx
*/
public static Zmat o(int n){
return o(n, n);
}
public class Eye {
/**
Generates an <tt>mxn</tt> matrix whose diagonal elements are
one and whose off diagonal elements are zero.
@param <tt>m</tt> The number of rows in the matrix
@param <tt>n</tt> The number of columns in the matrix
*/
public static Zmat o(int m, int n){
/**
* Generates an identity matrix of order <tt>n</tt>.
*
* @param <tt>n</tt> The order of the matrx
*/
public static Zmat o(int n) {
return o(n, n);
}
Zmat I = new Zmat(m, n);
/**
* Generates an <tt>mxn</tt> matrix whose diagonal elements are one and whose off diagonal elements are zero.
*
* @param <tt>m</tt> The number of rows in the matrix
* @param <tt>n</tt> The number of columns in the matrix
*/
public static Zmat o(int m, int n) {
for (int i=0; i<Math.min(m, n); i++){
I.re[i][i] = 1;
I.im[i][i] = 0;
}
Zmat I = new Zmat(m, n);
return I;
}
for (int i = 0; i < Math.min(m, n); i++) {
I.re[i][i] = 1;
I.im[i][i] = 0;
}
return I;
}
}
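
A minimal sketch of Eye, written in the same package-private style as the test classes in this diff (EyeDemo is an invented name).

package Jampack;

class EyeDemo {
	public static void main(String[] args) throws JampackException {
		// An identity matrix of order 4 and a rectangular 4x6 "identity".
		Zmat I = Eye.o(4);
		Zmat J = Eye.o(4, 6);
		// I*J reproduces J, so the Frobenius norm of the difference is zero.
		Print.o(Norm.fro(Minus.o(Times.o(I, J), J)));
	}
}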

View file

@@ -1,62 +1,64 @@
package Jampack;
/**
H computes the conjugate transpose of a matrix
and the transpose of a complex matrix.
* H computes the conjugate transpose of a matrix and the transpose of a complex matrix.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class H {
public class H{
/**
* Returns the conjugate transpose of a Zmat.
*
* @param A
* The matrix to be conjugated and transposed
* @return The conjugate transpose of A
*/
public static Zmat o(Zmat A) {
/**
Returns the conjugate transpose of a Zmat.
@param A The matrix to be conjugated and transposed
@return The conjugate transpose of A
*/
public static Zmat o(Zmat A)
{
Zmat Ah = new Zmat(A.nc, A.nr);
for (int i = 0; i < A.nr; i++)
for (int j = 0; j < A.nc; j++) {
Ah.re[j][i] = A.re[i][j];
Ah.im[j][i] = -A.im[i][j];
}
return Ah;
}
Zmat Ah = new Zmat(A.nc, A.nr);
for (int i=0; i<A.nr; i++)
for (int j=0; j<A.nc; j++){
Ah.re[j][i] = A.re[i][j];
Ah.im[j][i] = -A.im[i][j];
}
return Ah;
}
/**
* Returns the conjugate transpose of a Zdiagmat.
*
* @param D
* The matrix to be conjugated (and transposed)
* @return The conjugate transpose of D
*/
/**
Returns the conjugate transpose of a Zdiagmat.
@param D The matrix to be conjugated (and transposed)
@return The conjugate transpose of D
*/
public static Zdiagmat o(Zdiagmat D) {
public static Zdiagmat o(Zdiagmat D){
Zdiagmat Dh = new Zdiagmat(D);
for (int i = 0; i < Dh.n; i++) {
Dh.im[i] = -Dh.im[i];
}
return Dh;
}
Zdiagmat Dh = new Zdiagmat(D);
for (int i=0; i<Dh.n; i++){
Dh.im[i] = -Dh.im[i];
}
return Dh;
}
/**
* Returns the transpose of a Zmat.
*
* @param A
* The matrix to be transposed
* @return The transpose of A
*/
public static Zmat trans(Zmat A) {
/**
Returns the transpose of a Zmat.
@param A The matrix to be transposed
@return The transpose of A
*/
public static Zmat trans(Zmat A)
{
Zmat Ah = new Zmat(A.nc, A.nr);
for (int i=0; i<A.nr; i++)
for (int j=0; j<A.nc; j++){
Ah.re[j][i] = A.re[i][j];
Ah.im[j][i] = A.im[i][j];
}
return Ah;
}
Zmat Ah = new Zmat(A.nc, A.nr);
for (int i = 0; i < A.nr; i++)
for (int j = 0; j < A.nc; j++) {
Ah.re[j][i] = A.re[i][j];
Ah.im[j][i] = A.im[i][j];
}
return Ah;
}
}
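
A short sketch of H in the same test-class style (HDemo is invented): conjugate transposing twice should reproduce the original matrix, so the printed residual norm is zero.

package Jampack;

class HDemo {
	public static void main(String[] args) throws JampackException {
		Parameters.setBaseIndex(1);
		// Fill a small complex matrix with arbitrary entries.
		Zmat A = new Zmat(3, 4);
		for (int i = A.bx; i <= A.rx; i++) {
			for (int j = A.bx; j <= A.cx; j++) {
				A.put(i, j, new Z(i + j, i - j));
			}
		}
		// H.o(H.o(A)) equals A up to rounding; print the Frobenius norm of the difference.
		Print.o(Norm.fro(Minus.o(H.o(H.o(A)), A)));
	}
}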

View file

@@ -1,378 +1,343 @@
package Jampack;
/**
House provides static methods to generate and apply Householder
transformations.
@version Pre-alpha
@author G. W. Stewart
*/
public class House{
/**
Generates a Householder transformation from within the part of
column c of a Zmat (altered) extending from rows
r1 to r2. The method overwrites the
column with the result of applying the transformation.
@param A The matrix from which the transformation
is to be generated (altered)
@param r1 The index of the row in which the generating column
begins
@param r2 The index of the row in which the generating column
ends
@param c The index of the generating column
@return A Z1 of length r2-r1+1
containing the Householder vector
@exception JampackException
Passed from below.
*/
public static Z1 genc(Zmat A, int r1, int r2, int c)
throws JampackException{
int i, ru;
double norm;
double s;
Z scale;
Z t = new Z();
Z t1 = new Z();
c = c - A.basex;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
ru = r2-r1+1;
Z1 u = new Z1(r2-r1+1);
for (i=r1; i<=r2; i++){
u.put(i-r1, A.re[i][c], A.im[i][c]) ;
A.re[i][c] = 0.0;
A.im[i][c] = 0.0;
}
norm = Norm.fro(u);
if (r1 == r2 || norm == 0){
A.re[r1][c] = -u.re[0];
A.im[r1][c] = -u.im[0];
u.put(0, Math.sqrt(2), 0);
return u;
}
scale = new Z(1/norm, 0);
if (u.re[0] != 0 || u.im[0] != 0){
t = u.get(0);
scale.Times(scale, t.Div(t1.Conj(t), Z.abs(t)));
}
A.put(r1+A.basex, c+A.basex, t.Minus(t.Div(Z.ONE, scale)));
for (i=0; i<ru; i++){
u.Times(i, scale);
}
u.re[0] = u.re[0] + 1;
u.im[0] = 0;
s = Math.sqrt(1/u.re[0]);
for (i=0; i<ru; i++){
u.re[i] = s*u.re[i];
u.im[i] = s*u.im[i];
}
return u;
}
/**
Generates a Householder transformation from within the part of row
r of a Zmat (altered) extending from columns c1 to
c2. The method overwrites the row with the result
of applying the transformation.
@param A The matrix from which the transformation
is to be generated (altered)
@param r The index of the generating row
@param c1 The index of the column in which the generating row
begins
@param c2 The index of the column in which the generating row
ends
@return A Z1 of length r2-r1+1
containing the Householder vector
@exception JampackException
Passed from below.
*/
public static Z1 genr(Zmat A, int r, int c1, int c2)
throws JampackException{
int j, cu;
double norm, s;
Z scale;
Z t = new Z();
Z t1 = new Z();
r = r - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
cu = c2-c1+1;
Z1 u = new Z1(cu);
for (j=c1; j<=c2; j++){
u.put(j-c1, A.re[r][j], A.im[r][j]);
A.re[r][j] = 0.0;
A.im[r][j] = 0.0;
}
norm = Norm.fro(u);
if (c1 == c2 || norm == 0){
A.re[r][c1] = -u.re[0];
A.im[r][c1] = -u.im[0];
u.put(0, Math.sqrt(2), 0);
return u;
}
scale = new Z(1/norm, 0);
if (u.re[0] != 0 || u.im[0] != 0){
t = u.get(0);
scale.Times(scale, t.Div(t1.Conj(t), Z.abs(t)));
}
A.put(r+A.basex, c1+A.basex, t.Minus(t.Div(Z.ONE, scale)));
for (j=0; j<cu; j++){
u.Times(j, scale);
}
u.re[0] = u.re[0] + 1;
u.im[0] = 0;
s = Math.sqrt(1/u.re[0]);
for (j=0; j<cu; j++){
u.re[j] = s*u.re[j];
u.im[j] = -s*u.im[j];
}
return u;
}
/**
Premultiplies the Householder transformation contained in a
Z1 into a Zmat A[r1:r2,c1:c2] and overwrites
Zmat A[r1:r2,c1:c2] with the results. If r1 &gt; r2
or c1 &gt; c2 the method does nothing.
@param u The Householder vector
@param A The Zmat to which the transformation
is to be applied (altered)
@param r1 The index of the first row to which the transformation
is to be applied
@param r2 The index of the last row to which the transformation
is to be applied
@param c1 The index of the first column to which the transformation
is index of the to be applied
@param c2 The index of the last column to which the transformation
is to be applied
@param v A work array of length at least c2-c1+1
@return The transformed Zmat A
@exception JampackException
Thrown if either u or v is too short.
*/
public static Zmat ua(Z1 u, Zmat A, int r1, int r2, int c1, int c2, Z1 v)
throws JampackException{
int i, j, ru;
if (r2 < r1 || c2 < c1){
return A;
}
if (r2-r1+1 > u.n){
throw new JampackException
("Householder vector too short.");
}
if (c2-c1+1 > v.n){
throw new JampackException
("Work vector too short.");
}
A.dirty = true;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
for (j=c1; j<=c2; j++){
v.re[j-c1] = 0;
v.im[j-c1] = 0;
}
for (i=r1; i<=r2; i++){
for (j=c1; j<=c2; j++){
v.re[j-c1] = v.re[j-c1] +
u.re[i-r1]*A.re[i][j] + u.im[i-r1]*A.im[i][j];
v.im[j-c1] = v.im[j-c1] +
u.re[i-r1]*A.im[i][j] - u.im[i-r1]*A.re[i][j];
}
}
for (i=r1; i<=r2; i++){
for (j=c1; j<=c2; j++){
A.re[i][j] = A.re[i][j] -
u.re[i-r1]*v.re[j-c1] + u.im[i-r1]*v.im[j-c1];
A.im[i][j] = A.im[i][j] -
u.re[i-r1]*v.im[j-c1] - u.im[i-r1]*v.re[j-c1];
}
}
return A;
}
/**
Premultiplies the Householder transformation contained in a
Z1 into a Zmat A[r1:r2,c1:c2] and overwrites
Zmat A[r1:r2,c1:c2] with the results. If r1 &gt; r2
or c1 &gt; c2 the method does nothing.
@param u The Householder vector
@param A The Zmat to which the transformation
is to be applied (altered)
@param r1 The index of the first row to which the transformation
is to be applied
@param r2 The index of the last row to which the transformation
is to be applied
@param c1 The index of the first column to which the transformation
is to be applied
@param c2 The index of the last column to which the transformation
is to be applied
@return The transformed Zmat A
@exception JampackException
Passed from below.
*/
public static Zmat ua(Z1 u, Zmat A, int r1, int r2, int c1, int c2)
throws JampackException{
if (c1 > c2){
return A;
}
return ua(u, A, r1, r2, c1, c2, new Z1(c2-c1+1));
}
/**
Postmultiplies the Householder transformation contained in a
Z1 into a Zmat A[r1:r2,c1:c2] and overwrites
Zmat A[r1:r2,c1:c2] with the results. If r1 &gt; r2
or c1 &gt; c2 the method does nothing.
@param u The Householder vector
@param A The Zmat to which the transformation
is to be applied (altered)
@param r1 The index of the first row to which the transformation
is to be applied
@param r2 The index of the last row to which the transformation
is to be applied
@param c1 The index of the first column to which the transformation
is to be applied
@param c2 The index of the last column to which the transformation
is to be applied
@param v A work array of length at least r2-r1+1
@return The transformed Zmat A
@exception JampackException
Thrown if either u or v is too short.
*/
public static Zmat au(Zmat A, Z1 u, int r1, int r2, int c1, int c2, Z1 v)
throws JampackException{
int i, j, cu;
if(r2 < r1 || c2 < c1){
return A;
}
if (c2-c1+1 > u.n){
throw new JampackException
("Householder vector too short.");
}
if (r2-r1+1 > v.n){
throw new JampackException
("Work vector too short.");
}
A.dirty = true;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
for (i=r1; i<=r2; i++){
v.put(i-r1, 0, 0);
for (j=c1; j<=c2; j++){
v.re[i-r1] = v.re[i-r1] +
A.re[i][j]*u.re[j-c1] - A.im[i][j]*u.im[j-c1];
v.im[i-r1] = v.im[i-r1] +
A.re[i][j]*u.im[j-c1] + A.im[i][j]*u.re[j-c1];
}
}
for (i=r1; i<=r2; i++){
for (j=c1; j<=c2; j++){
A.re[i][j] = A.re[i][j] -
v.re[i-r1]*u.re[j-c1] - v.im[i-r1]*u.im[j-c1];
A.im[i][j] = A.im[i][j] +
v.re[i-r1]*u.im[j-c1] - v.im[i-r1]*u.re[j-c1];
}
}
return A;
}
/**
Postmultiplies the Householder transformation contained in a
Z1 into a Zmat A[r1:r2,c1:c2] and overwrites
Zmat A[r1:r2,c1:c2] with the results. If r1 &gt; r2
or c1 &gt; c2 the method does nothing.
@param u The Householder vector
@param A The Zmat to which the transformation
is to be applied (altered)
@param r1 The index of the first row to which the transformation
is to be applied
@param r2 The index of the last row to which the transformation
is to be applied
@param c1 The index of the first column to which the transformation
is to be applied
@param c2 The index of the last column to which the transformation
is to be applied
@return The transformed Zmat A
@exception JampackException
Passed from below.
*/
public static Zmat au(Zmat A, Z1 u, int r1, int r2, int c1, int c2)
throws JampackException{
if(r2 < r1){
return A;
}
return au(A, u, r1, r2, c1, c2, new Z1(r2-r1+1));
}
* House provides static methods to generate and apply Householder transformations.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
public class House {
/**
* Generates a Householder transformation from within the part of column c of a Zmat (altered) extending from rows r1 to r2.
* The method overwrites the column with the result of applying the transformation.
*
* @param A
* The matrix from which the transformation is to be generated (altered)
* @param r1
* The index of the row in which the generating column begins
* @param r2
* The index of the row in which the generating column ends
* @param c
* The index of the generating column
* @return A Z1 of length r2-r1+1 containing the Householder vector
* @exception JampackException
* Passed from below.
*/
public static Z1 genc(Zmat A, int r1, int r2, int c) throws JampackException {
int i, ru;
double norm;
double s;
Z scale;
Z t = new Z();
Z t1 = new Z();
c = c - A.basex;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
ru = r2 - r1 + 1;
Z1 u = new Z1(r2 - r1 + 1);
for (i = r1; i <= r2; i++) {
u.put(i - r1, A.re[i][c], A.im[i][c]);
A.re[i][c] = 0.0;
A.im[i][c] = 0.0;
}
norm = Norm.fro(u);
if (r1 == r2 || norm == 0) {
A.re[r1][c] = -u.re[0];
A.im[r1][c] = -u.im[0];
u.put(0, Math.sqrt(2), 0);
return u;
}
scale = new Z(1 / norm, 0);
if (u.re[0] != 0 || u.im[0] != 0) {
t = u.get(0);
scale.Times(scale, t.Div(t1.Conj(t), Z.abs(t)));
}
A.put(r1 + A.basex, c + A.basex, t.Minus(t.Div(Z.ONE, scale)));
for (i = 0; i < ru; i++) {
u.Times(i, scale);
}
u.re[0] = u.re[0] + 1;
u.im[0] = 0;
s = Math.sqrt(1 / u.re[0]);
for (i = 0; i < ru; i++) {
u.re[i] = s * u.re[i];
u.im[i] = s * u.im[i];
}
return u;
}
/**
* Generates a Householder transformation from within the part of row r of a Zmat (altered) extending from columns c1 to c2.
* The method overwrites the row with the result of applying the transformation.
*
* @param A
* The matrix from which the transformation is to be generated (altered)
* @param r
* The index of the generating row
* @param c1
* The index of the column in which the generating row begins
* @param c2
* The index of the column in which the generating row ends
* @return A Z1 of length c2-c1+1 containing the Householder vector
* @exception JampackException
* Passed from below.
*/
public static Z1 genr(Zmat A, int r, int c1, int c2) throws JampackException {
int j, cu;
double norm, s;
Z scale;
Z t = new Z();
Z t1 = new Z();
r = r - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
cu = c2 - c1 + 1;
Z1 u = new Z1(cu);
for (j = c1; j <= c2; j++) {
u.put(j - c1, A.re[r][j], A.im[r][j]);
A.re[r][j] = 0.0;
A.im[r][j] = 0.0;
}
norm = Norm.fro(u);
if (c1 == c2 || norm == 0) {
A.re[r][c1] = -u.re[0];
A.im[r][c1] = -u.im[0];
u.put(0, Math.sqrt(2), 0);
return u;
}
scale = new Z(1 / norm, 0);
if (u.re[0] != 0 || u.im[0] != 0) {
t = u.get(0);
scale.Times(scale, t.Div(t1.Conj(t), Z.abs(t)));
}
A.put(r + A.basex, c1 + A.basex, t.Minus(t.Div(Z.ONE, scale)));
for (j = 0; j < cu; j++) {
u.Times(j, scale);
}
u.re[0] = u.re[0] + 1;
u.im[0] = 0;
s = Math.sqrt(1 / u.re[0]);
for (j = 0; j < cu; j++) {
u.re[j] = s * u.re[j];
u.im[j] = -s * u.im[j];
}
return u;
}
/**
* Premultiplies the Householder transformation contained in a Z1 into a Zmat A[r1:r2,c1:c2] and overwrites Zmat
* A[r1:r2,c1:c2] with the results. If r1 &gt; r2 or c1 &gt; c2 the method does nothing.
*
* @param u
* The Householder vector
* @param A
* The Zmat to which the transformation is to be applied (altered)
* @param r1
* The index of the first row to which the transformation is to be applied
* @param r2
* The index of the last row to which the transformation is to be applied
* @param c1
* The index of the first column to which the transformation is to be applied
* @param c2
* The index of the last column to which the transformation is to be applied
* @param v
* A work array of length at least c2-c1+1
* @return The transformed Zmat A
* @exception JampackException
* Thrown if either u or v is too short.
*/
public static Zmat ua(Z1 u, Zmat A, int r1, int r2, int c1, int c2, Z1 v) throws JampackException {
int i, j, ru;
if (r2 < r1 || c2 < c1) {
return A;
}
if (r2 - r1 + 1 > u.n) {
throw new JampackException("Householder vector too short.");
}
if (c2 - c1 + 1 > v.n) {
throw new JampackException("Work vector too short.");
}
A.dirty = true;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
for (j = c1; j <= c2; j++) {
v.re[j - c1] = 0;
v.im[j - c1] = 0;
}
for (i = r1; i <= r2; i++) {
for (j = c1; j <= c2; j++) {
v.re[j - c1] = v.re[j - c1] + u.re[i - r1] * A.re[i][j] + u.im[i - r1] * A.im[i][j];
v.im[j - c1] = v.im[j - c1] + u.re[i - r1] * A.im[i][j] - u.im[i - r1] * A.re[i][j];
}
}
for (i = r1; i <= r2; i++) {
for (j = c1; j <= c2; j++) {
A.re[i][j] = A.re[i][j] - u.re[i - r1] * v.re[j - c1] + u.im[i - r1] * v.im[j - c1];
A.im[i][j] = A.im[i][j] - u.re[i - r1] * v.im[j - c1] - u.im[i - r1] * v.re[j - c1];
}
}
return A;
}
/**
* Premultiplies the Householder transformation contained in a Z1 into a Zmat A[r1:r2,c1:c2] and overwrites Zmat
* A[r1:r2,c1:c2] with the results. If r1 &gt; r2 or c1 &gt; c2 the method does nothing.
*
* @param u
* The Householder vector
* @param A
* The Zmat to which the transformation is to be applied (altered)
* @param r1
* The index of the first row to which the transformation is to be applied
* @param r2
* The index of the last row to which the transformation is to be applied
* @param c1
* The index of the first column to which the transformation is to be applied
* @param c2
* The index of the last column to which the transformation is to be applied
* @return The transformed Zmat A
* @exception JampackException
* Passed from below.
*/
public static Zmat ua(Z1 u, Zmat A, int r1, int r2, int c1, int c2) throws JampackException {
if (c1 > c2) {
return A;
}
return ua(u, A, r1, r2, c1, c2, new Z1(c2 - c1 + 1));
}
/**
* Postmultiplies the Householder transformation contained in a Z1 into a Zmat A[r1:r2,c1:c2] and overwrites Zmat
* A[r1:r2,c1:c2] with the results. If r1 &gt; r2 or c1 &gt; c2 the method does nothing.
*
* @param u
* The Householder vector
* @param A
* The Zmat to which the transformation is to be applied (altered)
* @param r1
* The index of the first row to which the transformation is to be applied
* @param r2
* The index of the last row to which the transformation is to be applied
* @param c1
* The index of the first column to which the transformation is to be applied
* @param c2
* The index of the last column to which the transformation is to be applied
* @param v
* A work array of length at least r2-r1+1
* @return The transformed Zmat A
* @exception JampackException
* Thrown if either u or v is too short.
*/
public static Zmat au(Zmat A, Z1 u, int r1, int r2, int c1, int c2, Z1 v) throws JampackException {
int i, j, cu;
if (r2 < r1 || c2 < c1) {
return A;
}
if (c2 - c1 + 1 > u.n) {
throw new JampackException("Householder vector too short.");
}
if (r2 - r1 + 1 > v.n) {
throw new JampackException("Work vector too short.");
}
A.dirty = true;
r1 = r1 - A.basex;
r2 = r2 - A.basex;
c1 = c1 - A.basex;
c2 = c2 - A.basex;
for (i = r1; i <= r2; i++) {
v.put(i - r1, 0, 0);
for (j = c1; j <= c2; j++) {
v.re[i - r1] = v.re[i - r1] + A.re[i][j] * u.re[j - c1] - A.im[i][j] * u.im[j - c1];
v.im[i - r1] = v.im[i - r1] + A.re[i][j] * u.im[j - c1] + A.im[i][j] * u.re[j - c1];
}
}
for (i = r1; i <= r2; i++) {
for (j = c1; j <= c2; j++) {
A.re[i][j] = A.re[i][j] - v.re[i - r1] * u.re[j - c1] - v.im[i - r1] * u.im[j - c1];
A.im[i][j] = A.im[i][j] + v.re[i - r1] * u.im[j - c1] - v.im[i - r1] * u.re[j - c1];
}
}
return A;
}
/**
* Postmultiplies the Householder transformation contained in a Z1 into a Zmat A[r1:r2,c1:c2] and overwrites Zmat
* A[r1:r2,c1:c2] with the results. If r1 &gt; r2 or c1 &gt; c2 the method does nothing.
*
* @param u
* The Householder vector
* @param A
* The Zmat to which the transformation is to be applied (altered)
* @param r1
* The index of the first row to which the transformation is to be applied
* @param r2
* The index of the last row to which the transformation is to be applied
* @param c1
* The index of the first column to which the transformation is to be applied
* @param c2
* The index of the last column to which the transformation is to be applied
* @return The transformed Zmat A
* @exception JampackException
* Passed from below.
*/
public static Zmat au(Zmat A, Z1 u, int r1, int r2, int c1, int c2) throws JampackException {
if (r2 < r1) {
return A;
}
return au(A, u, r1, r2, c1, c2, new Z1(r2 - r1 + 1));
}
}
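
A minimal usage sketch for the reformatted House class, written in the same package-level style as the HouseTest driver below. The class name HouseDemo and the sample matrix are invented for illustration; the calls to genc, ua, Parameters, Zmat, Z1 and Print all appear in this diff.

package Jampack;

/* Illustrative sketch only; HouseDemo is not part of this commit. */
class HouseDemo {

    public static void main(String[] args) throws JampackException {

        Parameters.setBaseIndex(0);

        int n = 4;
        Z[][] a = new Z[n][n];
        for (int i = 0; i < n; i++)
            for (int j = 0; j < n; j++)
                a[i][j] = new Z(i + 1, j - i);
        Zmat A = new Zmat(a);

        // genc overwrites column 0 with the result of applying the
        // transformation and returns the Householder vector u.
        Z1 u = House.genc(A, A.bx, A.rx, A.bx);

        // Premultiply the same transformation into the remaining columns,
        // which is one step of a QR-style reduction.
        House.ua(u, A, A.bx, A.rx, A.bx + 1, A.cx);

        Print.o(A);
    }
}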

View file

@@ -1,69 +1,68 @@
package Jampack;
class HouseTest{
class HouseTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
if (args[0].equals("genc") || args[0].equals("ua")){
if (args[0].equals("genc") || args[0].equals("ua")) {
int i, j, n=5;
int i, j, n = 5;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(Aa);
Zmat AA = new Zmat(A);
Zmat A = new Zmat(Aa);
Zmat AA = new Zmat(A);
Zmat A2 = A.get(A.bx, A.rx, 2, 2);
Z1 u = House.genc(A, A.bx, A.rx, 2);
Zmat uu = new Zmat(n, 1);
for (i=uu.bx; i<=uu.rx; i++)
uu.put(i, uu.bx, u.get(i-uu.bx));
Zmat U = Minus.o(Eye.o(n), Times.o(uu, H.o(uu)));
Print.o(Norm.fro(Minus.o(Eye.o(n), Times.o(U,U))));
Print.o(Norm.fro(Minus.o(A.get(A.bx, A.rx, 2, 2),Times.o(U, A2))));
if (args[0].equals("ua")){
Zmat B = Times.o(U, AA);
Zmat C = House.ua(u, AA, AA.bx, AA.rx, AA.bx, AA.rx);
Print.o(Norm.fro(Minus.o(B, C)));
}
}
Zmat A2 = A.get(A.bx, A.rx, 2, 2);
Z1 u = House.genc(A, A.bx, A.rx, 2);
Zmat uu = new Zmat(n, 1);
for (i = uu.bx; i <= uu.rx; i++)
uu.put(i, uu.bx, u.get(i - uu.bx));
Zmat U = Minus.o(Eye.o(n), Times.o(uu, H.o(uu)));
Print.o(Norm.fro(Minus.o(Eye.o(n), Times.o(U, U))));
Print.o(Norm.fro(Minus.o(A.get(A.bx, A.rx, 2, 2), Times.o(U, A2))));
if (args[0].equals("ua")) {
Zmat B = Times.o(U, AA);
Zmat C = House.ua(u, AA, AA.bx, AA.rx, AA.bx, AA.rx);
Print.o(Norm.fro(Minus.o(B, C)));
}
}
else if (args[0].equals("genr") || args[0].equals("au")){
else if (args[0].equals("genr") || args[0].equals("au")) {
int i, j, n=5;
int i, j, n = 5;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(Aa);
Zmat AA = new Zmat(A);
Zmat A = new Zmat(Aa);
Zmat AA = new Zmat(A);
Zmat A2 = A.get(2,2,A.bx,A.cx);
Z1 u = House.genr(A, 2, A.bx, A.cx);
Zmat uu = new Zmat(n, 1);
for (i=uu.bx; i<=uu.rx; i++)
uu.put(i, uu.bx, u.get(i-uu.bx));
Zmat U = Minus.o(Eye.o(n), Times.o(uu, H.o(uu)));
Print.o(Norm.fro(Minus.o(Eye.o(n), Times.o(U,U))));
Print.o(Norm.fro(Minus.o(A.get(2, 2, A.bx, A.cx),Times.o(A2,U))));
if (args[0].equals("au")){
Zmat B = Times.o(AA,U);
Zmat C = House.au(AA, u, A.bx, A.rx, A.bx, A.cx);
Print.o(Norm.fro(Minus.o(B, C)));
}
}
}
Zmat A2 = A.get(2, 2, A.bx, A.cx);
Z1 u = House.genr(A, 2, A.bx, A.cx);
Zmat uu = new Zmat(n, 1);
for (i = uu.bx; i <= uu.rx; i++)
uu.put(i, uu.bx, u.get(i - uu.bx));
Zmat U = Minus.o(Eye.o(n), Times.o(uu, H.o(uu)));
Print.o(Norm.fro(Minus.o(Eye.o(n), Times.o(U, U))));
Print.o(Norm.fro(Minus.o(A.get(2, 2, A.bx, A.cx), Times.o(A2, U))));
if (args[0].equals("au")) {
Zmat B = Times.o(AA, U);
Zmat C = House.au(AA, u, A.bx, A.rx, A.bx, A.cx);
Print.o(Norm.fro(Minus.o(B, C)));
}
}
}
}

View file

@@ -1,126 +1,124 @@
package Jampack;
/**
Inv computes the inverse of a matrix.
<p>
Comments: Inv computes the inverse of A by using Solve to solve
the system AX = I. This is inefficient, though
not inordinately so. Eventually these methods will be
replaced.
* Inv computes the inverse of a matrix.
* <p>
* Comments: Inv computes the inverse of A by using Solve to solve the system AX = I. This is inefficient, though not inordinately
* so. Eventually these methods will be replaced.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Inv {
public class Inv{
/**
* Computes the inverse of a Zltmat.
*
* @param L
* The Zltmat
* @return The inverse of L
* @exception JampackException
* Thrown if L is not square.<br>
* Passed from below.
*/
/**
Computes the inverse of a Zltmat.
@param L The Zltmat
@return The inverse of L
@exception JampackException
Thrown if L is not square.<br>
Passed from below.
*/
public static Zltmat o(Zltmat L) throws JampackException {
public static Zltmat o(Zltmat L)
throws JampackException{
if (L.nrow != L.ncol)
throw new JampackException("Cannot compute the inverse of a rectangular matrix.");
return new Zltmat(Solve.aib(L, Eye.o(L.nrow)));
}
if (L.nrow != L.ncol)
throw new JampackException
("Cannot compute the inverse of a rectangular matrix.");
return new Zltmat(Solve.aib(L, Eye.o(L.nrow)));
}
/**
* Computes the inverse of a Zutmat.
*
* @param U
* The Zutmat
* @return The inverse of U
* @exception JampackException
* Thrown if U is not square.<br>
* Passed from below.
*/
/**
Computes the inverse of a Zutmat.
@param U The Zutmat
@return The inverse of U
@exception JampackException
Thrown if U is not square.<br>
Passed from below.
*/
public static Zutmat o(Zutmat U) throws JampackException {
public static Zutmat o(Zutmat U)
throws JampackException{
if (U.nrow != U.ncol)
throw new JampackException("Cannot compute the inverse of a rectangular matrix.");
if (U.nrow != U.ncol)
throw new JampackException
("Cannot compute the inverse of a rectangular matrix.");
return new Zutmat(Solve.aib(U, Eye.o(U.nrow)));
}
return new Zutmat(Solve.aib(U, Eye.o(U.nrow)));
}
/**
* Computes the inverse of a square Zmat
*
* @param A
* The Zmat
* @return The inverse of A
* @exception JampackException
* Thrown if A is not square.<br>
* Passed from below.
*/
/**
Computes the inverse of a square Zmat
@param A The Zmat
@return The inverse of A
@exception JampackException
Thrown if A is not square.<br>
Passed from below.
*/
public static Zmat o(Zmat A) throws JampackException {
public static Zmat o(Zmat A)
throws JampackException{
if (A.nrow != A.ncol)
throw new JampackException("Cannot compute the inverse of a rectangular matrix.");
if (A.nrow != A.ncol)
throw new JampackException
("Cannot compute the inverse of a rectangular matrix.");
return Solve.aib(A, Eye.o(A.nrow));
}
return Solve.aib(A, Eye.o(A.nrow));
}
/**
* Computes the inverse of a Zpsdmat.
*
* @param A
* The Zpsdmat
* @return The inverse of A
* @exception JampackException
* Thrown if A is not square.<br>
* Passed from below.
*/
/**
Computes the inverse of a Zpsdmat.
@param A The Zpsdmat
@return The inverse of A
@exception JampackException
Thrown if A is not square.<br>
Passed from below.
*/
public static Zpsdmat o(Zpsdmat A) throws JampackException {
public static Zpsdmat o(Zpsdmat A)
throws JampackException{
if (A.nrow != A.ncol)
throw new JampackException("Cannot compute the inverse of a rectangular matrix.");
if (A.nrow != A.ncol)
throw new JampackException
("Cannot compute the inverse of a rectangular matrix.");
Zpsdmat B = new Zpsdmat(Solve.aib(A, Eye.o(A.nrow)));
Zpsdmat B = new Zpsdmat( Solve.aib(A, Eye.o(A.nrow)));
for (int i = 0; i < B.ncol; i++) {
for (int j = i + 1; j < B.ncol; j++) {
B.re[j][i] = B.re[i][j];
B.im[j][i] = -B.im[i][j];
}
B.im[i][i] = 0.0;
}
return B;
}
for (int i=0; i<B.ncol; i++){
for (int j=i+1; j<B.ncol; j++){
B.re[j][i] = B.re[i][j];
B.im[j][i] = -B.im[i][j];
}
B.im[i][i] = 0.0;
}
return B;
}
/**
* Computes the inverse of a Zdiagmat.
*
* @param D
* The Zdiagmat
* @return The inverse of D
* @exception JampackException
* Thrown if D is singular.
*/
/**
Computes the inverse of a Zdiagmat.
@param D The Zdiagmat
@return The inverse of D
@exception JampackException
Thrown if D is singular.
*/
public static Zdiagmat o(Zdiagmat D) throws JampackException {
public static Zdiagmat o(Zdiagmat D)
throws JampackException{
Zdiagmat Di = new Zdiagmat(D.n);
for (int i = 0; i < D.order; i++) {
Z d = new Z(D.re[i], D.im[i]);
if (d.re == 0 && d.im == 0) {
throw new JampackException("Singular matrix.");
}
d.Div(Z.ONE, d);
Di.re[i] = d.re;
Di.im[i] = d.im;
}
Zdiagmat Di = new Zdiagmat(D.n);
for (int i=0; i<D.order; i++){
Z d = new Z(D.re[i], D.im[i]);
if(d.re==0 && d.im==0){
throw new JampackException
("Singular matrix.");
}
d.Div(Z.ONE, d);
Di.re[i] = d.re;
Di.im[i] = d.im;
}
return Di;
}
return Di;
}
}
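
Because Inv.o obtains the inverse by solving AX = I, the natural sanity check is the residual of I - A*inv(A), which is what InvTest below prints. A hedged sketch along those lines; InvDemo and its diagonally dominant test matrix are invented, while the Inv, Eye, Times, Minus, Norm and Print calls are shown elsewhere in this commit.

package Jampack;

/* Illustrative sketch only; InvDemo is not part of this commit. */
class InvDemo {

    public static void main(String[] args) throws JampackException {

        int n = 4;
        Z[][] a = new Z[n][n];
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++)
                a[i][j] = new Z(1, 0);
            a[i][i] = new Z(n + 1, 0); // strictly diagonally dominant, hence nonsingular
        }
        Zmat A = new Zmat(a);

        Zmat Ai = Inv.o(A); // internally solves AX = I

        // The residual should be at roundoff level for this well-conditioned A.
        Print.o(Norm.fro(Minus.o(Eye.o(n), Times.o(A, Ai))));
    }
}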

View file

@@ -1,102 +1,100 @@
package Jampack;
class InvTest{
class InvTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
// Parameters.setBaseIndex(0);
// Parameters.setBaseIndex(0);
if (args[0].equals("zli")){
if (args[0].equals("zli")) {
int i, j, n=3;
int i, j, n = 3;
Z La[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j <= i)
La[i][j] = new Z(i+1,j+1);
else
La[i][j] = new Z(0,0);
}
Z La[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j <= i)
La[i][j] = new Z(i + 1, j + 1);
else
La[i][j] = new Z(0, 0);
}
Zltmat L = new Zltmat(La);
Zltmat L = new Zltmat(La);
Zltmat Li = Inv.o(L);
Print.o(Li);
Print.o(Norm.fro(Minus.o(Eye.o(L.nr),Times.o(L,Li))));
}
if (args[0].equals("zui")){
Zltmat Li = Inv.o(L);
Print.o(Li);
Print.o(Norm.fro(Minus.o(Eye.o(L.nr), Times.o(L, Li))));
}
if (args[0].equals("zui")) {
int i, j, n=4;
int i, j, n = 4;
Z Ua[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j >= i)
Ua[i][j] = new Z(i+1,j+1);
else
Ua[i][j] = new Z(0,0);
}
Z Ua[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j >= i)
Ua[i][j] = new Z(i + 1, j + 1);
else
Ua[i][j] = new Z(0, 0);
}
Zutmat U = new Zutmat(Ua);
Zutmat U = new Zutmat(Ua);
Zutmat Ui = Inv.o(U);
Zutmat Ui = Inv.o(U);
Print.o(Ui);
Print.o(Norm.fro(Minus.o(Eye.o(U.nr),Times.o(U,Ui))));
}
Print.o(Ui);
Print.o(Norm.fro(Minus.o(Eye.o(U.nr), Times.o(U, Ui))));
}
else if (args[0].equals("zai")){
int i, j, n=5;
else if (args[0].equals("zai")) {
int i, j, n = 5;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(.1,.1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(.1, .1);
}
Zmat A = new Zmat(Aa);
Zmat A = new Zmat(Aa);
Zmat Ai = Inv.o(A);
Zmat Ai = Inv.o(A);
Print.o(Ai);
Print.o(Norm.fro(Minus.o(Eye.o(A.nr),Times.o(A,Ai))));
Print.o(Ai);
Print.o(Norm.fro(Minus.o(Eye.o(A.nr), Times.o(A, Ai))));
}
else if (args[0].equals("zpdai")){
int i, j, n=5;
} else if (args[0].equals("zpdai")) {
int i, j, n = 5;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(.1,.1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(.1, .1);
}
Zmat B = new Zmat(Aa);
Zpsdmat A = new Zpsdmat(Times.aha(B));
Zpsdmat Ai = Inv.o(A);
Zmat B = new Zmat(Aa);
Zpsdmat A = new Zpsdmat(Times.aha(B));
Zpsdmat Ai = Inv.o(A);
Print.o(Ai);
Print.o(Norm.fro(Minus.o(Eye.o(A.nr),Times.o(A,Ai))));
Print.o(Ai);
Print.o(Norm.fro(Minus.o(Eye.o(A.nr), Times.o(A, Ai))));
}
}
else if (args[0].equals("zdi")){
else if (args[0].equals("zdi")) {
int n=3;
int n = 3;
Zdiagmat D = new Zdiagmat(n);
Zdiagmat D = new Zdiagmat(n);
for (int i=0; i<n; i++){
D.re[i] = i;
D.im[i] = i+3;
}
for (int i = 0; i < n; i++) {
D.re[i] = i;
D.im[i] = i + 3;
}
Zdiagmat Di = Inv.o(D);
Print.o(Times.o(Di, D));
}
}
Zdiagmat Di = Inv.o(D);
Print.o(Times.o(Di, D));
}
}
}

View file

@@ -1,16 +1,19 @@
package Jampack;
/**
This is the exception class for Jampack. Since
most errors in matrix algorithms are unrecoverable,
the standard response is to pass an error message
up the line.
* This is the exception class for Jampack. Since most errors in matrix algorithms are unrecoverable, the standard response is to
* pass an error message up the line.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class JampackException extends Exception {
public JampackException() {
super();
}
public class JampackException extends Exception{
public JampackException(){super();}
public JampackException(String s){super(s);}
public JampackException(String s) {
super(s);
}
}
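
Almost every Jampack entry point declares throws JampackException, so a caller either propagates it or catches it once near the top. A small sketch of the catching style; ExceptionDemo is an invented name, and the deliberately nonconforming Minus.o call is taken from the Minus class later in this diff.

package Jampack;

/* Illustrative sketch only; ExceptionDemo is not part of this commit. */
class ExceptionDemo {

    public static void main(String[] args) {
        try {
            Zmat A = new Zmat(2, 3);
            Zmat B = new Zmat(3, 2);
            Minus.o(A, B); // deliberately nonconforming: throws JampackException
        } catch (JampackException e) {
            System.err.println("Jampack error: " + e.getMessage());
        }
    }
}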

View file

@@ -1,311 +1,307 @@
package Jampack;
/**
Merge is a class containing programs to merge matrices
into one big matrix. The basic method (Merge.o) takes
an array of Zmat's and merges them. For conformity, the Zmats
along a row of the array must have the same number of rows,
and the Zmats along a column of the array must have the same
number of columns.
<p>
For convenience a number of special routines (o12, o21, o22, o13, ...)
are provided to merge the matrices in their argument list.
* Merge is a class containing programs to merge matrices into one big matrix. The basic method (Merge.o) takes an array of Zmat's
* and merges them. For conformity, the Zmats along a row of the array must have the same number of rows, and the Zmats along a
* column of the array must have the same number of columns.
* <p>
* For convenience a number of special routines (o12, o21, o22, o13, ...) are provided to merge the matrices in their argument
* list.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Merge {
public class Merge{
/**
* Merges the matrices in an array of Zmats
*
* @param B
* The two-dimensional array of Zmats
* @return The merged Zmat
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Merges the matrices in an array of Zmats
@param B The two-dimensional array of Zmats
@return The merged Zmat
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o(Zmat[][] B) throws JampackException {
public static Zmat o(Zmat[][] B)
throws JampackException{
int bi, bi0nr, bj, b0jnc, bnc, bnr, i, il, j, jl, nc, nr;
int bi, bi0nr, bj, b0jnc, bnc, bnr, i, il, j, jl, nc, nr;
Zmat Bij;
Zmat Bij;
bnr = B.length;
bnc = B[0].length;
bnr = B.length;
bnc = B[0].length;
/*
* Compute the number of columns in the result while testing for columnwise conformity.
*/
/* Compute the number of columns in the result while
testing for columnwise conformity. */
nc = 0;
for (bj = 0; bj < bnc; bj++) {
b0jnc = B[0][bj].ncol;
for (bi = 1; bi < bnr; bi++) {
if (B[bi][bj].ncol != b0jnc) {
throw new JampackException("Blocks do not conform");
}
}
nc = nc + b0jnc;
}
nc = 0;
for (bj=0; bj<bnc; bj++){
b0jnc = B[0][bj].ncol;
for (bi=1; bi<bnr; bi++){
if (B[bi][bj].ncol != b0jnc){
throw new JampackException
("Blocks do not conform");
}
}
nc = nc + b0jnc;
}
/*
* Compute the number of rows in the result while testing for rowwise conformity.
*/
/* Compute the number of rows in the result while
testing for rowwise conformity. */
nr = 0;
for (bi = 0; bi < bnr; bi++) {
bi0nr = B[bi][0].nrow;
for (bj = 1; bj < bnc; bj++) {
if (B[bi][bj].nrow != bi0nr) {
throw new JampackException("Blocks do not conform");
}
}
nr = nr + bi0nr;
}
nr = 0;
for (bi=0; bi<bnr; bi++){
bi0nr = B[bi][0].nrow;
for (bj=1; bj<bnc; bj++){
if (B[bi][bj].nrow != bi0nr){
throw new JampackException
("Blocks do not conform");
}
}
nr = nr + bi0nr;
}
/* Merge the matrices. */
/* Merge the matrices. */
Zmat A = new Zmat(nr, nc);
Zmat A = new Zmat(nr, nc);
il = 0;
for (bi = 0; bi < bnr; bi++) {
jl = 0;
for (bj = 0; bj < bnc; bj++) {
Bij = B[bi][bj];
for (i = il; i < il + Bij.nrow; i++) {
for (j = jl; j < jl + Bij.ncol; j++) {
A.re[i][j] = Bij.re[i - il][j - jl];
A.im[i][j] = Bij.im[i - il][j - jl];
}
}
jl = jl + Bij.ncol;
}
il = il + B[bi][0].nrow;
}
il = 0;
for (bi=0; bi<bnr; bi++){
jl = 0;
for (bj=0; bj<bnc; bj++){
Bij = B[bi][bj];
for (i=il; i<il+Bij.nrow; i++){
for (j=jl; j<jl+Bij.ncol; j++){
A.re[i][j] = Bij.re[i-il][j-jl];
A.im[i][j] = Bij.im[i-il][j-jl];
}
}
jl = jl + Bij.ncol;
}
il = il + B[bi][0].nrow;
}
return A;
}
return A;
}
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 B01 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 B01 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o12(Zmat B00, Zmat B01) throws JampackException {
public static Zmat o12(Zmat B00, Zmat B01)
throws JampackException{
Zmat B[][] = new Zmat[1][2];
Zmat B[][] = new Zmat[1][2];
B[0][0] = B00;
B[0][1] = B01;
B[0][0] = B00;
B[0][1] = B01;
return Merge.o(B);
}
return Merge.o(B);
}
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 |
* | B10 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
public static Zmat o21(Zmat B00, Zmat B10) throws JampackException {
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 |
* | B10 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o21(Zmat B00,
Zmat B10)
throws JampackException{
Zmat B[][] = new Zmat[2][1];
Zmat B[][] = new Zmat[2][1];
B[0][0] = B00;
B[1][0] = B10;
B[0][0] = B00;
B[1][0] = B10;
return Merge.o(B);
}
return Merge.o(B);
}
/**
* Merges its arguments to create the matrix
*
* <pre>
* A = | B00 B01|
* | B10 B11|
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Merges its arguments to create the matrix
<pre>
* A = | B00 B01|
* | B10 B11|
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o22(Zmat B00, Zmat B01, Zmat B10, Zmat B11) throws JampackException {
public static Zmat o22(Zmat B00, Zmat B01,
Zmat B10, Zmat B11)
throws JampackException{
Zmat B[][] = new Zmat[2][2];
B[0][0] = B00;
B[0][1] = B01;
B[1][0] = B10;
B[1][1] = B11;
return Merge.o(B);
}
Zmat B[][] = new Zmat[2][2];
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 B01 B02 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
B[0][0] = B00;
B[0][1] = B01;
B[1][0] = B10;
B[1][1] = B11;
return Merge.o(B);
}
public static Zmat o13(Zmat B00, Zmat B01, Zmat B02) throws JampackException {
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 B01 B02 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
Zmat B[][] = new Zmat[1][3];
public static Zmat o13(Zmat B00, Zmat B01, Zmat B02)
throws JampackException{
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
return Merge.o(B);
}
Zmat B[][] = new Zmat[1][3];
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 B01 B02 |
* | B10 B11 B12 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
public static Zmat o23(Zmat B00, Zmat B01, Zmat B02, Zmat B10, Zmat B11, Zmat B12) throws JampackException {
return Merge.o(B);
}
Zmat B[][] = new Zmat[2][3];
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 B01 B02 |
* | B10 B11 B12 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
B[1][0] = B10;
B[1][1] = B11;
B[1][2] = B12;
*/
return Merge.o(B);
}
public static Zmat o23(Zmat B00, Zmat B01, Zmat B02,
Zmat B10, Zmat B11, Zmat B12)
throws JampackException{
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 |
* | B10 |
* | B20 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
public static Zmat o31(Zmat B00, Zmat B10, Zmat B20) throws JampackException {
Zmat B[][] = new Zmat[2][3];
Zmat B[][] = new Zmat[3][1];
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
B[1][0] = B10;
B[1][1] = B11;
B[1][2] = B12;
B[0][0] = B00;
B[1][0] = B10;
B[2][0] = B20;
return Merge.o(B);
}
return Merge.o(B);
}
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 |
* | B10 |
* | B20 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 B01 |
* | B10 B11 |
* | B20 B21 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
public static Zmat o32(Zmat B00, Zmat B01, Zmat B10, Zmat B11, Zmat B20, Zmat B21) throws JampackException {
public static Zmat o31(Zmat B00,
Zmat B10,
Zmat B20)
throws JampackException{
Zmat B[][] = new Zmat[3][2];
B[0][0] = B00;
B[0][1] = B01;
B[1][0] = B10;
B[1][1] = B11;
B[2][0] = B20;
B[2][1] = B21;
Zmat B[][] = new Zmat[3][1];
return Merge.o(B);
}
B[0][0] = B00;
B[1][0] = B10;
B[2][0] = B20;
/**
* Merges its arguments to create the Zmat
*
* <pre>
* A = | B00 B01 B02 |
* | B10 B11 B12 |
* | B20 B21 B22 |
* </pre>
*
* @param Bij
* The Zmats to be merged
* @return The composite Zmat A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
return Merge.o(B);
}
public static Zmat o33(Zmat B00, Zmat B01, Zmat B02, Zmat B10, Zmat B11, Zmat B12, Zmat B20, Zmat B21, Zmat B22)
throws JampackException {
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 B01 |
* | B10 B11 |
* | B20 B21 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o32(Zmat B00, Zmat B01,
Zmat B10, Zmat B11,
Zmat B20, Zmat B21)
throws JampackException{
Zmat B[][] = new Zmat[3][3];
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
B[1][0] = B10;
B[1][1] = B11;
B[1][2] = B12;
B[2][0] = B20;
B[2][1] = B21;
B[2][2] = B22;
Zmat B[][] = new Zmat[3][2];
B[0][0] = B00;
B[0][1] = B01;
B[1][0] = B10;
B[1][1] = B11;
B[2][0] = B20;
B[2][1] = B21;
return Merge.o(B);
}
/**
Merges its arguments to create the Zmat
<pre>
* A = | B00 B01 B02 |
* | B10 B11 B12 |
* | B20 B21 B22 |
</pre>
@param Bij The Zmats to be merged
@return The composite Zmat A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o33(Zmat B00, Zmat B01, Zmat B02,
Zmat B10, Zmat B11, Zmat B12,
Zmat B20, Zmat B21, Zmat B22)
throws JampackException{
Zmat B[][] = new Zmat[3][3];
B[0][0] = B00;
B[0][1] = B01;
B[0][2] = B02;
B[1][0] = B10;
B[1][1] = B11;
B[1][2] = B12;
B[2][0] = B20;
B[2][1] = B21;
B[2][2] = B22;
return Merge.o(B);
}
return Merge.o(B);
}
}
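
The usual round trip with Merge is to cut a matrix into conforming blocks with Zmat.get and reassemble it with one of the o* helpers. A hedged sketch; MergeDemo and the split point m are invented, while the get, put, o22, Minus and Norm calls are all shown in this commit.

package Jampack;

/* Illustrative sketch only; MergeDemo is not part of this commit. */
class MergeDemo {

    public static void main(String[] args) throws JampackException {

        Zmat A = new Zmat(6, 6);
        for (int i = A.bx; i <= A.rx; i++)
            for (int j = A.bx; j <= A.cx; j++)
                A.put(i, j, new Z(i, j));

        int m = A.bx + 2; // last row/column of the leading block (arbitrary split point)

        Zmat B = Merge.o22(A.get(A.bx, m, A.bx, m), A.get(A.bx, m, m + 1, A.cx),
                A.get(m + 1, A.rx, A.bx, m), A.get(m + 1, A.rx, m + 1, A.cx));

        // B reassembles A exactly, so the difference has norm zero.
        Print.o(Norm.fro(Minus.o(A, B)));
    }
}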

View file

@@ -1,59 +1,49 @@
package Jampack;
class MergeTest{
class MergeTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
Zmat A = new Zmat(10,9);
Zmat A = new Zmat(10, 9);
int bx = A.bx;
int rx = A.rx;
int cx = A.cx;
int bx = A.bx;
int rx = A.rx;
int cx = A.cx;
for (int i=bx; i<=rx; i++){
for (int j=bx; j<=cx; j++){
A.put(i, j, new Z(i,j));
}
}
for (int i = bx; i <= rx; i++) {
for (int j = bx; j <= cx; j++) {
A.put(i, j, new Z(i, j));
}
}
Zmat B = Merge.o12(A.get(bx,rx,bx,5), A.get(bx,rx,6,cx));
Print.o(Norm.fro(Minus.o(A,B)));
Zmat B = Merge.o12(A.get(bx, rx, bx, 5), A.get(bx, rx, 6, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o21(A.get(bx,5,bx,cx),
A.get(6,rx,bx,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o21(A.get(bx, 5, bx, cx), A.get(6, rx, bx, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o22(A.get(bx,5,bx,5), A.get(bx,5,6,cx),
A.get(6,rx,bx,5), A.get(6,rx,6,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o22(A.get(bx, 5, bx, 5), A.get(bx, 5, 6, cx), A.get(6, rx, bx, 5), A.get(6, rx, 6, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o13
(A.get(bx,rx,bx,3), A.get(bx,rx,4,6), A.get(bx,rx,7,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o13(A.get(bx, rx, bx, 3), A.get(bx, rx, 4, 6), A.get(bx, rx, 7, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o23
(A.get(bx,5,bx,3), A.get(bx,5,4,6), A.get(bx,5,7,cx),
A.get(6,rx,bx,3), A.get(6,rx,4,6), A.get(6,rx,7,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o23(A.get(bx, 5, bx, 3), A.get(bx, 5, 4, 6), A.get(bx, 5, 7, cx), A.get(6, rx, bx, 3), A.get(6, rx, 4, 6),
A.get(6, rx, 7, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o31(A.get(bx,3,bx,cx),
A.get(4,6,bx,cx),
A.get(7,rx,bx,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o31(A.get(bx, 3, bx, cx), A.get(4, 6, bx, cx), A.get(7, rx, bx, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o32(A.get(bx,3,bx,5), A.get(bx,3,6,cx),
A.get(4,6,bx,5), A.get(4,6,6,cx),
A.get(7,rx,bx,5), A.get(7,rx,6,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o32(A.get(bx, 3, bx, 5), A.get(bx, 3, 6, cx), A.get(4, 6, bx, 5), A.get(4, 6, 6, cx), A.get(7, rx, bx, 5),
A.get(7, rx, 6, cx));
Print.o(Norm.fro(Minus.o(A, B)));
B = Merge.o33
(A.get(bx,3,bx,3), A.get(bx,3,4,6), A.get(bx,3,7,cx),
A.get(4,5,bx,3), A.get(4,5,4,6), A.get(4,5,7,cx),
A.get(6,rx,bx,3), A.get(6,rx,4,6), A.get(6,rx,7,cx));
Print.o(Norm.fro(Minus.o(A,B)));
B = Merge.o33(A.get(bx, 3, bx, 3), A.get(bx, 3, 4, 6), A.get(bx, 3, 7, cx), A.get(4, 5, bx, 3), A.get(4, 5, 4, 6),
A.get(4, 5, 7, cx), A.get(6, rx, bx, 3), A.get(6, rx, 4, 6), A.get(6, rx, 7, cx));
Print.o(Norm.fro(Minus.o(A, B)));
}
}
}

View file

@@ -1,144 +1,149 @@
package Jampack;
/**
Minus negates a matrix or computes the difference of two
matrices.
* Minus negates a matrix or computes the difference of two matrices.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Minus {
public class Minus{
/**
* Computes the difference of two Zmats.
*
* @param A
* The minuend
* @param B
* The subtrahend
* @return A-B
* @exception JampackException
* Thrown if there is a nonconformity.
*/
public static Zmat o(Zmat A, Zmat B) throws JampackException {
if (A.nrow != B.nrow || A.ncol != B.ncol)
throw new JampackException("Matrices not conformable for subtraction");
/**
Computes the difference of two Zmats.
@param A The minuend
@param B The subtrahend
@return A-B
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o(Zmat A, Zmat B)
throws JampackException{
if (A.nrow!=B.nrow || A.ncol != B.ncol)
throw new JampackException
("Matrices not conformable for subtraction");
Zmat C = new Zmat(A.nrow, A.ncol);
Zmat C = new Zmat(A.nrow, A.ncol);
for (int i = 0; i < A.nrow; i++)
for (int j = 0; j < A.ncol; j++) {
C.re[i][j] = A.re[i][j] - B.re[i][j];
C.im[i][j] = A.im[i][j] - B.im[i][j];
}
return C;
}
for (int i=0; i<A.nrow; i++)
for (int j=0; j<A.ncol; j++){
C.re[i][j] = A.re[i][j] - B.re[i][j];
C.im[i][j] = A.im[i][j] - B.im[i][j];
}
return C;
}
/**
* Negates a Zmat
*
* @param A
* The matrix to be negated
* @return -A
*/
public static Zmat o(Zmat A) {
/**
Negates a Zmat
@param A The matrix to be negated
@return -A
*/
public static Zmat o(Zmat A)
{
Zmat B = new Zmat(A.nrow, A.ncol);
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i = 0; i < A.nrow; i++)
for (int j = 0; j < A.ncol; j++) {
B.re[i][j] = -A.re[i][j];
B.im[i][j] = -A.im[i][j];
}
return B;
}
for (int i=0; i<A.nrow; i++)
for (int j=0; j<A.ncol; j++){
B.re[i][j] = -A.re[i][j];
B.im[i][j] = -A.im[i][j];
}
return B;
}
/**
* Computes the difference of a Zmat and a Zdiagmat.
*
* @param A
* The Zmat
* @param D
* The Zdiagmat
* @return A - D
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Computes the difference of a Zmat and a Zdiagmat.
@param A The Zmat
@param D The Zdiagmat
@return A - D
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o(Zmat A, Zdiagmat D) throws JampackException {
public static Zmat o(Zmat A, Zdiagmat D)
throws JampackException{
if (D.order != A.nrow || D.order != A.ncol) {
throw new JampackException("Matrices not conformable for subtraction");
}
Zmat C = new Zmat(A);
for (int i = 0; i < D.order; i++) {
C.re[i][i] = C.re[i][i] - D.re[i];
C.im[i][i] = C.im[i][i] - D.im[i];
}
return C;
}
if (D.order != A.nrow || D.order != A.ncol){
throw new JampackException
("Matrices not conformable for subtraction");
}
Zmat C = new Zmat(A);
for (int i=0; i<D.order; i++){
C.re[i][i] = C.re[i][i] - D.re[i];
C.im[i][i] = C.im[i][i] - D.im[i];
}
return C;
}
/**
* Computes the difference of a Zdiagmat and a Zmat.
*
* @param D
* The Zdiagmat
* @param A
* The Zmat
* @return D - A
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Computes the difference of a Zdiagmat and a Zmat.
@param D The Zdiagmat
@param A The Zmat
@return D - A
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zmat o(Zdiagmat D, Zmat A) throws JampackException {
public static Zmat o(Zdiagmat D, Zmat A)
throws JampackException{
if (D.order != A.nrow || D.order != A.ncol) {
throw new JampackException("Matrices not conformable for subtraction");
}
Zmat C = Minus.o(A);
for (int i = 0; i < D.order; i++) {
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
if (D.order != A.nrow || D.order != A.ncol){
throw new JampackException
("Matrices not conformable for subtraction");
}
Zmat C = Minus.o(A);
for (int i=0; i<D.order; i++){
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
/**
* Computes the difference of two Zdiagmats.
*
* @param D1
* The first Zdiagmat
* @param D2
* The second Zdiagmat
* @return D1 - D2
* @exception JampackException
* Thrown if there is a nonconformity.
*/
/**
Computes the difference of two Zdiagmats.
@param D1 The first Zdiagmat
@param D2 The second Zdiagmat
@return D1 - D2
@exception JampackException
Thrown if there is a nonconformity.
*/
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2) throws JampackException {
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2)
throws JampackException{
if (D1.order != D2.order) {
throw new JampackException("Matrices not conformable for subtraction");
}
Zdiagmat C = new Zdiagmat(D1);
for (int i = 0; i < D1.order; i++) {
C.re[i] = C.re[i] - D2.re[i];
C.im[i] = C.im[i] - D2.im[i];
}
return C;
}
if (D1.order != D2.order){
throw new JampackException
("Matrices not conformable for subtraction");
}
Zdiagmat C = new Zdiagmat(D1);
for (int i=0; i<D1.order; i++){
C.re[i] = C.re[i] - D2.re[i];
C.im[i] = C.im[i] - D2.im[i];
}
return C;
}
/**
* Negates a Zdiagmat.
*
* @param D
* The Zdiagmat
* @return -D
*/
public static Zdiagmat o(Zdiagmat D) {
/**
Negates a Zdiagmat.
@param D The Zdiagmat
@return -D
*/
public static Zdiagmat o(Zdiagmat D)
{
Zdiagmat B = new Zdiagmat(D);
Zdiagmat B = new Zdiagmat(D);
for (int i=0; i<B.order; i++){
B.re[i] = -B.re[i];
B.im[i] = -B.im[i];
}
return B;
}
for (int i = 0; i < B.order; i++) {
B.re[i] = -B.re[i];
B.im[i] = -B.im[i];
}
return B;
}
}
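
A short sketch of the Zmat and mixed Zmat/Zdiagmat overloads above; MinusDemo and its sample data are invented. Note that the Zmat minus Zdiagmat form only touches the diagonal entries.

package Jampack;

/* Illustrative sketch only; MinusDemo is not part of this commit. */
class MinusDemo {

    public static void main(String[] args) throws JampackException {

        int n = 3;
        Zmat A = new Zmat(n, n);
        Zmat B = new Zmat(n, n);
        Zdiagmat D = new Zdiagmat(n);
        for (int i = 0; i < n; i++) {
            for (int j = 0; j < n; j++) {
                A.re[i][j] = i + 1;
                A.im[i][j] = j - 1;
                B.re[i][j] = 1;
            }
            D.re[i] = 1;
            D.im[i] = -1;
        }

        Print.o(Minus.o(A, B)); // elementwise difference A - B
        Print.o(Minus.o(A, D)); // subtracts D from the diagonal of A
        Print.o(Minus.o(A)); // negation, -A
    }
}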

View file

@@ -1,63 +1,62 @@
package Jampack;
class MinusTest{
class MinusTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
if (args[0].equals("zmzm")){
if (args[0].equals("zmzm")) {
int nr = 4;
int nc = 3;
int nr = 4;
int nc = 3;
Zmat A = new Zmat(nr,nc);
Zmat B = new Zmat(nr,nc);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i;
B.re[i][j] = i;
A.im[i][j] = j;
B.im[i][j] = j;
}
}
Print.o(Minus.o(A,B));
Print.o(Minus.o(A));
}
else if (args[0].equals("zmzdm")){
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nr, nc);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i;
B.re[i][j] = i;
A.im[i][j] = j;
B.im[i][j] = j;
}
}
Print.o(Minus.o(A, B));
Print.o(Minus.o(A));
}
int n = 3;
else if (args[0].equals("zmzdm")) {
Zmat A = new Zmat(n,n);
Zdiagmat D = new Zdiagmat(n);
for (int i=0; i<n; i++){
for (int j=0; j<n; j++){
A.re[i][j] = i;
A.im[i][j] = j;
}
D.re[i] = i;
D.im[i] = i;
}
Print.o(Minus.o(A,D));
Print.o(Minus.o(D,A));
}
int n = 3;
else if (args[0].equals("zdmzdm")){
Zmat A = new Zmat(n, n);
Zdiagmat D = new Zdiagmat(n);
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
A.re[i][j] = i;
A.im[i][j] = j;
}
D.re[i] = i;
D.im[i] = i;
}
Print.o(Minus.o(A, D));
Print.o(Minus.o(D, A));
}
int n = 3;
else if (args[0].equals("zdmzdm")) {
Zdiagmat D1 = new Zdiagmat(n);
Zdiagmat D2 = new Zdiagmat(n);
for (int i=0; i<n; i++){
D1.re[i] = i;
D1.im[i] = i;
D2.re[i] = i;
D2.im[i] = i;
}
Print.o(Minus.o(D1,D2));
Print.o(Minus.o(D1));
}
int n = 3;
}
Zdiagmat D1 = new Zdiagmat(n);
Zdiagmat D2 = new Zdiagmat(n);
for (int i = 0; i < n; i++) {
D1.re[i] = i;
D1.im[i] = i;
D2.re[i] = i;
D2.im[i] = i;
}
Print.o(Minus.o(D1, D2));
Print.o(Minus.o(D1));
}
}
}

View file

@@ -1,147 +1,148 @@
package Jampack;
/**
Norm computes norms of matrices.
<p>
Comments: At this point only the Frobenius norm is calculated. Later the
1, 2, and infinity norms will be added.
* Norm computes norms of matrices.
* <p>
* Comments: At this point only the Frobenius norm is calculated. Later the 1, 2, and infinity norms will be added.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Norm {
public class Norm{
/**
* Computes the Frobenius norm of the submatrix (ii1:ii2, jj1:jj2) of a Zmat.
*
* @param A
* The zmat
* @param ii1
* The lower row index
* @param ii2
* The upper row index
* @param jj1
* The lower column index
* @param jj2
* The upper column index
* @return The Frobenius norm of A(ii1:ii2, jj1:jj2)
*/
public static double fro(Zmat A, int ii1, int ii2, int jj1, int jj2) {
int i, i1, i2, j, j1, j2;
double fac, nrm, scale;
/**
Computes the Frobenius norm of the submatrix (ii1:ii2, jj1:jj2)
of a Zmat.
@param A The Zmat
@param ii1 The lower row index
@param ii2 The upper row index
@param jj1 The lower column index
@param jj2 The upper column index
@return The Frobenius norm of A(ii1:ii2, jj1:jj2)
*/
public static double fro(Zmat A, int ii1, int ii2, int jj1, int jj2)
{
int i, i1, i2, j, j1, j2;
double fac, nrm, scale;
i1 = ii1 - A.basex;
i2 = ii2 - A.basex;
j1 = jj1 - A.basex;
j2 = jj2 - A.basex;
i1 = ii1 - A.basex;
i2 = ii2 - A.basex;
j1 = jj1 - A.basex;
j2 = jj2 - A.basex;
scale = 0.0;
for (i = i1; i <= i2; i++) {
for (j = j1; j <= j2; j++) {
scale = Math.max(scale, Math.abs(A.re[i][j]) + Math.abs(A.im[i][j]));
}
}
if (scale == 0) {
return 0.0;
}
if (scale < 1) {
scale = scale * 1.0e20;
}
scale = 1 / scale;
nrm = 0;
for (i = i1; i <= i2; i++) {
for (j = j1; j <= j2; j++) {
fac = scale * A.re[i][j];
nrm = nrm + fac * fac;
fac = scale * A.im[i][j];
nrm = nrm + fac * fac;
}
}
return Math.sqrt(nrm) / scale;
}
scale = 0.0;
for (i=i1; i<=i2; i++){
for (j=j1; j<=j2; j++){
scale = Math.max(scale,
Math.abs(A.re[i][j])+Math.abs(A.im[i][j]));
}
}
if (scale == 0){
return 0.0;
}
if (scale < 1){
scale = scale*1.0e20;
}
scale = 1/scale;
nrm = 0;
for (i=i1; i<=i2; i++){
for (j=j1; j<=j2; j++){
fac = scale*A.re[i][j];
nrm = nrm + fac*fac;
fac = scale*A.im[i][j];
nrm = nrm + fac*fac;
}
}
return Math.sqrt(nrm)/scale;
}
/**
* Computes the Frobenius norm of a Zmat.
*
* @param A
* The Zmat
* @return The Frobenius norm of A
*/
public static double fro(Zmat A) {
A.getProperties();
return Norm.fro(A, A.bx, A.rx, A.bx, A.cx);
}
/**
Computes the Frobenius norm of a Zmat.
@param A The Zmat
@return The Frobenius norm of A
*/
public static double fro(Zmat A)
{
A.getProperties();
return Norm.fro(A, A.bx, A.rx, A.bx, A.cx);
}
/**
* Computes the Frobenius norm of a Z1.
*
* @param u
* The Z1
* @return The Frobenius norm of u
*/
public static double fro(Z1 u) {
int i;
double fac, nrm, scale;
/**
Computes the Frobenius norm of a Z1.
@param u The Z1
@return The Frobenius norm of u
*/
public static double fro(Z1 u)
{
int i;
double fac, nrm, scale;
int n = u.n;
int n = u.n;
scale = 0.0;
for (i = 0; i < n; i++) {
scale = Math.max(scale, Math.abs(u.re[i]) + Math.abs(u.im[i]));
}
if (scale == 0) {
return 0.0;
}
if (scale < 1) {
scale = scale * 1.0e20;
}
scale = 1 / scale;
nrm = 0;
scale = 0.0;
for (i=0; i<n; i++){
scale = Math.max(scale,
Math.abs(u.re[i]) + Math.abs(u.im[i]));
}
if (scale == 0){
return 0.0;
}
if (scale < 1){
scale = scale*1.0e20;
}
scale = 1/scale;
nrm = 0;
for (i = 0; i < n; i++) {
fac = scale * u.re[i];
nrm = nrm + fac * fac;
fac = scale * u.im[i];
nrm = nrm + fac * fac;
}
for (i=0; i<n; i++){
fac = scale*u.re[i];
nrm = nrm + fac*fac;
fac = scale*u.im[i];
nrm = nrm + fac*fac;
}
return Math.sqrt(nrm) / scale;
}
return Math.sqrt(nrm)/scale;
}
/**
* Computes the Frobenius norm of a Zdiagmat.
*
* @param D
* The Zdiagmat
* @return The Frobenius norm of D
*/
/**
Computes the Frobenius norm of a Zdiagmat.
@param D The Zdiagmat
@return The Frobenius norm of D
*/
public static double fro(Zdiagmat D) {
int i;
double fac, nrm, scale;
public static double fro(Zdiagmat D)
{
int i;
double fac, nrm, scale;
int n = D.order;
int n = D.order;
scale = 0.0;
for (i = 0; i < n; i++) {
scale = Math.max(scale, Math.abs(D.re[i]) + Math.abs(D.im[i]));
}
if (scale == 0) {
return 0.0;
}
if (scale < 1) {
scale = scale * 1.0e20;
}
scale = 1 / scale;
nrm = 0;
scale = 0.0;
for (i=0; i<n; i++){
scale = Math.max(scale,
Math.abs(D.re[i]) + Math.abs(D.im[i]));
}
if (scale == 0){
return 0.0;
}
if (scale < 1){
scale = scale*1.0e20;
}
scale = 1/scale;
nrm = 0;
for (i = 0; i < n; i++) {
fac = scale * D.re[i];
nrm = nrm + fac * fac;
fac = scale * D.im[i];
nrm = nrm + fac * fac;
}
for (i=0; i<n; i++){
fac = scale*D.re[i];
nrm = nrm + fac*fac;
fac = scale*D.im[i];
nrm = nrm + fac*fac;
}
return Math.sqrt(nrm)/scale;
}
return Math.sqrt(nrm) / scale;
}
}
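
Norm.fro scales the entries before summing their squares, which guards against overflow on badly scaled data. A sketch of the whole-matrix and submatrix forms; NormDemo is an invented name, and the submatrix indices follow the base-index convention of the fro(A, ii1, ii2, jj1, jj2) overload above.

package Jampack;

/* Illustrative sketch only; NormDemo is not part of this commit. */
class NormDemo {

    public static void main(String[] args) throws JampackException {

        Zmat A = new Zmat(4, 4);
        for (int i = A.bx; i <= A.rx; i++)
            for (int j = A.bx; j <= A.cx; j++)
                A.put(i, j, new Z(i, -j));

        // Frobenius norm of the whole matrix ...
        Print.o(Norm.fro(A));

        // ... and of the leading 2 by 2 submatrix.
        Print.o(Norm.fro(A, A.bx, A.bx + 1, A.bx, A.bx + 1));
    }
}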

View file

@@ -1,24 +1,22 @@
package Jampack;
class NormTest{
class NormTest {
public static void main(String[] args)
throws JampackException
{
double nrm, ss;
public static void main(String[] args) throws JampackException {
double nrm, ss;
Z Ary[][] = new Z[3][2];
for (int i=0; i<3; i++)
for (int j=0; j<2; j++)
Ary[i][j] = new Z(i,j);
Z Ary[][] = new Z[3][2];
for (int i = 0; i < 3; i++)
for (int j = 0; j < 2; j++)
Ary[i][j] = new Z(i, j);
Zmat A = new Zmat(Ary);
nrm = Norm.fro(A);
A = Times.o(H.o(A), A);
ss = 0.0;
for (int i=0; i<A.nc; i++)
ss = ss + A.re[i][i];
Print.o(nrm*nrm);
Print.o(ss);
}
Zmat A = new Zmat(Ary);
nrm = Norm.fro(A);
A = Times.o(H.o(A), A);
ss = 0.0;
for (int i = 0; i < A.nc; i++)
ss = ss + A.re[i][i];
Print.o(nrm * nrm);
Print.o(ss);
}
}

View file

@@ -1,116 +1,113 @@
package Jampack;
/**
Parameters is a class containing global parameters for
Jampack.
* Parameters is a class containing global parameters for Jampack.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Parameters {
public class Parameters{
/**
* The base index
*/
protected static int BaseIndex = 1;
/**
The base index
*/
protected static int BaseIndex = 1;
/**
* Flag allowing only one change in base index
*/
protected static boolean BaseIndexNotChangeable;
/**
Flag allowing only one change in base index
*/
protected static boolean BaseIndexNotChangeable;
/**
* Returns the base index
*/
/**
Returns the base index
*/
public static int getBaseIndex() {
return BaseIndex;
}
public static int getBaseIndex()
{
return BaseIndex;
}
/**
* Resets the default base index.
*
* @param bx
* The new base index
* @exception JampackException
* Thrown when the base indices are not changeable.
*/
public static void setBaseIndex(int bx) throws JampackException {
if (BaseIndexNotChangeable) {
throw new JampackException("Illegal attempt to change base index");
}
BaseIndex = bx;
BaseIndexNotChangeable = true;
}
/**
* Adjust the base index of a Zmat to make it conform to the default.
*/
public static void adjustBaseIndex(Zmat A) {
BaseIndexNotChangeable = true;
A.basex = BaseIndex;
A.getProperties();
}
/**
Resets the default base index.
@param bx The new base index
@exception JampackException
Thrown when the base indices are
not changeable.
/**
* Adjust the base index of a Zdiagmat to make it conform to the default.
*/
public static void adjustBaseIndex(Zdiagmat A) {
BaseIndexNotChangeable = true;
A.basex = BaseIndex;
A.getProperties();
}
*/
public static void setBaseIndex(int bx)
throws JampackException{
if (BaseIndexNotChangeable){
throw new JampackException
("Illegal attempt to change base index");
}
BaseIndex = bx;
BaseIndexNotChangeable = true;
}
/**
* The history flag indicating whether to save decompositions
*/
/**
Adjust the base index of a Zmat to make it conform to
the default.
*/
public static void adjustBaseIndex(Zmat A){
BaseIndexNotChangeable = true;
A.basex = BaseIndex;
A.getProperties();
}
protected static boolean History = true;
/**
Adjust the base index of a Zdiagmat to make it conform to
the default.
*/
public static void adjustBaseIndex(Zdiagmat A){
BaseIndexNotChangeable = true;
A.basex = BaseIndex;
A.getProperties();
}
/**
* Sets the history flag.
*/
public static void setHistory() {
History = true;
}
/**
The history flag indicating whether to save decompositions
*/
/**
* Unsets the history flag.
*/
public static void unsetHistory() {
History = false;
}
protected static boolean History = true;
/**
* Output field width.
*/
protected static int OutputFieldWidth = 12;
/**
* Number of places to the right of the decimal point.
*/
protected static int OutputFracPlaces = 3;
/**
* Output page width
*/
protected static int PageWidth = 80;
/**
Sets the history flag.
*/
public static void setHistory(){
History = true;
}
/**
* Changes the output parameters. Nonpositive parameters are ignored.
*
* @param width
* The new field width
* @param frac
* The new number of places to the right of the decimal
* @param pagewidth
* The new page width
*/
public static void setOutputParams(int width, int frac, int pagewidth) {
/**
Unsets the history flag.
*/
public static void unsetHistory(){
History = false;
}
/**
Output field width.
*/
protected static int OutputFieldWidth = 12;
/**
Number of places to the right of the decimal point.
*/
protected static int OutputFracPlaces = 3;
/**
Output page width
*/
protected static int PageWidth = 80;
/**
Changes the output parameters. Nonpositive parameters are ignored.
@param width The new field width
@param frac The new number of places to the right of the decimal
@param pagewidth The new page width
*/
public static void setOutputParams(int width, int frac, int pagewidth){
OutputFieldWidth = (width>0)? width: OutputFieldWidth;
OutputFracPlaces = (frac>0)? frac: OutputFracPlaces;
PageWidth = (pagewidth>0)? pagewidth: PageWidth;
}
OutputFieldWidth = (width > 0) ? width : OutputFieldWidth;
OutputFracPlaces = (frac > 0) ? frac : OutputFracPlaces;
PageWidth = (pagewidth > 0) ? pagewidth : PageWidth;
}
}
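
The global settings are meant to be adjusted once, before any matrices are built. A sketch of the typical calls; ParametersDemo is an invented name, and the nonpositive page-width argument simply leaves that setting unchanged, as documented above.

package Jampack;

/* Illustrative sketch only; ParametersDemo is not part of this commit. */
class ParametersDemo {

    public static void main(String[] args) throws JampackException {

        // Switch from the default base index 1 to 0. A second call would
        // throw, because the first call freezes the base index.
        Parameters.setBaseIndex(0);

        // Wider fields and more fraction digits; the nonpositive page
        // width is ignored and the current value is kept.
        Parameters.setOutputParams(16, 8, -1);

        Zmat A = new Zmat(2, 2);
        A.put(0, 0, new Z(1, -1)); // legal now that indexing starts at 0
        Print.o(A);
        System.out.println(Parameters.getBaseIndex());
    }
}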

View file

@@ -1,87 +1,86 @@
package Jampack;
/**
Pivot applies a sequence of pivot operations to the
rows of a matrix. The pivot sequence is contained
in an integer array pvt[], which determines a permutation
as follows:
<pre>
* for (k=0; k&lt;pvt.length; k++)
* swap k and pvt[k];
</pre>
Both k and pvt[k] represent zero-based references
to the rows of the matrix.
Pivot also has a method to apply the inverse permutation.
<p>
Comments: Column pivoting will be added later.
* Pivot applies a sequence of pivot operations to the rows of a matrix. The pivot sequence is contained in an integer array pvt[],
* which determines a permutation as follows:
*
* <pre>
* for (k=0; k&lt;pvt.length; k++)
* swap k and pvt[k];
* </pre>
*
* Both k and pvt[k] represent zero-based references to the rows of the matrix. Pivot also has a method to apply the inverse
* permutation.
* <p>
* Comments: Column pivoting will be added later.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Pivot {
public class Pivot{
/**
* Pivots the rows of a Zmat (altered) as specified by a pivot array.
*
* @param A
* The Zmat (altered)
* @param pvt
* The pivot array
* @return The Zmat A with its rows permuted
* @exception JampackException
* Thrown for inconsistent dimensions.
*/
/**
Pivots the rows of a Zmat (altered) as specified by a pivot array.
@param A The Zmat (altered)
@param pvt The pivot array
@return The Zmat A with its rows permuted
@exception JampackException
Thrown for inconsistent dimensions.
*/
public static Zmat row(Zmat A, int pvt[]) throws JampackException {
public static Zmat row(Zmat A, int pvt[])
throws JampackException{
int np = pvt.length;
if (np > A.nrow)
throw new JampackException("Inconsistent array dimensions");
int np = pvt.length;
if (np > A.nrow)
throw new JampackException
("Inconsistent array dimensions");
A.dirty = true;
A.dirty = true;
for (int k = 0; k < np; k++)
for (int j = 0; j < A.ncol; j++) {
double t = A.re[k][j];
A.re[k][j] = A.re[pvt[k]][j];
A.re[pvt[k]][j] = t;
t = A.im[k][j];
A.im[k][j] = A.im[pvt[k]][j];
A.im[pvt[k]][j] = t;
}
return A;
}
for (int k=0; k<np; k++)
for (int j=0; j<A.ncol; j++){
double t = A.re[k][j];
A.re[k][j] = A.re[pvt[k]][j];
A.re[pvt[k]][j] = t;
t = A.im[k][j];
A.im[k][j] = A.im[pvt[k]][j];
A.im[pvt[k]][j] = t;
}
return A;
}
/**
	 * Pivots the rows of a Zmat (altered) in the inverse order specified by a pivot array.
*
* @param A
* The Zmat (altered)
* @param pvt
* The pivot array
* @return The Zmat A with its rows permuted
* @exception JampackException
	 *             Thrown for inconsistent dimensions.
*/
/**
	Pivots the rows of a Zmat (altered) in the inverse order specified
	by a pivot array.
@param A The Zmat (altered)
@param pvt The pivot array
@return The Zmat A with its rows permuted
@exception JampackException
	Thrown for inconsistent dimensions.
*/
public static Zmat rowi(Zmat A, int pvt[]) throws JampackException {
public static Zmat rowi(Zmat A, int pvt[])
throws JampackException{
int np = pvt.length;
if (np > A.nrow)
throw new JampackException("Inconsistent array dimensions");
int np = pvt.length;
if (np > A.nrow)
throw new JampackException
("Inconsistent array dimensions");
A.dirty = true;
A.dirty = true;
for (int k=np-1; k>=0; k--)
for (int j=0; j<A.nc; j++){
double t = A.re[k][j];
A.re[k][j] = A.re[pvt[k]][j];
A.re[pvt[k]][j] = t;
t = A.im[k][j];
A.im[k][j] = A.im[pvt[k]][j];
A.im[pvt[k]][j] = t;
}
return A;
}
for (int k = np - 1; k >= 0; k--)
for (int j = 0; j < A.nc; j++) {
double t = A.re[k][j];
A.re[k][j] = A.re[pvt[k]][j];
A.re[pvt[k]][j] = t;
t = A.im[k][j];
A.im[k][j] = A.im[pvt[k]][j];
A.im[pvt[k]][j] = t;
}
return A;
}
}
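
To make the pivot semantics in the class comment concrete, here is an illustrative sketch (not part of this commit): it applies a made-up pivot sequence with Pivot.row and undoes it with Pivot.rowi, so the printed Frobenius norm of the difference should be exactly zero.

package Jampack;

class PivotDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		Zmat A = Rand.uzmat(4, 3);
		Zmat B = new Zmat(A); // working copy
		int pvt[] = { 2, 3, 3, 3 }; // zero-based targets: swap k and pvt[k]
		Pivot.row(B, pvt); // permute the rows
		Pivot.rowi(B, pvt); // apply the inverse permutation
		Print.o(Norm.fro(Minus.o(A, B))); // should print 0
	}
}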

@ -1,103 +1,111 @@
package Jampack;
/**
Plus computes the sum of two matrices.
 * Plus computes the sum of two matrices.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Plus {
public class Plus{
/**
* Computes the sum of two Zmats
*
* @param A
* The first Zmat
* @param B
* The second Zmat
* @return A + B
* @exception JampackException
* Thrown for nonconformity.
*/
public static Zmat o(Zmat A, Zmat B) throws JampackException {
if (A.nrow != B.nrow || A.ncol != B.ncol) {
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A.nr, A.nc);
/**
Computes the sum of two Zmats
@param A The first Zmat
@param B The second Zmat
@return A + B
@exception JampackException
Thrown for nonconformity.
*/
public static Zmat o(Zmat A, Zmat B)
throws JampackException{
if (A.nrow!=B.nrow || A.ncol != B.ncol){
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A.nr, A.nc);
for (int i = 0; i < A.nrow; i++)
for (int j = 0; j < A.ncol; j++) {
C.re[i][j] = A.re[i][j] + B.re[i][j];
C.im[i][j] = A.im[i][j] + B.im[i][j];
}
return C;
}
for (int i=0; i<A.nrow; i++)
for (int j=0; j<A.ncol; j++){
C.re[i][j] = A.re[i][j] + B.re[i][j];
C.im[i][j] = A.im[i][j] + B.im[i][j];
}
return C;
}
/**
* Computes the sum of a Zmat and a Zdiagmat.
*
* @param A
* The Zmat
* @param D
* The Zdiagmat
* @return A + D
* @exception JampackException
* Thrown for nonconformity.
*/
/**
Computes the sum of a Zmat and a Zdiagmat.
@param A The Zmat
@param D The Zdiagmat
@return A + D
@exception JampackException
Thrown for nonconformity.
*/
public static Zmat o(Zmat A, Zdiagmat D) throws JampackException {
public static Zmat o(Zmat A, Zdiagmat D)
throws JampackException{
if (D.order != A.nrow || D.order != A.ncol) {
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A);
for (int i = 0; i < A.nrow; i++) {
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
if (D.order != A.nrow || D.order != A.ncol){
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A);
for (int i=0; i<A.nrow; i++){
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
/**
* Computes the sum of a Zdiagmat and a Zmat.
*
* @param D
* The Zdiagmat
* @param A
* The Zmat
* @return D + A
* @exception JampackException
* Thrown for nonconformity.
*/
/**
Computes the sum of a Zdiagmat and a Zmat.
@param D The Zdiagmat
@param A The Zmat
@return D + A
@exception JampackException
Thrown for nonconformity.
*/
public static Zmat o(Zdiagmat D, Zmat A) throws JampackException {
public static Zmat o(Zdiagmat D, Zmat A)
throws JampackException{
if (D.order != A.nrow || D.order != A.ncol) {
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A);
for (int i = 0; i < D.order; i++) {
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
if (D.order != A.nrow || D.order != A.ncol){
throw new JampackException("Matrices not conformable for addition");
}
Zmat C = new Zmat(A);
for (int i=0; i<D.order; i++){
C.re[i][i] = C.re[i][i] + D.re[i];
C.im[i][i] = C.im[i][i] + D.im[i];
}
return C;
}
/**
* Computes the sum of a Zdiagmat and a Zdiagmat.
*
* @param D1
* The first Zdiagmat
* @param D2
* The second Zdiagmat
* @return D1 + D2
* @exception JampackException
* Thrown for nonconformity.
*/
/**
Computes the sum of a Zdiagmat and a Zdiagmat.
@param D1 The first Zdiagmat
@param D2 The second Zdiagmat
@return D1 + D2
@exception JampackException
Thrown for nonconformity.
*/
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2) throws JampackException {
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2)
throws JampackException{
if (D1.order != D2.order){
throw new JampackException("Matrices not conformable for addition");
}
Zdiagmat C = new Zdiagmat(D1);
for (int i=0; i<D1.order; i++){
C.re[i] = C.re[i] + D2.re[i];
C.im[i] = C.im[i] + D2.im[i];
}
return C;
}
if (D1.order != D2.order) {
throw new JampackException("Matrices not conformable for addition");
}
Zdiagmat C = new Zdiagmat(D1);
for (int i = 0; i < D1.order; i++) {
C.re[i] = C.re[i] + D2.re[i];
C.im[i] = C.im[i] + D2.im[i];
}
return C;
}
}
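
Illustration only (not part of this commit): the sketch below exercises two of the Plus overloads above and checks that A + D and D + A agree, so the printed residual should be zero. The class name and diagonal values are made up for the demo.

package Jampack;

class PlusDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		int n = 4;
		Zmat A = Rand.uzmat(n, n);
		Zdiagmat D = new Zdiagmat(n);
		for (int i = 0; i < n; i++) {
			D.re[i] = i + 1; // arbitrary diagonal entries
			D.im[i] = -i;
		}
		Print.o(Norm.fro(Minus.o(Plus.o(A, D), Plus.o(D, A)))); // should print 0
	}
}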

@ -1,61 +1,60 @@
package Jampack;
class PlusTest{
class PlusTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
if (args[0].equals("zmzm")){
if (args[0].equals("zmzm")) {
int nr = 4;
int nc = 3;
int nr = 4;
int nc = 3;
Zmat A = new Zmat(nr,nc);
Zmat B = new Zmat(nr,nc);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i;
B.re[i][j] = -i;
A.im[i][j] = j;
B.im[i][j] = -j;
}
}
Print.o(Plus.o(A,B));
}
else if (args[0].equals("zmzdm")){
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nr, nc);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i;
B.re[i][j] = -i;
A.im[i][j] = j;
B.im[i][j] = -j;
}
}
Print.o(Plus.o(A, B));
}
int n = 3;
else if (args[0].equals("zmzdm")) {
Zmat A = new Zmat(n,n);
Zdiagmat D = new Zdiagmat(n);
for (int i=0; i<n; i++){
for (int j=0; j<n; j++){
A.re[i][j] = i;
A.im[i][j] = j;
}
D.re[i] = -i;
D.im[i] = -i;
}
Print.o(Plus.o(A,D));
Print.o(Plus.o(D,A));
}
int n = 3;
else if (args[0].equals("zdmzdm")){
Zmat A = new Zmat(n, n);
Zdiagmat D = new Zdiagmat(n);
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
A.re[i][j] = i;
A.im[i][j] = j;
}
D.re[i] = -i;
D.im[i] = -i;
}
Print.o(Plus.o(A, D));
Print.o(Plus.o(D, A));
}
int n = 3;
else if (args[0].equals("zdmzdm")) {
Zdiagmat D1 = new Zdiagmat(n);
Zdiagmat D2 = new Zdiagmat(n);
for (int i=0; i<n; i++){
D1.re[i] = i;
D1.im[i] = -i;
D2.re[i] = -i;
D2.im[i] = i;
}
Print.o(Plus.o(D1,D2));
}
int n = 3;
}
Zdiagmat D1 = new Zdiagmat(n);
Zdiagmat D2 = new Zdiagmat(n);
for (int i = 0; i < n; i++) {
D1.re[i] = i;
D1.im[i] = -i;
D2.re[i] = -i;
D2.im[i] = i;
}
Print.o(Plus.o(D1, D2));
}
}
}

Diff not shown because of its large size.
@ -1,53 +1,49 @@
package Jampack;
class PrintTest
{
class PrintTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
int itest = -5;
Print.o(itest, 10);
int itest = -5;
Print.o(itest, 10);
int ia[] = {1, 2, -3, 4, 5, 6};
Print.o(ia, 20);
int ia[] = { 1, 2, -3, 4, 5, 6 };
Print.o(ia, 20);
Print.o(Math.PI, 15, 7);
Print.o(Math.PI, 15, 7);
double a[] = {1, 3, -3, 4, Math.PI, Math.E};
Print.o(a, 15, 7);
double a[] = { 1, 3, -3, 4, Math.PI, Math.E };
Print.o(a, 15, 7);
double A[][]= {{1, 3, -3, 4, Math.PI, Math.E},
{1, 3, -3, 4, Math.PI, Math.E},
{1, 3, -3, 4, Math.PI, Math.E}};
Print.o(A, 15, 7);
double A[][] = { { 1, 3, -3, 4, Math.PI, Math.E }, { 1, 3, -3, 4, Math.PI, Math.E }, { 1, 3, -3, 4, Math.PI, Math.E } };
Print.o(A, 15, 7);
Print.o(new Z(3, -7), 15, 7);
Print.o(new Z(3, -7), 15, 7);
Z c[] = new Z[5];
for (int i=0; i<5; i++)
c[i] = new Z(i,i+1);
Print.o(c, 15, 7);
Z c[] = new Z[5];
for (int i = 0; i < 5; i++)
c[i] = new Z(i, i + 1);
Print.o(c, 15, 7);
Z C[][] = new Z[3][5];
for (int i = 0; i < 3; i++)
for (int j = 0; j < 5; j++)
C[i][j] = new Z(i, j);
Print.o(C, 15, 7);
Z C[][] = new Z[3][5];
for (int i=0; i<3; i++)
for (int j=0; j<5; j++)
C[i][j] = new Z(i,j);
Print.o(C, 15, 7);
Z1 z1 = new Z1(5);
for (int i = 0; i < 5; i++) {
z1.re[i] = i;
z1.im[i] = -i;
}
Print.o(z1, 15, 7);
Z1 z1 = new Z1(5);
for (int i=0; i<5; i++){
z1.re[i] = i; z1.im[i]=-i;
}
Print.o(z1, 15, 7);
Zmat Z = new Zmat(C);
Print.o(Z, 15, 7);
Zmat Z = new Zmat(C);
Print.o(Z, 15, 7);
Zdiagmat D = new Zdiagmat(z1);
Print.o(D, 15, 7);
Zdiagmat D = new Zdiagmat(z1);
Print.o(D, 15, 7);
}
}
}

@ -1,210 +1,212 @@
package Jampack;
import java.util.Random;
/**
The rand suite generates random objects with elements
distributed randomly on [0,1] or normally with mean zero
and standard deviation one.
@version Pre-alpha
@author G. W. Stewart
*/
public class Rand{
private static Random R = new Random();
/**
Sets the seed for the random number generator.
* The rand suite generates random objects with elements distributed randomly on [0,1] or normally with mean zero and standard
* deviation one.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@param seed The seed
*/
public static void setSeed(long seed){
R.setSeed(seed);
}
public class Rand {
/**
Generates a random uniform double.
private static Random R = new Random();
@return a uniform random double
*/
public static double ud(){
return R.nextDouble();
}
/**
* Sets the seed for the random number generator.
*
* @param seed
* The seed
*/
public static void setSeed(long seed) {
R.setSeed(seed);
}
/**
Generates a one-dimensional array of
uniform random doubles.
/**
* Generates a random uniform double.
*
* @return a uniform random double
*/
public static double ud() {
return R.nextDouble();
}
@param n The length of the array.
@return The array of uniform doubles.
*/
public static double[] udary(int n){
double d[] = new double[n];
for (int i=0; i<n; i++){
d[i] = R.nextDouble();
}
return d;
}
/**
* Generates a one-dimensional array of uniform random doubles.
*
* @param n
* The length of the array.
* @return The array of uniform doubles.
*/
public static double[] udary(int n) {
double d[] = new double[n];
for (int i = 0; i < n; i++) {
d[i] = R.nextDouble();
}
return d;
}
/**
Generates a two-dimensional array of
uniform random doubles.
/**
* Generates a two-dimensional array of uniform random doubles.
*
* @param m
* The number of rows in the array.
* @param n
* The number of columns in the array.
* @return The array of uniform doubles.
*/
public static double[][] udary(int m, int n) {
double d[][] = new double[m][n];
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
d[i][j] = R.nextDouble();
}
}
return d;
}
@param m The number of rows in the array.
@param n The number of columns in the array.
@return The array of uniform doubles.
*/
public static double[][] udary(int m, int n){
double d[][] = new double[m][n];
for (int i=0; i<m; i++){
for (int j=0; j<n; j++){
d[i][j] = R.nextDouble();
}
}
return d;
}
/**
* Generates a uniform random complex number, i.e., a complex number whose real and imaginary parts are random.
*
* @return The uniform random Z
*/
public static Z uz() {
return new Z(R.nextDouble(), R.nextDouble());
}
/**
Generates a uniform random complex number, i.e., a complex
number whose real and imaginary parts are random.
/**
* Generates a uniform random Z1.
*
* @param n
* The length of the Z1
* @return The uniform random Z1
* @exception JampackException
* Passed from below.
*/
public static Z1 uz1(int n) throws JampackException {
@return The uniform random Z
*/
public static Z uz(){
return new Z(R.nextDouble(), R.nextDouble());
}
Z1 zone = new Z1(n);
for (int i = 0; i < n; i++) {
zone.re[i] = R.nextDouble();
zone.im[i] = R.nextDouble();
}
return zone;
}
/**
Generates a uniform random Z1.
@param n The length of the Z1
@return The uniform random Z1
@exception JampackException
Passed from below.
*/
public static Z1 uz1(int n)
throws JampackException{
/**
* Generates a uniform random Zmat.
*
* @param m
* The number of rows in the Zmat
* @param n
* The number of columns in the Zmat
* @return The uniform random Zmat
* @exception JampackException
* Passed from below.
*/
public static Zmat uzmat(int m, int n) throws JampackException {
Zmat zm = new Zmat(m, n);
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
zm.re[i][j] = R.nextDouble();
zm.im[i][j] = R.nextDouble();
}
}
return zm;
}
Z1 zone = new Z1(n);
for (int i=0; i<n; i++){
zone.re[i] = R.nextDouble();
zone.im[i] = R.nextDouble();
}
return zone;
}
/**
* Generates a normal random double.
*
* @return a normal random double
*/
public static double nd() {
return R.nextGaussian();
}
/**
Generates a uniform random Zmat.
@param m The number of rows in the Zmat
@param n The number of columns in the Zmat
@return The uniform random Zmat
@exception JampackException
Passed from below.
/**
* Generates a one-dimensional array of normal random doubles.
*
* @param n
* The length of the array.
* @return The array of normal doubles.
*/
public static double[] ndary(int n) {
double d[] = new double[n];
for (int i = 0; i < n; i++) {
d[i] = R.nextGaussian();
}
return d;
}
*/
public static Zmat uzmat(int m, int n)
throws JampackException{
Zmat zm = new Zmat(m, n);
for (int i=0; i<m; i++){
for (int j=0; j<n; j++){
zm.re[i][j] = R.nextDouble();
zm.im[i][j] = R.nextDouble();
}
}
return zm;
}
/**
* Generates a two-dimensional array of normal random doubles.
*
* @param m
* The number of rows in the array.
* @param n
* The number of columns in the array.
* @return The array of normal doubles.
*/
public static double[][] ndary(int m, int n) {
double d[][] = new double[m][n];
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
d[i][j] = R.nextGaussian();
}
}
return d;
}
/**
Generates a normal random double.
/**
* Generates a normal random complex number, i.e., a complex number whose real and imaginary parts are random.
*
* @return The normal random Z
*/
public static Z nz() {
return new Z(R.nextGaussian(), R.nextGaussian());
}
@return a normal random double
*/
public static double nd(){
return R.nextGaussian();
}
/**
* Generates a normal random Z1.
*
* @param n
* The length of the Z1
* @return The normal random Z1
* @exception JampackException
* Passed from below.
*/
public static Z1 nz1(int n) throws JampackException {
/**
Generates a one-dimensional array of
normal random doubles.
Z1 zone = new Z1(n);
for (int i = 0; i < n; i++) {
zone.re[i] = R.nextGaussian();
zone.im[i] = R.nextGaussian();
}
return zone;
}
@param n The length of the array.
@return The array of normal doubles.
*/
public static double[] ndary(int n){
double d[] = new double[n];
for (int i=0; i<n; i++){
d[i] = R.nextGaussian();
}
return d;
}
/**
Generates a two-dimensional array of
normal random doubles.
@param m The number of rows in the array.
@param n The number of columns in the array.
@return The array of normal doubles.
*/
public static double[][] ndary(int m, int n){
double d[][] = new double[m][n];
for (int i=0; i<m; i++){
for (int j=0; j<n; j++){
d[i][j] = R.nextGaussian();
}
}
return d;
}
/**
Generates a normal random complex number, i.e., a complex
number whose real and imaginary parts are random.
@return The normal random Z
*/
public static Z nz(){
return new Z(R.nextGaussian(), R.nextGaussian());
}
/**
Generates a normal random Z1.
@param n The length of the Z1
@return The normal random Z1
@exception JampackException
Passed from below.
*/
public static Z1 nz1(int n)
throws JampackException{
Z1 zone = new Z1(n);
for (int i=0; i<n; i++){
zone.re[i] = R.nextGaussian();
zone.im[i] = R.nextGaussian();
}
return zone;
}
/**
Generates a normal random Zmat.
@param m The number of rows in the Zmat
@param n The number of columns in the Zmat
@return The normal random Zmat
@exception JampackException
Passed from below.
*/
public static Zmat nzmat(int m, int n)
throws JampackException{
Zmat zm = new Zmat(m, n);
for (int i=0; i<m; i++){
for (int j=0; j<n; j++){
zm.re[i][j] = R.nextGaussian();
zm.im[i][j] = R.nextGaussian();
}
}
return zm;
}
/**
* Generates a normal random Zmat.
*
* @param m
* The number of rows in the Zmat
* @param n
* The number of columns in the Zmat
* @return The normal random Zmat
* @exception JampackException
* Passed from below.
*/
public static Zmat nzmat(int m, int n) throws JampackException {
Zmat zm = new Zmat(m, n);
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
zm.re[i][j] = R.nextGaussian();
zm.im[i][j] = R.nextGaussian();
}
}
return zm;
}
}
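
Illustration only (not part of this commit): the sketch below samples a few of the generators documented above, seeding first so the run is reproducible, and feeds a uniform random Z1 to the Zdiagmat constructor that PrintTest also uses. The seed and sizes are arbitrary.

package Jampack;

class RandDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		Rand.setSeed(7); // reproducible run
		Print.o(Rand.ud(), 15, 7); // one uniform double
		Print.o(Rand.nz(), 15, 7); // one normal complex number
		Zdiagmat D = new Zdiagmat(Rand.uz1(4)); // diagonal matrix from a random Z1
		Print.o(D, 15, 7);
	}
}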

@ -1,17 +1,16 @@
package Jampack;
class RandTest{
class RandTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Print.o(Rand.uzmat(5,3));
Rand.setSeed(69);
Print.o(Rand.uzmat(5,3));
Print.o(Rand.uzmat(5, 3));
Rand.setSeed(69);
Print.o(Rand.uzmat(5, 3));
Print.o(Rand.nzmat(5,3));
Rand.setSeed(69);
Print.o(Rand.nzmat(5,3));
Print.o(Rand.nzmat(5, 3));
Rand.setSeed(69);
Print.o(Rand.nzmat(5, 3));
}
}
}

Diff not shown because of its large size.
@ -1,34 +1,33 @@
package Jampack;
class RotTest{
class RotTest {
public static void main(String[] args){
public static void main(String[] args) {
Zmat A = new Zmat(2, 1);
A.put(1, 1, new Z(-1, 2));
A.put(2, 1, new Z(2, -1));
Zmat B = new Zmat(A);
Zmat C = H.o(A);
Zmat D = new Zmat(C);
Rot P = Rot.genc(A, 1, 2, 1);
Rot.pa(P, B, 1, 2, 1, 1);
Print.o(A);
Print.o(B);
Rot.pha(P, B, 1, 2, 1, 1);
Print.o(B);
Rot.pha(P, B, 1, 2, 1, 1);
Rot.pa(P, B, 1, 2, 1, 1);
Print.o(B);
Zmat A = new Zmat(2, 1);
A.put(1,1, new Z(-1,2));
A.put(2,1, new Z(2,-1));
Zmat B = new Zmat(A);
Zmat C = H.o(A);
Zmat D = new Zmat(C);
Rot P = Rot.genc(A, 1, 2, 1);
Rot.pa(P, B, 1, 2, 1, 1);
Print.o(A);
Print.o(B);
Rot.pha(P, B, 1, 2, 1, 1);
Print.o(B);
Rot.pha(P, B, 1, 2, 1, 1);
Rot.pa(P, B, 1, 2, 1, 1);
Print.o(B);
Rot Q = Rot.genr(C, 1, 1, 2);
Rot.ap(D, Q, 1, 1, 1, 2);
Print.o(C);
Print.o(D);
Rot.aph(D, Q, 1, 1, 1, 2);
Print.o(D);
Rot.aph(D, Q, 1, 1, 1, 2);
Rot.ap(D, Q, 1, 1, 1, 2);
Print.o(D);
}
Rot Q = Rot.genr(C, 1, 1, 2);
Rot.ap(D, Q, 1, 1, 1, 2);
Print.o(C);
Print.o(D);
Rot.aph(D, Q, 1, 1, 1, 2);
Print.o(D);
Rot.aph(D, Q, 1, 1, 1, 2);
Rot.ap(D, Q, 1, 1, 1, 2);
Print.o(D);
}
}

@ -1,132 +1,133 @@
package Jampack;
/**
Schur implements the Schur decomposition of a matrix. Specifically,
given a square matrix A, there is a unitary matrix U such that
<pre>
* T = U^H AU
</pre>
is upper triangular. Schur represents T as a Zutmat and U as a Zmat.
* Schur implements the Schur decomposition of a matrix. Specifically, given a square matrix A, there is a unitary matrix U such
* that
*
* <pre>
* T = U^H AU
* </pre>
*
* is upper triangular. Schur represents T as a Zutmat and U as a Zmat.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Schur {
public class Schur{
/** The upper triangular matrix. */
public Zutmat T;
/** The upper triangular matrix. */
public Zutmat T;
/** The unitary matrix. */
public Zmat U;
/** The unitary matrix. */
public Zmat U;
	/** Limits the number of iterations in the QR algorithm */
public static int MAXITER = 30;
	/** Limits the number of iterations in the QR algorithm */
public static int MAXITER = 30;
/**
	 * Creates a Schur decomposition from a square Zmat.
*
* @param A
* The Zmat whose Schur decomposition is to be computed
* @exception JampackException
* Thrown for nonsquare matrix.<br>
* Thrown for maximum iteration count exceeded.
*/
public Schur(Zmat A) throws JampackException {
/**
	Creates a Schur decomposition from a square Zmat.
@param A The Zmat whose Schur decomposition is to be computed
@exception JampackException
Thrown for nonsquare matrix.<br>
Thrown for maximum iteration count exceeded.
*/
public Schur(Zmat A)
throws JampackException{
int i, il, iter, iu, k;
double d, sd, sf;
Z b = new Z(), c = new Z(), disc = new Z(), kappa = new Z(), p, q, r, r1 = new Z(), r2 = new Z(), s, z1 = new Z(), z2 = new Z();
Rot P = new Rot();
int i, il, iter, iu, k;
double d, sd, sf;
Z b = new Z(), c = new Z(), disc = new Z(), kappa = new Z(),
p, q, r, r1 = new Z(), r2 = new Z(), s, z1 = new Z(), z2 = new Z();
Rot P = new Rot();
if (A.nr != A.nc) {
throw new JampackException("Nonsquare matrix");
}
if (A.nr != A.nc){
throw new JampackException
("Nonsquare matrix");
}
/* Reduce to Hessenberg form and set up T and U */
/* Reduce to Hessenberg form and set up T and U */
Zhess H = new Zhess(A);
T = new Zutmat(H.H);
U = H.U;
Zhess H = new Zhess(A);
T = new Zutmat(H.H);
U = H.U;
iu = T.rx;
iter = 0;
while (true) {
iu = T.rx;
iter = 0;
while(true){
// Locate the range in which to iterate.
// Locate the range in which to iterate.
while (iu > T.bx) {
d = Z.abs1(T.get(iu, iu)) + Z.abs1(T.get(iu - 1, iu - 1));
sd = Z.abs1(T.get(iu, iu - 1));
if (sd >= 1.0e-16 * d)
break;
T.put(iu, iu - 1, Z.ZERO);
iter = 0;
iu = iu - 1;
}
if (iu == T.bx)
break;
while (iu > T.bx){
d = Z.abs1(T.get(iu,iu)) + Z.abs1(T.get(iu-1,iu-1));
sd = Z.abs1(T.get(iu,iu-1));
if (sd >= 1.0e-16*d) break;
T.put(iu, iu-1, Z.ZERO);
iter = 0;
iu = iu-1;
}
if (iu == T.bx) break;
iter = iter + 1;
if (iter >= MAXITER) {
throw new JampackException("Maximum number of iterations exceeded.");
}
il = iu - 1;
while (il > T.bx) {
d = Z.abs1(T.get(il, il)) + Z.abs1(T.get(il - 1, il - 1));
sd = Z.abs1(T.get(il, il - 1));
if (sd < 1.0e-16 * d)
break;
il = il - 1;
}
if (il != T.bx) {
T.put(il, il - 1, Z.ZERO);
}
iter = iter+1;
if (iter >= MAXITER){
throw new JampackException
("Maximum number of iterations exceeded.");
}
il = iu-1;
while (il > T.bx){
d = Z.abs1(T.get(il,il)) + Z.abs1(T.get(il-1,il-1));
sd = Z.abs1(T.get(il,il-1));
if (sd < 1.0e-16*d) break;
il = il-1;
}
if(il != T.bx){
T.put(il, il-1, Z.ZERO);
}
// Compute the shift.
// Compute the shift.
p = T.get(iu-1,iu-1);
q = T.get(iu-1,iu);
r = T.get(iu,iu-1);
s = T.get(iu,iu);
p = T.get(iu - 1, iu - 1);
q = T.get(iu - 1, iu);
r = T.get(iu, iu - 1);
s = T.get(iu, iu);
sf = Z.abs1(p) + Z.abs1(q) + Z.abs1(r) + Z.abs1(s);
p.Div(p, sf);
q.Div(q, sf);
r.Div(r, sf);
s.Div(s, sf);
sf = Z.abs1(p) + Z.abs1(q) + Z.abs1(r) + Z.abs1(s);
p.Div(p, sf);
q.Div(q, sf);
r.Div(r, sf);
s.Div(s, sf);
c.Minus(z1.Times(p, s), z2.Times(r, q));
b.Plus(p, s);
c.Minus(z1.Times(p, s), z2.Times(r, q));
b.Plus(p, s);
disc.Sqrt(disc.Minus(z1.Times(b,b), z2.Times(4,c)));
r1.Div(r1.Plus(b, disc), 2);
r2.Div(r2.Minus(b, disc), 2);
if (Z.abs1(r1) > Z.abs1(r2)){
r2.Div(c, r1);
}
else{
r1.Div(c, r2);
}
if (Z.abs1(z1.Minus(r1, s)) < Z.abs1(z2.Minus(r2, s))){
kappa.Times(sf, r1);
}
else{
kappa.Times(sf, r2);
}
disc.Sqrt(disc.Minus(z1.Times(b, b), z2.Times(4, c)));
r1.Div(r1.Plus(b, disc), 2);
r2.Div(r2.Minus(b, disc), 2);
if (Z.abs1(r1) > Z.abs1(r2)) {
r2.Div(c, r1);
} else {
r1.Div(c, r2);
}
if (Z.abs1(z1.Minus(r1, s)) < Z.abs1(z2.Minus(r2, s))) {
kappa.Times(sf, r1);
} else {
kappa.Times(sf, r2);
}
// Perform the QR step.
// Perform the QR step.
p.Minus(T.get(il,il), kappa);
q.Eq(T.get(il+1,il));
Rot.genc(p.re, p.im, q.re, q.im, P);
for (i=il; i<iu; i++){
Rot.pa(P, T, i, i+1, i, T.cx);
Rot.aph(T, P, T.bx, Math.min(i+2,iu), i, i+1);
Rot.aph(U, P, U.bx, U.rx, i, i+1);
if (i != iu-1){
Rot.genc(T, i+1, i+2, i, P);
}
}
}
}
p.Minus(T.get(il, il), kappa);
q.Eq(T.get(il + 1, il));
Rot.genc(p.re, p.im, q.re, q.im, P);
for (i = il; i < iu; i++) {
Rot.pa(P, T, i, i + 1, i, T.cx);
Rot.aph(T, P, T.bx, Math.min(i + 2, iu), i, i + 1);
Rot.aph(U, P, U.bx, U.rx, i, i + 1);
if (i != iu - 1) {
Rot.genc(T, i + 1, i + 2, i, P);
}
}
}
}
}
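
Since T = U^H A U with U unitary, A should equal U T U^H up to rounding error. As an illustration (not part of this commit), the sketch below runs the decomposition on a small random matrix and prints the Frobenius norm of that residual, the same check SchurTest performs below.

package Jampack;

class SchurDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		Zmat A = Rand.nzmat(5, 5);
		Schur S = new Schur(A);
		// Residual of A - U*T*U^H; should be at rounding-error level.
		Print.o(Norm.fro(Minus.o(A, Times.o(S.U, Times.o(S.T, H.o(S.U))))));
	}
}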

@ -1,28 +1,27 @@
package Jampack;
class SchurTest{
class SchurTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(1);
Parameters.setBaseIndex(1);
int n = 50;
int n = 50;
// Schur.MAXITER = 2;
// Schur.MAXITER = 2;
Z z = new Z();
Zmat A = new Zmat(n,n);
for (int i=A.bx; i<=A.rx; i++){
for (int j=A.bx; j<=A.cx; j++){
A.put(i, j, new Z(i+j, i-2*j));
}
A.put(i,i, new Z(2*n, 2*n));
}
Z z = new Z();
Zmat A = new Zmat(n, n);
Schur B = new Schur(A);
Print.o(Z.abs(z.Minus(Trace.o(A), Trace.o(B.T))));
Print.o(Norm.fro(Minus.o(A,Times.o(B.U, Times.o(B.T, H.o(B.U))))));
}
for (int i = A.bx; i <= A.rx; i++) {
for (int j = A.bx; j <= A.cx; j++) {
A.put(i, j, new Z(i + j, i - 2 * j));
}
A.put(i, i, new Z(2 * n, 2 * n));
}
Schur B = new Schur(A);
Print.o(Z.abs(z.Minus(Trace.o(A), Trace.o(B.T))));
Print.o(Norm.fro(Minus.o(A, Times.o(B.U, Times.o(B.T, H.o(B.U))))));
}
}

Diff not shown because of its large size.
@ -1,398 +1,385 @@
package Jampack;
class SolveTest{
class SolveTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
// Parameters.SetBaseIndex(0);
// Parameters.SetBaseIndex(0);
if (args[0].equals("lib")){
if (args[0].equals("lib")) {
int i, j, n=3;
int i, j, n = 3;
Z La[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j <= i)
La[i][j] = new Z(i+1,j+1);
else
La[i][j] = new Z(0,0);
}
Z La[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j <= i)
La[i][j] = new Z(i + 1, j + 1);
else
La[i][j] = new Z(0, 0);
}
Zltmat L = new Zltmat(La);
Zltmat L = new Zltmat(La);
Z Xa[][] = new Z[n][2];
Z Xa[][] = new Z[n][2];
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat X = new Zmat(Xa);
Zmat B = Times.o(L,X);
Zmat B = Times.o(L, X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(L, B))));
}
Print.o(Norm.fro(Minus.o(X, Solve.aib(L, B))));
}
else if (args[0].equals("lhib")) {
else if (args[0].equals("lhib")){
int i, j, n = 4;
int i, j, n=4;
Z La[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j <= i)
La[i][j] = new Z(i + 1, j + 1);
else
La[i][j] = new Z(0, 0);
}
Z La[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j <= i)
La[i][j] = new Z(i+1,j+1);
else
La[i][j] = new Z(0,0);
}
Zltmat L = new Zltmat(La);
Zltmat L = new Zltmat(La);
Z Xa[][] = new Z[n][2];
Z Xa[][] = new Z[n][2];
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat X = new Zmat(Xa);
Zmat B = Times.o(H.o(L), X);
Zmat B = Times.o(H.o(L), X);
Print.o(Norm.fro(Minus.o(X, Solve.ahib(L, B))));
}
Print.o(Norm.fro(Minus.o(X, Solve.ahib(L, B))));
}
else if (args[0].equals("bli")) {
else if (args[0].equals("bli")){
int i, j, n = 4;
int i, j, n=4;
Z La[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j <= i)
La[i][j] = new Z(i + 1, j + 1);
else
La[i][j] = new Z(0, 0);
}
Z La[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j <= i)
La[i][j] = new Z(i+1,j+1);
else
La[i][j] = new Z(0,0);
}
Zltmat L = new Zltmat(La);
Zltmat L = new Zltmat(La);
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(j, j + 1);
}
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(j,j+1);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, L);
Zmat X = new Zmat(Xa);
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, L))));
}
Zmat B = Times.o(X, L);
else if (args[0].equals("blhi")) {
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, L))));
}
int i, j, n = 4;
else if (args[0].equals("blhi")){
Z La[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j <= i)
La[i][j] = new Z(i + 1, j + 1);
else
La[i][j] = new Z(0, 0);
}
int i, j, n=4;
Zltmat L = new Zltmat(La);
Z La[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j <= i)
La[i][j] = new Z(i+1,j+1);
else
La[i][j] = new Z(0,0);
}
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(j, j + 1);
}
Zltmat L = new Zltmat(La);
Zmat X = new Zmat(Xa);
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(j,j+1);
}
Zmat B = Times.o(X, H.o(L));
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, L))));
}
Zmat X = new Zmat(Xa);
else if (args[0].equals("uib")) {
Zmat B = Times.o(X, H.o(L));
int i, j, n = 3;
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, L))));
}
Z Ua[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j >= i)
Ua[i][j] = new Z(i + 1, j + 1);
else
Ua[i][j] = new Z(0, 0);
}
else if (args[0].equals("uib")){
Zutmat U = new Zutmat(Ua);
int i, j, n=3;
Z Xa[][] = new Z[n][2];
Z Ua[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j >= i)
Ua[i][j] = new Z(i+1,j+1);
else
Ua[i][j] = new Z(0,0);
}
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Zutmat U = new Zutmat(Ua);
Zmat X = new Zmat(Xa);
Z Xa[][] = new Z[n][2];
Zmat B = Times.o(U, X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(U, B))));
}
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
else if (args[0].equals("uhib")) {
Zmat X = new Zmat(Xa);
int i, j, n = 3;
Zmat B = Times.o(U,X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(U, B))));
}
Z Ua[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j >= i)
Ua[i][j] = new Z(i + 1, j + 1);
else
Ua[i][j] = new Z(0, 0);
}
else if (args[0].equals("uhib")){
Zutmat U = new Zutmat(Ua);
int i, j, n=3;
Z Xa[][] = new Z[n][2];
Z Ua[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j >= i)
Ua[i][j] = new Z(i+1,j+1);
else
Ua[i][j] = new Z(0,0);
}
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Zutmat U = new Zutmat(Ua);
Zmat X = new Zmat(Xa);
Z Xa[][] = new Z[n][2];
Zmat B = Times.o(H.o(U), X);
Print.o(Norm.fro(Minus.o(X, Solve.ahib(U, B))));
}
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
else if (args[0].equals("bui")) {
Zmat X = new Zmat(Xa);
int i, j, n = 4;
Zmat B = Times.o(H.o(U), X);
Print.o(Norm.fro(Minus.o(X, Solve.ahib(U, B))));
}
Z Ua[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j >= i)
Ua[i][j] = new Z(i + 1, j + 1);
else
Ua[i][j] = new Z(0, 0);
}
else if (args[0].equals("bui")){
Zutmat U = new Zutmat(Ua);
int i, j, n=4;
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(j, j + 1);
}
Z Ua[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j >= i)
Ua[i][j] = new Z(i+1,j+1);
else
Ua[i][j] = new Z(0,0);
}
Zmat X = new Zmat(Xa);
Zutmat U = new Zutmat(Ua);
Zmat B = Times.o(X, U);
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(j,j+1);
}
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, U))));
}
else if (args[0].equals("buhi")) {
Zmat X = new Zmat(Xa);
int i, j, n = 4;
Zmat B = Times.o(X, U);
Z Ua[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
if (j >= i)
Ua[i][j] = new Z(i + 1, j + 1);
else
Ua[i][j] = new Z(0, 0);
}
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, U))));
}
Zutmat U = new Zutmat(Ua);
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(j, j + 1);
}
else if (args[0].equals("buhi")){
Zmat X = new Zmat(Xa);
int i, j, n=4;
Zmat B = Times.o(X, H.o(U));
Z Ua[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
if (j >= i)
Ua[i][j] = new Z(i+1,j+1);
else
Ua[i][j] = new Z(0,0);
}
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, U))));
}
Zutmat U = new Zutmat(Ua);
else if (args[0].equals("aib")) {
int i, j, n = 5;
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(j,j+1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(Aa);
Zmat X = new Zmat(Xa);
Z Xa[][] = new Z[n][2];
Zmat B = Times.o(X, H.o(U));
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, U))));
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(A, X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(A, B))));
}
else if (args[0].equals("aib")){
int i, j, n=5;
else if (args[0].equals("ahib")) {
int i, j, n = 10;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(Aa);
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[n][2];
Z Xa[][] = new Z[n][2];
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat X = new Zmat(Xa);
Zmat B = Times.o(H.o(A), X);
Print.o(Norm.fro(Minus.o(X, Solve.ahib(A, B))));
}
else if (args[0].equals("bai")) {
int i, j, n = 10;
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat B = Times.o(A,X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(A, B))));
}
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[2][n];
else if (args[0].equals("ahib")){
int i, j, n=10;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[n][2];
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(H.o(A), X);
Print.o(Norm.fro(Minus.o(X, Solve.ahib(A, B))));
}
else if (args[0].equals("bai")){
int i, j, n=10;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, A);
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, A))));
}
else if (args[0].equals("bahi")){
int i, j, n=10;
Z Aa[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=0; j<n; j++)
Aa[i][j] = new Z(i+1,j+1);
Aa[i][i] = new Z(1,1);
}
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, H.o(A));
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, A))));
}
else if (args[0].equals("pdaib")){
int i, j, n=5;
Zpsdmat A = new Zpsdmat(n,n);
for (i=0; i<n; i++){
for (j=0; j<n; j++){
A.re[i][j] = i;
A.im[i][j] = i+j;
}
A.re[i][i] = 2*n;
A.im[i][i] = 2*n;
}
A = new Zpsdmat(Times.o(H.o(A), A));
Z Xa[][] = new Z[n][2];
for (i=0; i<n; i++){
Xa[i][0] = new Z(1,1);
Xa[i][1] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(A, X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(A, B))));
}
else if (args[0].equals("pdbai")){
int i, j, n=5;
Zpsdmat A = new Zpsdmat(n,n);
for (i=0; i<n; i++){
for (j=0; j<n; j++){
A.re[i][j] = i;
A.im[i][j] = i+j;
}
A.re[i][i] = 2*n;
A.im[i][i] = 2*n;
}
A = new Zpsdmat(Times.o(H.o(A), A));
Z Xa[][] = new Z[2][n];
for (j=0; j<n; j++){
Xa[0][j] = new Z(1,1);
Xa[1][j] = new Z(i,i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, A);
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, A))));
}
}
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, A);
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, A))));
}
else if (args[0].equals("bahi")) {
int i, j, n = 10;
Z Aa[][] = new Z[n][n];
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++)
Aa[i][j] = new Z(i + 1, j + 1);
Aa[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(Aa);
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, H.o(A));
Print.o(Norm.fro(Minus.o(X, Solve.bahi(B, A))));
}
else if (args[0].equals("pdaib")) {
int i, j, n = 5;
Zpsdmat A = new Zpsdmat(n, n);
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++) {
A.re[i][j] = i;
A.im[i][j] = i + j;
}
A.re[i][i] = 2 * n;
A.im[i][i] = 2 * n;
}
A = new Zpsdmat(Times.o(H.o(A), A));
Z Xa[][] = new Z[n][2];
for (i = 0; i < n; i++) {
Xa[i][0] = new Z(1, 1);
Xa[i][1] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(A, X);
Print.o(Norm.fro(Minus.o(X, Solve.aib(A, B))));
}
else if (args[0].equals("pdbai")) {
int i, j, n = 5;
Zpsdmat A = new Zpsdmat(n, n);
for (i = 0; i < n; i++) {
for (j = 0; j < n; j++) {
A.re[i][j] = i;
A.im[i][j] = i + j;
}
A.re[i][i] = 2 * n;
A.im[i][i] = 2 * n;
}
A = new Zpsdmat(Times.o(H.o(A), A));
Z Xa[][] = new Z[2][n];
for (j = 0; j < n; j++) {
Xa[0][j] = new Z(1, 1);
Xa[1][j] = new Z(i, i);
}
Zmat X = new Zmat(Xa);
Zmat B = Times.o(X, A);
Print.o(Norm.fro(Minus.o(X, Solve.bai(B, A))));
}
}
}

@ -1,76 +1,80 @@
package Jampack;
/**
Swap interchanges rows and columns of a matrix.
* Swap interchanges rows and columns of a matrix.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Swap {
public class Swap{
/**
	 * Interchanges two rows of a Zmat (altered).
*
* @param A
* The Zmat (altered)
* @param r1
* The index of the first row
* @param r2
* The index of the second row
* @exception JampackException
* Thrown for inconsistent row indices.
*/
/**
	Interchanges two rows of a Zmat (altered).
@param A The Zmat (altered)
@param r1 The index of the first row
@param r2 The index of the second row
@exception JampackException
Thrown for inconsistent row indices.
*/
public static void rows(Zmat A, int r1, int r2) throws JampackException {
public static void rows(Zmat A, int r1, int r2)
throws JampackException{
A.getProperties();
if (r1 < A.bx || r1 > A.rx || r2 < A.bx || r2 > A.rx) {
throw new JampackException("Inconsistent row indices");
}
A.getProperties();
if (r1<A.bx || r1>A.rx || r2<A.bx || r2>A.rx){
throw new JampackException
("Inconsistent row indices");
}
A.dirty = true;
A.dirty = true;
r1 = r1 - A.bx;
r2 = r2 - A.bx;
r1 = r1-A.bx;
r2 = r2-A.bx;
		for (int j = 0; j < A.nc; j++) {
double t = A.re[r1][j];
A.re[r1][j] = A.re[r2][j];
A.re[r2][j] = t;
t = A.im[r1][j];
A.im[r1][j] = A.im[r2][j];
A.im[r2][j] = t;
}
}
		for (int j=0; j<A.nc; j++){
double t = A.re[r1][j];
A.re[r1][j] = A.re[r2][j];
A.re[r2][j] = t;
t = A.im[r1][j];
A.im[r1][j] = A.im[r2][j];
A.im[r2][j] = t;
}
}
/**
	 * Interchanges two columns of a Zmat (altered).
*
* @param A
* The Zmat (altered)
* @param c1
* The index of the first column
* @param c2
* The index of the second column
* @exception JampackException
* Thrown for inconsistent column indices.
*/
public static void cols(Zmat A, int c1, int c2) throws JampackException {
/**
	Interchanges two columns of a Zmat (altered).
@param A The Zmat (altered)
@param c1 The index of the first column
@param c2 The index of the second column
@exception JampackException
Thrown for inconsistent column indices.
*/
public static void cols(Zmat A, int c1, int c2)
throws JampackException{
A.getProperties();
if (c1 < A.bx || c1 > A.cx || c2 < A.bx || c2 > A.cx) {
throw new JampackException("Inconsistent row indices");
}
A.getProperties();
if (c1<A.bx || c1>A.cx || c2<A.bx || c2>A.cx){
throw new JampackException
("Inconsistent row indices");
}
A.dirty = true;
A.dirty = true;
c1 = c1 - A.bx;
c2 = c2 - A.bx;
c1 = c1-A.bx;
c2 = c2-A.bx;
		for (int i=0; i<A.nr; i++){
double t = A.re[i][c1];
A.re[i][c1] = A.re[i][c2];
A.re[i][c2] = t;
t = A.im[i][c1];
A.im[i][c1] = A.im[i][c2];
A.im[i][c2] = t;
}
}
		for (int i = 0; i < A.nr; i++) {
double t = A.re[i][c1];
A.re[i][c1] = A.re[i][c2];
A.re[i][c2] = t;
t = A.im[i][c1];
A.im[i][c1] = A.im[i][c2];
A.im[i][c2] = t;
}
}
}
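
Illustration only (not part of this commit): swapping the same pair of rows (or columns) twice is a no-op, so both residuals below should print as zero. Indices are passed in the matrix's own base index, as the range checks above expect; the class name and indices are made up for the demo.

package Jampack;

class SwapDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		Zmat A = Rand.uzmat(4, 4);
		Zmat B = new Zmat(A); // working copy
		Swap.rows(B, B.bx, B.bx + 2);
		Swap.rows(B, B.bx, B.bx + 2); // undo
		Print.o(Norm.fro(Minus.o(A, B))); // should print 0
		Swap.cols(B, B.bx + 1, B.bx + 3);
		Swap.cols(B, B.bx + 1, B.bx + 3); // undo
		Print.o(Norm.fro(Minus.o(A, B))); // should print 0
	}
}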

@ -1,37 +1,36 @@
package Jampack;
class SwapTest{
class SwapTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
int i, j;
Z z = new Z();
int i, j;
Z z = new Z();
Zmat A = new Zmat(3, 3);
for (i=A.bx; i<=A.rx; i++){
for (j=A.bx; j<=A.cx; j++){
A.put(i,j, z.Eq(i,j));
}
}
Zmat A = new Zmat(3, 3);
for (i = A.bx; i <= A.rx; i++) {
for (j = A.bx; j <= A.cx; j++) {
A.put(i, j, z.Eq(i, j));
}
}
Zmat AA = new Zmat(A);
Zmat AA = new Zmat(A);
for (i=AA.bx; i<AA.rx; i++){
Swap.rows(AA, i, i+1);
}
for (i=AA.rx-1; i>=AA.bx; i--){
Swap.rows(AA, i+1, i);
}
Print.o(Norm.fro(Minus.o(AA,A)));
for (i = AA.bx; i < AA.rx; i++) {
Swap.rows(AA, i, i + 1);
}
for (i = AA.rx - 1; i >= AA.bx; i--) {
Swap.rows(AA, i + 1, i);
}
Print.o(Norm.fro(Minus.o(AA, A)));
for (j=AA.bx; j<AA.cx; j++){
Swap.cols(AA, j, j+1);
}
for (j=AA.cx-1; j>=AA.bx; j--){
Swap.cols(AA, j+1, j);
}
Print.o(Norm.fro(Minus.o(AA,A)));
for (j = AA.bx; j < AA.cx; j++) {
Swap.cols(AA, j, j + 1);
}
for (j = AA.cx - 1; j >= AA.bx; j--) {
Swap.cols(AA, j + 1, j);
}
Print.o(Norm.fro(Minus.o(AA, A)));
}
}
}

@ -1,198 +1,207 @@
package Jampack;
public class Times{
public class Times {
/**
Computes the product of a Z and a Zmat.
@param z The complex scalar
@param A The Zmat
@return zA
*/
/**
* Computes the product of a Z and a Zmat.
*
* @param z
* The complex scalar
* @param A
* The Zmat
* @return zA
*/
public static Zmat o(Z z, Zmat A)
{
public static Zmat o(Z z, Zmat A) {
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i=0; i<A.nrow; i++)
for (int j=0; j<A.ncol; j++){
B.re[i][j] = z.re*A.re[i][j] - z.im*A.im[i][j];
B.im[i][j] = z.im*A.re[i][j] + z.re*A.im[i][j];
}
return B;
}
/**
Computes the product of two Zmats.
@param A The first Zmat
@param B The second Zmat
@return AB
@exception JampackException for unconformity
*/
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i = 0; i < A.nrow; i++)
for (int j = 0; j < A.ncol; j++) {
B.re[i][j] = z.re * A.re[i][j] - z.im * A.im[i][j];
B.im[i][j] = z.im * A.re[i][j] + z.re * A.im[i][j];
}
return B;
}
public static Zmat o(Zmat A, Zmat B)
throws JampackException{
if (A.ncol != B.nrow)
throw new JampackException("Unconformity in product");
Zmat C = new Zmat(A.nrow, B.ncol);
for (int i=0; i<A.nrow; i++)
for (int k=0; k<A.ncol; k++)
for (int j=0; j<B.ncol; j++){
C.re[i][j] = C.re[i][j] + A.re[i][k]*B.re[k][j]
- A.im[i][k]*B.im[k][j];
C.im[i][j] = C.im[i][j] + A.im[i][k]*B.re[k][j]
+ A.re[i][k]*B.im[k][j];
}
return C;
}
/**
* Computes the product of two Zmats.
*
* @param A
* The first Zmat
* @param B
* The second Zmat
* @return AB
* @exception JampackException
* for unconformity
*/
public static Zmat o(Zmat A, Zmat B) throws JampackException {
if (A.ncol != B.nrow)
throw new JampackException("Unconformity in product");
Zmat C = new Zmat(A.nrow, B.ncol);
for (int i = 0; i < A.nrow; i++)
for (int k = 0; k < A.ncol; k++)
for (int j = 0; j < B.ncol; j++) {
C.re[i][j] = C.re[i][j] + A.re[i][k] * B.re[k][j] - A.im[i][k] * B.im[k][j];
C.im[i][j] = C.im[i][j] + A.im[i][k] * B.re[k][j] + A.re[i][k] * B.im[k][j];
}
return C;
}
/**
Computes A<sup>H</sup>A, where A is a Zmat.
@param A The Zmat
@return A<sup>H</sup>A
*/
/**
* Computes A<sup>H</sup>A, where A is a Zmat.
*
* @param A
* The Zmat
* @return A<sup>H</sup>A
*/
public static Zpsdmat aha(Zmat A){
public static Zpsdmat aha(Zmat A) {
Zpsdmat C = new Zpsdmat(A.ncol, A.ncol);
for (int k=0; k<A.nrow; k++){
for (int i=0; i<A.ncol; i++){
C.re[i][i] = C.re[i][i] +
A.re[k][i]*A.re[k][i] + A.im[k][i]*A.im[k][i];
C.im[i][i] = 0.;
for (int j=i+1; j<A.ncol; j++){
C.re[i][j] = C.re[i][j] +
A.re[k][i]*A.re[k][j] + A.im[k][i]*A.im[k][j];
C.im[i][j] = C.im[i][j] +
A.re[k][i]*A.im[k][j] - A.im[k][i]*A.re[k][j];
}
}
}
for (int i=0; i<A.ncol; i++){
for (int j=i+1; j<A.ncol; j++){
C.re[j][i] = C.re[i][j];
C.im[j][i] = -C.im[i][j];
}
}
return C;
}
Zpsdmat C = new Zpsdmat(A.ncol, A.ncol);
for (int k = 0; k < A.nrow; k++) {
for (int i = 0; i < A.ncol; i++) {
C.re[i][i] = C.re[i][i] + A.re[k][i] * A.re[k][i] + A.im[k][i] * A.im[k][i];
C.im[i][i] = 0.;
for (int j = i + 1; j < A.ncol; j++) {
C.re[i][j] = C.re[i][j] + A.re[k][i] * A.re[k][j] + A.im[k][i] * A.im[k][j];
C.im[i][j] = C.im[i][j] + A.re[k][i] * A.im[k][j] - A.im[k][i] * A.re[k][j];
}
}
}
for (int i = 0; i < A.ncol; i++) {
for (int j = i + 1; j < A.ncol; j++) {
C.re[j][i] = C.re[i][j];
C.im[j][i] = -C.im[i][j];
}
}
return C;
}
/**
Computes AA<sup>H</sup>, where A is a Zmat.
@param A The Zmat
@return AA<sup>H</sup>
*/
/**
* Computes AA<sup>H</sup>, where A is a Zmat.
*
* @param A
* The Zmat
* @return AA<sup>H</sup>
*/
public static Zpsdmat aah(Zmat A){
public static Zpsdmat aah(Zmat A) {
Zpsdmat C = new Zpsdmat(A.nrow, A.nrow);
for (int i=0; i<A.nrow; i++){
for (int k=0; k<A.ncol; k++){
C.re[i][i] = C.re[i][i] +
A.re[i][k]*A.re[i][k] + A.im[i][k]*A.im[i][k];
}
C.im[i][i] = 0.;
for (int j=i+1; j<A.nrow; j++){
for(int k=0; k<A.ncol; k++){
C.re[i][j] = C.re[i][j] +
A.re[i][k]*A.re[j][k] + A.im[i][k]*A.im[j][k];
C.im[i][j] = C.im[i][j] -
A.re[i][k]*A.im[j][k] + A.im[i][k]*A.re[j][k];
}
C.re[j][i] = C.re[i][j];
C.im[j][i] = - C.im[i][j];
}
}
return C;
}
Zpsdmat C = new Zpsdmat(A.nrow, A.nrow);
for (int i = 0; i < A.nrow; i++) {
for (int k = 0; k < A.ncol; k++) {
C.re[i][i] = C.re[i][i] + A.re[i][k] * A.re[i][k] + A.im[i][k] * A.im[i][k];
}
C.im[i][i] = 0.;
for (int j = i + 1; j < A.nrow; j++) {
for (int k = 0; k < A.ncol; k++) {
C.re[i][j] = C.re[i][j] + A.re[i][k] * A.re[j][k] + A.im[i][k] * A.im[j][k];
C.im[i][j] = C.im[i][j] - A.re[i][k] * A.im[j][k] + A.im[i][k] * A.re[j][k];
}
C.re[j][i] = C.re[i][j];
C.im[j][i] = -C.im[i][j];
}
}
return C;
}
/**
Computes the product of a Z and a Zdiagmat.
@param z The complex scalar
@param D The Zdiagmat
@return zD
*/
/**
* Computes the product of a Z and a Zdiagmat.
*
* @param z
* The complex scalar
* @param D
* The Zdiagmat
* @return zD
*/
public static Zdiagmat o(Z z, Zdiagmat D){
Zdiagmat B = new Zdiagmat(D);
for (int i=0; i<D.order; i++){
B.re[i] = z.re*D.re[i] - z.im*D.im[i];
B.im[i] = z.im*D.re[i] + z.re*D.im[i];
}
return B;
}
public static Zdiagmat o(Z z, Zdiagmat D) {
/**
Computes the product of two Zdiagmats.
@param D1 The first Zdiagmat
@param D2 The second Zdiagmat
@return D1*D2
@exception JampackException for unconformity
*/
Zdiagmat B = new Zdiagmat(D);
for (int i = 0; i < D.order; i++) {
B.re[i] = z.re * D.re[i] - z.im * D.im[i];
B.im[i] = z.im * D.re[i] + z.re * D.im[i];
}
return B;
}
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2)
throws JampackException{
if (D1.order != D2.order){
throw new JampackException
("Unconformity in product");
}
Zdiagmat D3 = new Zdiagmat(D1.order);
for (int i=0; i<D3.order; i++){
D3.re[i] = D1.re[i]*D2.re[i] - D1.im[i]*D2.im[i];
D3.im[i] = D1.re[i]*D2.im[i] + D1.im[i]*D2.re[i];
}
return D3;
}
/**
* Computes the product of two Zdiagmats.
*
* @param D1
* The first Zdiagmat
* @param D2
* The second Zdiagmat
* @return D1*D2
* @exception JampackException
* for unconformity
*/
/**
Computes the product of a Zdiagmat and a Zmat.
@param D The Zdiagmat
@param A The Zmat
@return DA
@exception JampackException for unconformity
*/
public static Zdiagmat o(Zdiagmat D1, Zdiagmat D2) throws JampackException {
if (D1.order != D2.order) {
throw new JampackException("Unconformity in product");
}
Zdiagmat D3 = new Zdiagmat(D1.order);
for (int i = 0; i < D3.order; i++) {
D3.re[i] = D1.re[i] * D2.re[i] - D1.im[i] * D2.im[i];
D3.im[i] = D1.re[i] * D2.im[i] + D1.im[i] * D2.re[i];
}
return D3;
}
public static Zmat o(Zdiagmat D, Zmat A)
throws JampackException{
/**
* Computes the product of a Zdiagmat and a Zmat.
*
* @param D
* The Zdiagmat
* @param A
* The Zmat
* @return DA
* @exception JampackException
* for unconformity
*/
if (D.order != A.nrow){
throw new JampackException
("Unconformity in product.");
}
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i=0; i<A.nrow; i++){
for (int j=0; j<A.nc; j++){
B.re[i][j] = D.re[i]*A.re[i][j] - D.im[i]*A.im[i][j];
B.im[i][j] = D.re[i]*A.im[i][j] + D.im[i]*A.re[i][j];
}
}
return B;
}
public static Zmat o(Zdiagmat D, Zmat A) throws JampackException {
/**
Computes the product of a Zmat and a Zdiagmat.
	@param A The Zmat
@param D The Zdiagmat
@return AD
@exception JampackException for unconformity
*/
if (D.order != A.nrow) {
throw new JampackException("Unconformity in product.");
}
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i = 0; i < A.nrow; i++) {
for (int j = 0; j < A.nc; j++) {
B.re[i][j] = D.re[i] * A.re[i][j] - D.im[i] * A.im[i][j];
B.im[i][j] = D.re[i] * A.im[i][j] + D.im[i] * A.re[i][j];
}
}
return B;
}
public static Zmat o(Zmat A, Zdiagmat D)
throws JampackException{
/**
* Computes the product of a Zmat and a Zdiagmat.
*
* @param A
	 *            The Zmat
* @param D
* The Zdiagmat
* @return AD
* @exception JampackException
* for unconformity
*/
if (D.order != A.ncol){
throw new JampackException
("Unconformity in product.");
}
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i=0; i<A.nrow; i++){
for (int j=0; j<A.ncol; j++){
B.re[i][j] = D.re[j]*A.re[i][j] - D.im[j]*A.im[i][j];
B.im[i][j] = D.re[j]*A.im[i][j] + D.im[j]*A.re[i][j];
}
}
return B;
}
public static Zmat o(Zmat A, Zdiagmat D) throws JampackException {
if (D.order != A.ncol) {
throw new JampackException("Unconformity in product.");
}
Zmat B = new Zmat(A.nrow, A.ncol);
for (int i = 0; i < A.nrow; i++) {
for (int j = 0; j < A.ncol; j++) {
B.re[i][j] = D.re[j] * A.re[i][j] - D.im[j] * A.im[i][j];
B.im[i][j] = D.re[j] * A.im[i][j] + D.im[j] * A.re[i][j];
}
}
return B;
}
}
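
Illustration only (not part of this commit): the sketch below checks Times.aha against the explicit product built from H.o and Times.o, mirroring the "zmhzm" case of TimesTest; the printed residual should be zero up to rounding.

package Jampack;

class TimesDemo { // hypothetical demo, not in the repository
	public static void main(String[] args) throws JampackException {
		Zmat A = Rand.uzmat(5, 3);
		// A^H * A computed two ways.
		Print.o(Norm.fro(Minus.o(Times.aha(A), Times.o(H.o(A), A))));
	}
}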

@ -1,96 +1,89 @@
package Jampack;
class TimesTest{
class TimesTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
if (args[0].equals("zmzm")){
if (args[0].equals("zmzm")) {
int nr = 3;
int nc = 4;
int nr = 3;
int nc = 4;
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i;
A.im[i][j] = j;
B.re[j][i] = i+j;
			B.im[j][i] = i-j;
}
}
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i;
A.im[i][j] = j;
B.re[j][i] = i + j;
				B.im[j][i] = i - j;
}
}
Print.o
(Norm.fro(Minus.o(Times.o(A, B), H.o(Times.o(H.o(B),H.o(A))))));
}
Print.o(Norm.fro(Minus.o(Times.o(A, B), H.o(Times.o(H.o(B), H.o(A))))));
}
if (args[0].equals("zmhzm")){
if (args[0].equals("zmhzm")) {
int nr = 3;
int nc = 4;
int nr = 3;
int nc = 4;
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i;
A.im[i][j] = j;
}
}
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i;
A.im[i][j] = j;
}
}
Print.o
(Norm.fro(Minus.o(Times.aha(A), Times.o(H.o(A),A))));
}
Print.o(Norm.fro(Minus.o(Times.aha(A), Times.o(H.o(A), A))));
}
if (args[0].equals("zmzmh")){
if (args[0].equals("zmzmh")) {
int nr = 3;
int nc = 4;
int nr = 3;
int nc = 4;
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i;
A.im[i][j] = j;
}
}
Zmat A = new Zmat(nr, nc);
Zmat B = new Zmat(nc, nr);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i;
A.im[i][j] = j;
}
}
Print.o
(Norm.fro(Minus.o(Times.aah(A), Times.o(A,H.o(A)))));
}
Print.o(Norm.fro(Minus.o(Times.aah(A), Times.o(A, H.o(A)))));
}
else if (args[0].equals("zdmzm")){
else if (args[0].equals("zdmzm")) {
int nr = 3;
int nc = 4;
int nr = 3;
int nc = 4;
Zdiagmat D = new Zdiagmat(nr);
Zmat A = new Zmat(nr, nc);
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++){
A.re[i][j] = i+j;
A.im[i][j] = j;
}
D.re[i] = i;
			D.im[i] = i;
}
Zdiagmat D = new Zdiagmat(nr);
Zmat A = new Zmat(nr, nc);
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++) {
A.re[i][j] = i + j;
A.im[i][j] = j;
}
D.re[i] = i;
				D.im[i] = i;
}
Print.o
(Norm.fro(Minus.o(Times.o(D, A), H.o(Times.o(H.o(A),H.o(D))))));
}
else if (args[0].equals("zdmzdm")){
Print.o(Norm.fro(Minus.o(Times.o(D, A), H.o(Times.o(H.o(A), H.o(D))))));
} else if (args[0].equals("zdmzdm")) {
int n = 4;
int n = 4;
Zdiagmat D = new Zdiagmat(n);
for (int i=0; i<n; i++){
D.re[i] = i;
D.im[i] = i+5;
}
Print.o(Norm.fro(Minus.o(Times.o(D, H.o(D)), Times.o(H.o(D), D))));
}
}
Zdiagmat D = new Zdiagmat(n);
for (int i = 0; i < n; i++) {
D.re[i] = i;
D.im[i] = i + 5;
}
Print.o(Norm.fro(Minus.o(Times.o(D, H.o(D)), Times.o(H.o(D), D))));
}
}
}

@ -1,34 +1,33 @@
package Jampack;
class Trace{
class Trace {
public static Z o(Zmat A){
public static Z o(Zmat A) {
if (A.nc != A.nr){
throw new RuntimeException
("Nonsquare matrix");
}
if (A.nc != A.nr) {
throw new RuntimeException("Nonsquare matrix");
}
Z t = new Z();
Z t = new Z();
for (int i=0; i<A.nr; i++){
t.re = t.re + A.re[i][i];
t.im = t.im + A.im[i][i];
}
for (int i = 0; i < A.nr; i++) {
t.re = t.re + A.re[i][i];
t.im = t.im + A.im[i][i];
}
return t;
}
return t;
}
public static Z o(Zdiagmat D){
public static Z o(Zdiagmat D) {
Z t = new Z();
Z t = new Z();
for (int i=0; i<D.order; i++){
t.re = t.re + D.re[i];
t.im = t.im + D.im[i];
}
for (int i = 0; i < D.order; i++) {
t.re = t.re + D.re[i];
t.im = t.im + D.im[i];
}
return t;
}
return t;
}
}

@ -1,359 +1,375 @@
package Jampack;
/**
* Z is a mutable complex variable class. It is designed to
* perform complex arithmetic without creating a new Z at
* each operation. Specifically, binary operations have the
* form c.op(a,b), in which a, b, and c need not be different.
* The method places the complex number a.op.b in
* c. The method also returns a pointer to c. Thus the
* class supports two styles of programming. For example
* to compute e = a*b + c*d you can write <p>
* Z is a mutable complex variable class. It is designed to perform complex arithmetic without creating a new Z at each operation.
* Specifically, binary operations have the form c.op(a,b), in which a, b, and c need not be different. The method places the
* complex number a.op.b in c. The method also returns a pointer to c. Thus the class supports two styles of programming. For
* example to compute e = a*b + c*d you can write
* <p>
*
* z1.Times(a,b) <br>
* z2.Times(c,d) <br>
* e.Plus(z1,z2) <p>
* e.Plus(z1,z2)
* <p>
*
* or <p>
*
* e.Plus(z1.Times(a,b), z2.Times(a,b)) <p>
*
* Since objects of class Z are mutable, the use of the assignment
* operator "=" with these objects is deprecated. Use Eq. <p>
*
 * The functions are reasonably resistant to overflow and underflow.
* But the more complicated ones could almost certainly be improved.
@version Pre-alpha
@author G. W. Stewart
*/
public class Z{
/** Complex 1. */
public static final Z ONE = new Z(1,0);
/** Complex 0. */
public static final Z ZERO = new Z(0,0);
/** Imaginary unit. */
public static final Z I = new Z(0,1);
/** The real part of Z. */
public double re;
/** The imaginary part of Z. */
public double im;
/**
* Creates a Z and initializes it to zero.
@return a Z initialized to zero.
*/
public Z(){
re = 0.;
im = 0.;
}
/**
* Creates a Z and initializes its real and imaginary parts.
@param x a double
@param y a double
@return x + iy
*/
public Z(double x, double y){
re = x;
im = y;
}
/**
* Creates a Z and initializes its real part.
@param x a double
@return x + i*0
*/
public Z(double x){
re = x;
im = 0;
}
/**
* Creates a Z and initializes it to another Z.
@param a a Z
@return a
*/
public Z(Z a){
re = a.re;
im = a.im;
}
/**
 * Tests two Z's for equality.
@param z1 a Z
@param z2 a Z
@return true if z1=z2, otherwise false
*/
public boolean IsEqual(Z z1, Z z2){
if (z1.re == z2.re && z1.im == z2.im){
return true;
}
else{
return false;
}
}
/**
* Resets the real and imaginary parts of a Z to those of another Z.
@param a a Z
@return this = a;
*/
public Z Eq(Z a){
re = a.re;
im = a.im;
return this;
}
/**
* Resets the real and imaginary parts of a Z.
@param a a double
@param b a double
@return this = a + ib
*/
public Z Eq(double a, double b){
re = a;
im = b;
return this;
}
/**
* Interchanges the real and imaginary parts of two Z's.
@param a a Z
@return this = a, with a set to the original
value of this.
*/
public Z Exch(Z a){
double t;
t = re; re = a.re; a.re = t;
t = im; im = a.im; a.im = t;
return this;
}
/**
Computes the 1-norm of a Z
*/
public static double abs1(Z z){
return Math.abs(z.re) + Math.abs(z.im);
}
/**
Computes the absolute value of a Z.
@param z a Z
@return the absolute vaue of Z
*/
public static double abs(Z z){
double are, aim, rho;
are = Math.abs(z.re);
aim = Math.abs(z.im);
if (are+aim == 0) return 0;
if (are >= aim){
rho = aim/are;
return are*Math.sqrt(1+rho*rho);
}
else{
rho = are/aim;
return aim*Math.sqrt(1+rho*rho);
}
}
/**
* Computes the conjugate of a Z.
@param a a Z
@return this = conj(a);
*/
public Z Conj(Z a){
re = a.re;
im = -a.im;
return this;
}
/**
* Computes unary minus of a Z.
@param a a Z
@return this = -a;
*/
public Z Minus(Z a){
re = -a.re;
im = -a.im;
return this;
}
/**
* Computes the sum of two Z's.
@param a a Z
@param b a Z
@return this = a + b
*/
public Z Plus(Z a, Z b){
re = a.re + b.re;
im = a.im + b.im;
return this;
}
/**
* Computes the difference of two Z's.
@param a a Z
@param b a Z
@return this = a - b
*/
public Z Minus(Z a, Z b){
re = a.re - b.re;
im = a.im - b.im;
return this;
}
/**
* Computes the product of two Z's.
@param a a Z
@param b a Z
@return this = ab
*/
public Z Times(Z a, Z b){
double tre;
tre = a.re*b.re - a.im*b.im;
im = a.im*b.re + a.re*b.im;
re = tre;
return this;
}
/**
* Computes the product of a double and a Z.
@param a a double
@param b a Z
@return this = ab
*/
public Z Times(double a, Z b){
re = a*b.re;
im = a*b.im;
return this;
}
/**
* Computes the quotient of two Z's. Throws a JampackException if
* the denominator is zero.
@param a a Z
@param b a Z
@return this = a/b
@exception JampackException
Thrown if b is zero.
*/
public Z Div(Z a, Z b)
throws JampackException{
double avi, t, tre, tim;
avi = abs(b);
if (avi == 0){
throw new JampackException
("Divide by zero.");
}
avi = 1./avi;
tre = b.re*avi;
tim = -b.im*avi;
t = (a.re*tre - a.im*tim)*avi;
im = (a.im*tre + a.re*tim)*avi;
re = t;
return this;
}
/**
* Computes the quotient of a Z and a double. Throws a JampackException
* if the denominator is zero.
@param a a Z
@param b a double
@return this = a/b
@exception JampackException
Thrown if b is zero.
*/
public Z Div(Z a, double b)
throws JampackException{
if (b == 0){
throw new JampackException
("Divide by zero.");
}
re = a.re/b;
im = a.im/b;
return this;
}
/**
* Computes the principal value of the square root of a Z.
@param a a Z
@param this = sqrt(a)
*/
public Z Sqrt(Z a){
double t, tre, tim;
t = Z.abs(a);
if (Math.abs(a.re) <= Math.abs(a.im)){
// No cancellation in these formulas
tre = Math.sqrt(0.5*(t+a.re));
tim = Math.sqrt(0.5*(t-a.re));
}
else{
// Stable computation of the above formulas
if (a.re > 0){
tre = t + a.re;
tim = Math.abs(a.im)*Math.sqrt(0.5/tre);
tre = Math.sqrt(0.5*tre);
}
else{
tim = t - a.re;
tre = Math.abs(a.im)*Math.sqrt(0.5/tim);
tim = Math.sqrt(0.5*tim);
}
}
if (a.im < 0)
tim = -tim;
re = tre;
im = tim;
return this;
}
* or
* <p>
*
* e.Plus(z1.Times(a,b), z2.Times(a,b))
* <p>
*
* Since objects of class Z are mutable, the use of the assignment operator "=" with these objects is deprecated. Use Eq.
* <p>
*
 * The functions are reasonably resistant to overflow and underflow. But the more complicated ones could almost certainly be
* improved.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
public class Z {
/** Complex 1. */
public static final Z ONE = new Z(1, 0);
/** Complex 0. */
public static final Z ZERO = new Z(0, 0);
/** Imaginary unit. */
public static final Z I = new Z(0, 1);
/** The real part of Z. */
public double re;
/** The imaginary part of Z. */
public double im;
/**
* Creates a Z and initializes it to zero.
*
* @return a Z initialized to zero.
*/
public Z() {
re = 0.;
im = 0.;
}
/**
* Creates a Z and initializes its real and imaginary parts.
*
* @param x
* a double
* @param y
* a double
* @return x + iy
*/
public Z(double x, double y) {
re = x;
im = y;
}
/**
* Creates a Z and initializes its real part.
*
* @param x
* a double
* @return x + i*0
*/
public Z(double x) {
re = x;
im = 0;
}
/**
* Creates a Z and initializes it to another Z.
*
* @param a
* a Z
* @return a
*/
public Z(Z a) {
re = a.re;
im = a.im;
}
/**
	 * Tests two Z's for equality.
*
* @param z1
* a Z
* @param z2
* a Z
* @return true if z1=z2, otherwise false
*/
public boolean IsEqual(Z z1, Z z2) {
if (z1.re == z2.re && z1.im == z2.im) {
return true;
} else {
return false;
}
}
/**
* Resets the real and imaginary parts of a Z to those of another Z.
*
* @param a
* a Z
* @return this = a;
*/
public Z Eq(Z a) {
re = a.re;
im = a.im;
return this;
}
/**
* Resets the real and imaginary parts of a Z.
*
* @param a
* a double
* @param b
* a double
* @return this = a + ib
*/
public Z Eq(double a, double b) {
re = a;
im = b;
return this;
}
/**
* Interchanges the real and imaginary parts of two Z's.
*
* @param a
* a Z
* @return this = a, with a set to the original value of this.
*/
public Z Exch(Z a) {
double t;
t = re;
re = a.re;
a.re = t;
t = im;
im = a.im;
a.im = t;
return this;
}
/**
* Computes the 1-norm of a Z
*/
public static double abs1(Z z) {
return Math.abs(z.re) + Math.abs(z.im);
}
/**
* Computes the absolute value of a Z.
*
* @param z
* a Z
	 * @return the absolute value of z
*/
public static double abs(Z z) {
double are, aim, rho;
are = Math.abs(z.re);
aim = Math.abs(z.im);
if (are + aim == 0)
return 0;
if (are >= aim) {
rho = aim / are;
return are * Math.sqrt(1 + rho * rho);
} else {
rho = are / aim;
return aim * Math.sqrt(1 + rho * rho);
}
}
/**
* Computes the conjugate of a Z.
*
* @param a
* a Z
* @return this = conj(a);
*/
public Z Conj(Z a) {
re = a.re;
im = -a.im;
return this;
}
/**
* Computes unary minus of a Z.
*
* @param a
* a Z
* @return this = -a;
*/
public Z Minus(Z a) {
re = -a.re;
im = -a.im;
return this;
}
/**
* Computes the sum of two Z's.
*
* @param a
* a Z
* @param b
* a Z
* @return this = a + b
*/
public Z Plus(Z a, Z b) {
re = a.re + b.re;
im = a.im + b.im;
return this;
}
/**
* Computes the difference of two Z's.
*
* @param a
* a Z
* @param b
* a Z
* @return this = a - b
*/
public Z Minus(Z a, Z b) {
re = a.re - b.re;
im = a.im - b.im;
return this;
}
/**
* Computes the product of two Z's.
*
* @param a
* a Z
* @param b
* a Z
* @return this = ab
*/
public Z Times(Z a, Z b) {
double tre;
tre = a.re * b.re - a.im * b.im;
im = a.im * b.re + a.re * b.im;
re = tre;
return this;
}
/**
* Computes the product of a double and a Z.
*
* @param a
* a double
* @param b
* a Z
* @return this = ab
*/
public Z Times(double a, Z b) {
re = a * b.re;
im = a * b.im;
return this;
}
/**
* Computes the quotient of two Z's. Throws a JampackException if the denominator is zero.
*
* @param a
* a Z
* @param b
* a Z
* @return this = a/b
* @exception JampackException
* Thrown if b is zero.
*/
public Z Div(Z a, Z b) throws JampackException {
double avi, t, tre, tim;
avi = abs(b);
if (avi == 0) {
throw new JampackException("Divide by zero.");
}
avi = 1. / avi;
tre = b.re * avi;
tim = -b.im * avi;
t = (a.re * tre - a.im * tim) * avi;
im = (a.im * tre + a.re * tim) * avi;
re = t;
return this;
}
/**
* Computes the quotient of a Z and a double. Throws a JampackException if the denominator is zero.
*
* @param a
* a Z
* @param b
* a double
* @return this = a/b
* @exception JampackException
* Thrown if b is zero.
*/
public Z Div(Z a, double b) throws JampackException {
if (b == 0) {
throw new JampackException("Divide by zero.");
}
re = a.re / b;
im = a.im / b;
return this;
}
/**
* Computes the principal value of the square root of a Z.
*
* @param a
* a Z
	 * @return this = sqrt(a)
*/
public Z Sqrt(Z a) {
double t, tre, tim;
t = Z.abs(a);
if (Math.abs(a.re) <= Math.abs(a.im)) {
// No cancellation in these formulas
tre = Math.sqrt(0.5 * (t + a.re));
tim = Math.sqrt(0.5 * (t - a.re));
} else {
// Stable computation of the above formulas
if (a.re > 0) {
tre = t + a.re;
tim = Math.abs(a.im) * Math.sqrt(0.5 / tre);
tre = Math.sqrt(0.5 * tre);
} else {
tim = t - a.re;
tre = Math.abs(a.im) * Math.sqrt(0.5 / tim);
tim = Math.sqrt(0.5 * tim);
}
}
if (a.im < 0)
tim = -tim;
re = tre;
im = tim;
return this;
}
}
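
Purely as an illustrative aside (not part of the commit): a minimal sketch of the two programming styles described in the Z javadoc above, computing e = a*b + c*d with the mutating Times/Plus methods shown in this hunk. The class name ZExample is hypothetical.

package Jampack;

class ZExample {
	public static void main(String[] args) {
		Z a = new Z(1, 2), b = new Z(3, -1), c = new Z(0, 1), d = new Z(2, 2);
		Z z1 = new Z(), z2 = new Z(), e = new Z();
		// Style 1: explicit temporaries, no new Z created per operation after setup
		z1.Times(a, b);
		z2.Times(c, d);
		e.Plus(z1, z2);
		// Style 2: nested calls, exploiting that each method returns this
		Z f = new Z().Plus(new Z().Times(a, b), new Z().Times(c, d));
		System.out.println(e.re + " + " + e.im + "i   " + f.re + " + " + f.im + "i");
	}
}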


@ -1,97 +1,101 @@
package Jampack;
/**
Z1 implements a one-dimensional array of complex numbers as a
two arrays of type double. The addressing is zero based.
It is necessary to provided one-dimensional complex arrays
whose real and imaginary parts are contiguous in storage.
 * Z1 implements a one-dimensional array of complex numbers as two arrays of type double. The addressing is zero based. It is
 * necessary to provide one-dimensional complex arrays whose real and imaginary parts are contiguous in storage.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Z1 {
public class Z1{
protected int n;
protected double re[];
protected double im[];
protected int n;
protected double re[];
protected double im[];
/**
	 * Creates a Z1 initialized to zero.
*
* @param n
* a positive integer
* @return A Z1 of length n
* @exception JampackException
* Thrown if n<=0.
*/
/**
Creates a Z1 initializes to zero.
public Z1(int n) throws JampackException {
if (n <= 0) {
throw new JampackException("Nonpositive dimension.");
}
@param n a positive integer
@return A Z1 of length n
@exception JampackException
Thrown if n<=0.
*/
this.n = n;
re = new double[n];
im = new double[n];
}
public Z1(int n)
throws JampackException{
if (n <= 0){
throw new JampackException
("Nonpositive dimension.");
}
/**
* Returns the ith element of a Z1 as a Z.
*
* @param i
* an integer
	 * @return The ith element of this Z1
*/
this.n = n;
re = new double[n];
im = new double[n];
}
public Z get(int i) {
/**
Returns the ith element of a Z1 as a Z.
return new Z(re[i], im[i]);
}
@param i an integer
@return The ith elemeent of this Z1
*/
/**
* Sets the ith element of a Z1 to a Z.
*
* @param i
* an integer
* @param z
* a Z
* @return resets the ith element of this Z1 to z
*/
public Z get(int i){
public void put(int i, Z z) {
return new Z(re[i], im[i]);
}
re[i] = z.re;
im[i] = z.im;
/**
Sets the ith element of a Z1 to a Z.
}
@param i an integer
@param z a Z
@return resets the ith element of this Z1 to z
*/
/**
* Sets the real and imaginary parts of the ith element of a Z1.
*
* @param i
* an integer
* @param real
* a double
* @param imag
* a double
* @return resets the ith component of this Z1 to real + i*imag
*/
public void put(int i, Z z){
public void put(int i, double real, double imag) {
re[i] = real;
im[i] = imag;
}
re[i] = z.re;
im[i] = z.im;
/**
* Multiplies the ith element of a Z1 by a Z.
*
* @param i
* an integer
* @param z
* a Z
* @return multiplies the ith element of this Z1 by z.
*/
}
public void Times(int i, Z z) {
double t;
/**
Sets the real and imaginary parts of the ith element of a Z1.
@param i an integer
@param real a double
@param imag a double
@return resets the ith component of this Z1 to
real + i*imag
*/
public void put(int i, double real, double imag){
re[i] = real;
im[i] = imag;
}
/**
Multiplies the ith element of a Z1 by a Z.
@param i an integer
@param z a Z
@return multiplies the ith element of this Z1 by z.
*/
public void Times(int i, Z z){
double t;
t = re[i]*z.re - im[i]*z.im;
im[i] = re[i]*z.im + im[i]*z.re;
re[i] = t;
}
t = re[i] * z.re - im[i] * z.im;
im[i] = re[i] * z.im + im[i] * z.re;
re[i] = t;
}
}
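
Again illustrative only: a short sketch of the Z1 accessors shown above -- put with a Z, put with separate real/imaginary parts, in-place Times, and get. The class name Z1Example is hypothetical.

package Jampack;

class Z1Example {
	public static void main(String[] args) throws JampackException {
		Z1 v = new Z1(3);          // zero-initialized complex vector of length 3
		v.put(0, new Z(1, 2));     // v[0] = 1 + 2i
		v.put(1, 3, -1);           // v[1] = 3 - i, using the (real, imag) form
		v.Times(1, new Z(0, 1));   // v[1] *= i, in place
		Z x = v.get(1);            // read back as a Z
		System.out.println(x.re + " + " + x.im + "i");
	}
}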


@ -1,97 +1,90 @@
package Jampack;
/**
Zchol implements the Cholesky decomposition of a positive definite
matrix. Specifically if A is (Hermitian) positive definite then
there is an upper triangular matrix R with positive diagonal
elements such that
<pre>
* A = R^H R
</pre>
The matrix R is implemented as a Zutmat.
* Zchol implements the Cholesky decomposition of a positive definite matrix. Specifically if A is (Hermitian) positive definite
* then there is an upper triangular matrix R with positive diagonal elements such that
*
* <pre>
* A = R^H R
* </pre>
*
* The matrix R is implemented as a Zutmat.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zchol {
public class Zchol{
/** The order of A and R */
public int n;
/** The order of A and R */
public int n;
/** The Cholesky factor */
public Zutmat R;
/** The Cholesky factor */
public Zutmat R;
/**
	 * Constructs a Zchol from a Zmat A. The matrix that is actually decomposed is taken from the upper triangle of A and the
	 * imaginary part of its diagonal is set to zero. Throws a JampackException for inconsistent dimensions or on failure of the
	 * algorithm to complete.
*
* @param A
* The matrix whose Cholesky decomposition is to be computed.
* @return The Cholesky decomposition of A
* @exception JampackException
* Thrown if A is not square or Hermitian.<br>
	 *                Thrown if the decomposition does not exist.
*/
/**
Constructs a Zchol from a Zmat A. The matrix that
is actually decomposed is taken from the upper triangle
of $A$ and the imaginary part of its diagonal is set to
zero. Throws a JampackException for inconsistent dimensions
on failure of the algorithm to complete.
public Zchol(Zmat A) throws JampackException {
@param A The matrix whose Cholesky decomposition is
to be computed.
@return The Cholesky decomposition of A
@exception JampackException
Thrown if A is not square or Hermitian.<br>
Thrown if the doecomposition does not exist.
*/
double mu;
int i, j, k;
A.getProperties();
public Zchol(Zmat A)
throws JampackException{
if (A.nr != A.nc) {
throw new JampackException("Matrix not square.");
}
double mu;
int i, j, k;
A.getProperties();
n = A.nr;
if (A.nr != A.nc){
throw new JampackException
("Matrix not square.");
}
/* Set up R from the upper triangle of A */
n = A.nr;
R = new Zutmat(A);
/* Set up R from the upper triangle of A */
/* Check for A Hermitian and initialize R. */
R = new Zutmat(A);
for (i = 0; i < n; i++) {
if (R.im[i][i] != 0) {
throw new JampackException("Matrix not Hermitian");
}
for (j = 0; j < i; j++) {
if (R.re[i][j] != R.re[j][i] || R.im[i][j] != -R.im[j][i]) {
throw new JampackException("Matrix not Hermitian");
}
R.im[i][j] = 0;
R.re[i][j] = 0;
}
}
/* Check for A Hermitian and initialize R. */
/* Compute the decomposition */
for (i=0; i<n; i++){
if (R.im[i][i] != 0){
throw new JampackException("Matrix not Hermitian");
}
for (j=0; j<i; j++){
if (R.re[i][j]!=R.re[j][i] || R.im[i][j]!=-R.im[j][i]){
throw new JampackException("Matrix not Hermitian");
}
R.im[i][j] = 0;
R.re[i][j] = 0;
}
}
/* Compute the decomposition */
for (k=0; k<n; k++){
if (R.re[k][k] <= 0){
throw new JampackException
("Nonpositive diagonal entry during reduction.");
}
R.re[k][k] = Math.sqrt(R.re[k][k]);
mu = 1/R.re[k][k];
for (j=k+1; j<n; j++){
R.re[k][j] = mu*R.re[k][j];
R.im[k][j] = mu*R.im[k][j];
}
for (i=k+1; i<n; i++){
for (j=i; j<n; j++){
R.re[i][j] = R.re[i][j] -
R.re[k][i]*R.re[k][j] - R.im[k][i]*R.im[k][j];
R.im[i][j] = R.im[i][j] -
R.re[k][i]*R.im[k][j] + R.im[k][i]*R.re[k][j];
}
R.im[i][i] = 0;
}
}
}
for (k = 0; k < n; k++) {
if (R.re[k][k] <= 0) {
throw new JampackException("Nonpositive diagonal entry during reduction.");
}
R.re[k][k] = Math.sqrt(R.re[k][k]);
mu = 1 / R.re[k][k];
for (j = k + 1; j < n; j++) {
R.re[k][j] = mu * R.re[k][j];
R.im[k][j] = mu * R.im[k][j];
}
for (i = k + 1; i < n; i++) {
for (j = i; j < n; j++) {
R.re[i][j] = R.re[i][j] - R.re[k][i] * R.re[k][j] - R.im[k][i] * R.im[k][j];
R.im[i][j] = R.im[i][j] - R.re[k][i] * R.im[k][j] + R.im[k][i] * R.re[k][j];
}
R.im[i][i] = 0;
}
}
}
}
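
An illustrative sketch (not part of the commit) of the failure path documented above: the constructor throws when the matrix is Hermitian but not positive definite, so a try/catch can serve as a positive-definiteness test. ZcholCheckExample is a hypothetical class name; ZcholTest below exercises the reconstruction A = R^H R.

package Jampack;

class ZcholCheckExample {
	public static void main(String[] args) throws JampackException {
		Zmat A = new Zmat(3, 3);   // the zero matrix: Hermitian, but not positive definite
		try {
			Zchol CH = new Zchol(A);
			Print.o(CH.R);
		} catch (JampackException e) {
			System.out.println("Matrix is not positive definite.");
		}
	}
}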


@ -1,24 +1,22 @@
package Jampack;
class ZcholTest{
class ZcholTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
int n=5;
int n = 5;
Zmat A = new Zmat(n,n);
for (int i=0; i<n; i++){
for (int j=0; j<n; j++){
A.re[i][j] = i;
A.im[i][j] = i+j;
}
A.re[i][i] = 2*n;
A.im[i][i] = 2*n;
}
A = Times.o(H.o(A), A);
Zchol CH = new Zchol(A);
Print.o(Norm.fro(Minus.o(A, Times.o(H.o(CH.R), CH.R))));
}
Zmat A = new Zmat(n, n);
for (int i = 0; i < n; i++) {
for (int j = 0; j < n; j++) {
A.re[i][j] = i;
A.im[i][j] = i + j;
}
A.re[i][i] = 2 * n;
A.im[i][i] = 2 * n;
}
A = Times.o(H.o(A), A);
Zchol CH = new Zchol(A);
Print.o(Norm.fro(Minus.o(A, Times.o(H.o(CH.R), CH.R))));
}
}


@ -1,240 +1,231 @@
package Jampack;
/**
Zdiagmat is a storage efficient representation of a complex
diagonal matrix.
* Zdiagmat is a storage efficient representation of a complex diagonal matrix.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zdiagmat {
public class Zdiagmat{
/** The order of the matrix */
protected int order;
/** The order of the matrix */
protected int order;
/** The base index */
protected int basex;
/** The base index */
protected int basex;
/** The real part of the diagonal */
protected double re[];
/** The real part of the diagonal */
protected double re[];
/** The imaginary part of the diagonal */
protected double im[];
/** The imaginary part of the diagonal */
protected double im[];
/** The order of the matrix (public) */
public int n;
/** The order of the matrix (public) */
public int n;
/** The base index (public) */
public int bx;
/** The base index (public) */
public int bx;
/** The index of the last diagonal (public) */
public int dx;
/** The index of the last diagonal (public) */
public int dx;
/**
* Constructs a Zdiagmat and initializes it to zero.
*
* @param order
* The order of the new Zdiagmat
* @return A Zdiagmat initialized to zero.
*/
public Zdiagmat(int order) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.order = order;
getProperties();
re = new double[n];
im = new double[n];
}
/**
* Constructs a Zdiagmat and initializes it to a constant.
*
* @param order
* The order of the new Zdiagmat
* @param val
* The value to which the diagonal is to be initialized
* @return A Zdiagmat whose diagonal is val.
*/
public Zdiagmat(int order, Z val) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.order = order;
getProperties();
re = new double[n];
im = new double[n];
for (int i = 0; i < n; i++) {
re[i] = val.re;
im[i] = val.im;
}
}
/**
* Constructs a Zdiagmat and initializes it to a Z1.
*
* @param val
* A Z1
* @return A Zdiagmat whose diagonal elements are the elements of val.
*/
public Zdiagmat(Z1 val) {
Parameters.BaseIndexNotChangeable = true;
bx = Parameters.BaseIndex;
order = val.re.length;
getProperties();
re = new double[n];
im = new double[n];
for (int i = 0; i < n; i++) {
re[i] = val.re[i];
im[i] = val.im[i];
}
}
/**
Constructs a Zdiagmat and initializes it to zero.
/**
* Constructs a Zdiagmat and initializes it to the diagonal of a Zmat.
*
* @param A
* The Zmat
* @param k
	 *            The diagonal. k=0 gives the principal diagonal; k>0, the kth superdiagonal; k<0, the kth subdiagonal.
* @return The Zdiagmat consisting of the selected diagonal of A
* @exception JampackException
	 *                Thrown for k too large or too small.
*/
@param order The order of the new Zdiagmat
@return A Zdiagmat initialized to zero.
*/
public Zdiagmat(Zmat A, int k) throws JampackException {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
if (k >= 0) {
if (k >= A.ncol) {
throw new JampackException("Diagonal out of range.");
}
order = Math.min(A.nrow, A.ncol - k);
re = new double[order];
im = new double[order];
for (int i = 0; i < order; i++) {
re[i] = A.re[i][i + k];
im[i] = A.im[i][i + k];
}
} else {
k = -k;
if (k >= A.nrow) {
throw new JampackException("Diagonal out of range.");
}
order = Math.min(A.nrow - k, A.ncol);
re = new double[order];
im = new double[order];
for (int i = 0; i < order; i++) {
re[i] = A.re[i + k][i];
im[i] = A.im[i + k][i];
}
}
getProperties();
}
public Zdiagmat(int order){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.order = order;
getProperties();
re = new double[n];
im = new double[n];
}
/**
* Constructs a Zdiagmat and initializes it to the principal diagonal of a Zmat.
*
* @param A
* A Zmat
	 * @return A Zdiagmat whose diagonal is that of A
* @exception JampackException
* Passed from below.
*/
/**
Constructs a Zdiagmat and initializes it to a constant.
public Zdiagmat(Zmat A) throws JampackException {
this(A, 0);
}
@param order The order of the new Zdiagmat
@param val The value to which the diagonal
is to be initialized
@return A Zdiagmat whose diagonal is val.
*/
/**
* Constructs a Zdiagmat and initializes it to another Zdiagmat.
*
* @param D
* A Zdiagmat
	 * @return A Zdiagmat that is a copy of D.
*/
public Zdiagmat(int order, Z val){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.order = order;
getProperties();
re = new double[n];
im = new double[n];
for (int i=0; i<n; i++){
re[i] = val.re;
im[i] = val.im;
}
}
public Zdiagmat(Zdiagmat D) {
/**
Constructs a Zdiagmat and initializes it to a Z1.
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
order = D.order;
getProperties();
re = new double[n];
im = new double[n];
@param val A Z1
@return A Zdiagmat whose diagonal elements are
the elements of val.
*/
for (int i = 0; i < n; i++) {
re[i] = D.re[i];
im[i] = D.im[i];
}
}
public Zdiagmat(Z1 val){
Parameters.BaseIndexNotChangeable = true;
bx = Parameters.BaseIndex;
order = val.re.length;
getProperties();
re = new double[n];
im = new double[n];
for (int i=0; i<n; i++){
re[i] = val.re[i];
im[i] = val.im[i];
}
}
/**
* Sets the public parameters.
*/
/**
Constructs a Zdiagmat and initializes it to the diagonal of a Zmat.
public void getProperties() {
bx = basex;
dx = bx + order - 1;
n = order;
}
@param A The Zmat
@param k The diagonal. For k=0 gives the princpal diagonal;
k>0, the kth superdiagonal; k<0, the kth subdiagonal.
@return The Zdiagmat consisting of the selected diagonal of A
@exception JampackException
Thrown for k to large or small.
*/
/**
* Gets the ii-th diagonal element of a Zdiagmat.
*
* @param ii
* An integer
* @return The ii-th element of this Zdiagmat
*/
public Zdiagmat(Zmat A, int k)
throws JampackException{
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
if (k >= 0){
if (k >= A.ncol){
throw new JampackException
("Diagonal out of range.");
}
order = Math.min(A.nrow, A.ncol-k);
re = new double[order];
im = new double[order];
for (int i=0; i<order; i++){
re[i] = A.re[i][i+k];
im[i] = A.im[i][i+k];
}
}
else{
k = -k;
if (k >= A.nrow){
throw new JampackException
("Diagonal out of range.");
}
order = Math.min(A.nrow-k, A.ncol);
re = new double[order];
im = new double[order];
for (int i=0; i<order; i++){
re[i] = A.re[i+k][i];
im[i] = A.im[i+k][i];
}
}
getProperties();
}
/**
Constructs a Zdiagmat and initializes it to the principal diagonal
of a Zmat.
public Z get(int ii) {
@param A A Zmat
@returns A Zdiagmat whose diagonal is that of A
@exception JampackException
Passed from below.
*/
return new Z(re[ii - bx], im[ii - bx]);
}
public Zdiagmat(Zmat A)
throws JampackException{
this(A, 0);
}
/**
	 * Gets the <tt>i</tt>th diagonal element of a Zdiagmat (0-based).
*/
/**
Constructs a Zdiagmat and initializes it to another Zdiagmat.
public Z get0(int i) {
@param D A Zdiagmat
@returns A Zdiagmat that is a copy of D.
*/
return new Z(re[i], im[i]);
}
public Zdiagmat(Zdiagmat D){
/**
* Writes the ii-th diagonal element of a Zdiagmat.
*
* @param ii
* An integer
* @param val
* A Z
* @return Resets the ii-th diagonal element of this Zdiagmat to val.
*/
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
order = D.order;
getProperties();
re = new double[n];
im = new double[n];
public void put(int ii, Z val) {
for (int i=0; i<n; i++){
re[i] = D.re[i];
im[i] = D.im[i];
}
}
re[ii - bx] = val.re;
im[ii - bx] = val.im;
}
/**
Sets the public parameters.
*/
/**
* Writes the <tt>i</tt>th diagonal element of a Zdiagmat (0-based).
*/
public void getProperties(){
bx = basex;
dx = bx + order - 1;
n = order;
}
public void put0(int i, Z val) {
/**
Gets the ii-th diagonal element of a Zdiagmat.
@param ii An integer
@return The ii-th element of this Zdiagmat
*/
public Z get(int ii){
return new Z(re[ii-bx], im[ii-bx]);
}
/**
Gets the <tt>i</tt>th diagonal of a of a Zdiagmat
(0-based).
*/
public Z get0(int i){
return new Z(re[i], im[i]);
}
/**
Writes the ii-th diagonal element of a Zdiagmat.
@param ii An integer
@param val A Z
@return Resets the ii-th diagonal element of this Zdiagmat
to val.
*/
public void put(int ii, Z val){
re[ii-bx] = val.re;
im[ii-bx] = val.im;
}
/**
Writes the <tt>i</tt>th diagonal element of a Zdiagmat
(0-based).
*/
public void put0(int i, Z val){
re[i] = val.re;
im[i] = val.im;
}
re[i] = val.re;
im[i] = val.im;
}
}
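
For illustration (not part of the commit): a sketch combining the Zdiagmat constructors and element accessors above with the Zmat(Zdiagmat) conversion that appears later in this commit. ZdiagmatExample is a hypothetical class name.

package Jampack;

class ZdiagmatExample {
	public static void main(String[] args) throws JampackException {
		Z1 d = new Z1(3);
		for (int i = 0; i < 3; i++)
			d.put(i, i + 1, -i);      // diagonal entries 1, 2 - i, 3 - 2i
		Zdiagmat D = new Zdiagmat(d); // stores only the diagonal
		D.put0(2, new Z(5, 0));       // overwrite the last entry, zero-based
		Zmat A = new Zmat(D);         // expand to a full Zmat when a dense matrix is needed
		Print.o(A);
	}
}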


@ -1,59 +1,55 @@
package Jampack;
class ZdiagmatTest{
class ZdiagmatTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
// Parameters.SetBaseIndex(0);
// Parameters.SetBaseIndex(0);
if (args[0].equals("t1")){
Print.o(new Zdiagmat(5));
}
if (args[0].equals("t1")) {
Print.o(new Zdiagmat(5));
}
else if (args[0].equals("t2")){
Print.o(new Zdiagmat(3, new Z(3,-1)));
}
else if (args[0].equals("t2")) {
Print.o(new Zdiagmat(3, new Z(3, -1)));
}
else if (args[0].equals("t3")){
Z1 val = new Z1(3);
for (int i=0; i<3; i++){
val.re[i] = i;
val.im[i] = 1;
}
Print.o(new Zdiagmat(val));
}
else if (args[0].equals("t4")){
Zmat A = new Zmat(3,3);
for (int i=A.bx; i<=A.rx; i++){
for (int j=A.bx; j<=A.cx; j++){
A.put(i, j, new Z(i,j));
}
}
Print.o(new Zdiagmat(A));
Print.o(new Zdiagmat(A,1));
Print.o(new Zdiagmat(A,2));
Print.o(new Zdiagmat(A,-1));
Print.o(new Zdiagmat(A,-2));
}
else if (args[0].equals("t5")){
Z1 val = new Z1(3);
for (int i=0; i<3; i++){
val.re[i] = i;
val.im[i] = -i;
}
Print.o(new Zdiagmat(new Zdiagmat(val)));
}
else if (args[0].equals("t6")){
Z1 val = new Z1(3);
for (int i=0; i<3; i++){
val.re[i] = i;
val.im[i] = 1;
}
Zdiagmat D = new Zdiagmat(val);
D.put(D.dx, D.get(D.bx));
Print.o(D);
}
else if (args[0].equals("t3")) {
Z1 val = new Z1(3);
for (int i = 0; i < 3; i++) {
val.re[i] = i;
val.im[i] = 1;
}
Print.o(new Zdiagmat(val));
} else if (args[0].equals("t4")) {
Zmat A = new Zmat(3, 3);
for (int i = A.bx; i <= A.rx; i++) {
for (int j = A.bx; j <= A.cx; j++) {
A.put(i, j, new Z(i, j));
}
}
Print.o(new Zdiagmat(A));
Print.o(new Zdiagmat(A, 1));
Print.o(new Zdiagmat(A, 2));
Print.o(new Zdiagmat(A, -1));
Print.o(new Zdiagmat(A, -2));
} else if (args[0].equals("t5")) {
Z1 val = new Z1(3);
for (int i = 0; i < 3; i++) {
val.re[i] = i;
val.im[i] = -i;
}
Print.o(new Zdiagmat(new Zdiagmat(val)));
} else if (args[0].equals("t6")) {
Z1 val = new Z1(3);
for (int i = 0; i < 3; i++) {
val.re[i] = i;
val.im[i] = 1;
}
Zdiagmat D = new Zdiagmat(val);
D.put(D.dx, D.get(D.bx));
Print.o(D);
}
}
}
}


@ -1,54 +1,53 @@
package Jampack;
/**
Zhess implements the unitary reduction to Hessenberg form
by a unitary similarity transformation. Specifically, given
a square matrix A, there is a unitary matrix U such that
<pre>
* H = U^H AU
</pre>
is upper Hessenberg.
Zhess represents U and H as Zmats.
* Zhess implements the unitary reduction to Hessenberg form by a unitary similarity transformation. Specifically, given a square
* matrix A, there is a unitary matrix U such that
*
* <pre>
* H = U^H AU
* </pre>
*
* is upper Hessenberg. Zhess represents U and H as Zmats.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zhess {
public class Zhess{
/** The upper Hessenberg matrix */
public Zmat H;
/** The upper Hessenberg matrix */
public Zmat H;
/** The unitary matrix */
public Zmat U;
/** The unitary matrix */
public Zmat U;
/**
	 * Creates a Zhess from a square Zmat. Throws a JampackException for a nonsquare matrix.
*
* @param A
* A Zmat
* @return The Hessenberg form of A
* @exception JampackException
* Thrown if A is not square.
*/
/** Creates a Zhess from a square Zmat. Throws a
JampackException for nonsquare matrx.
public Zhess(Zmat A) throws JampackException {
@param A A Zmat
@return The Hessenberg form of A
@exception JampackException
Thrown if A is not square.
*/
if (A.nr != A.nc) {
throw new JampackException("Matrix not square");
}
public Zhess(Zmat A)
throws JampackException{
H = new Zmat(A);
U = Eye.o(H.nr);
if (A.nr != A.nc){
throw new JampackException
("Matrix not square");
}
Z1 work = new Z1(H.nr);
H = new Zmat(A);
U = Eye.o(H.nr);
Z1 work = new Z1(H.nr);
for (int k=H.bx; k<=H.cx-2; k++){
Z1 u = House.genc(H, k+1, H.rx, k);
House.ua(u, H, k+1, H.rx, k+1, H.cx, work);
House.au(H, u, H.bx, H.rx, k+1, H.cx, work);
House.au(U, u, U.bx, U.rx, k+1, U.cx, work);
}
}
for (int k = H.bx; k <= H.cx - 2; k++) {
Z1 u = House.genc(H, k + 1, H.rx, k);
House.ua(u, H, k + 1, H.rx, k + 1, H.cx, work);
House.au(H, u, H.bx, H.rx, k + 1, H.cx, work);
House.au(U, u, U.bx, U.rx, k + 1, U.cx, work);
}
}
}
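
An illustrative aside (not part of the commit): because H = U^H AU is a similarity transformation, a Hermitian A yields a Hessenberg form that is also Hermitian, i.e. tridiagonal up to rounding. The sketch below, with the hypothetical class name ZhessHermitianExample, uses only the API shown above; ZhessTest below handles the general case.

package Jampack;

class ZhessHermitianExample {
	public static void main(String[] args) throws JampackException {
		int n = 4;
		Zmat A = new Zmat(n, n);
		for (int i = A.bx; i <= A.rx; i++)
			for (int j = A.bx; j <= A.cx; j++)
				A.put(i, j, new Z(i + j, i - j)); // A(i,j) = (i+j) + (i-j)i is Hermitian
		Zhess B = new Zhess(A);
		Print.o(B.H); // for Hermitian A the Hessenberg form is tridiagonal (up to rounding)
	}
}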


@ -1,27 +1,26 @@
package Jampack;
class ZhessTest{
class ZhessTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
int n = 5;
int n = 5;
Zmat A = new Zmat(n,n);
Zmat A = new Zmat(n, n);
for (int i=A.bx; i<=A.rx; i++){
for (int j=A.bx; j<=A.cx; j++){
A.put(i, j, new Z(i+j, i-j));
}
A.put(i,i, new Z(2*n, 2*n));
}
for (int i = A.bx; i <= A.rx; i++) {
for (int j = A.bx; j <= A.cx; j++) {
A.put(i, j, new Z(i + j, i - j));
}
A.put(i, i, new Z(2 * n, 2 * n));
}
Zhess B = new Zhess(A);
Zhess B = new Zhess(A);
Print.o(B.H);
Print.o(Times.o(H.o(B.U), Times.o(A, B.U)));
Print.o(Norm.fro(Minus.o(A,Times.o(B.U, Times.o(B.H, H.o(B.U))))));
}
Print.o(B.H);
Print.o(Times.o(H.o(B.U), Times.o(A, B.U)));
Print.o(Norm.fro(Minus.o(A, Times.o(B.U, Times.o(B.H, H.o(B.U))))));
}
}


@ -1,181 +1,163 @@
package Jampack;
/**
Computes a Householder QR decomposition.Specifically,
given a matrix A there are is a unitary matrix U
such that
<pre>
* QA = R
</pre>
where R is zero below its diagonal. In constructing
this decomposition, Zhqrd represents Q as a product
of Householder transformations with each transformation
represented by a Z1. R is represented by a Zutmat.
Methods are provided to apply the transformations to
other matrices.
<br>
Comments: The routines to postmultiply by Q are soft coded and
should ultimately be replaced.
 * Computes a Householder QR decomposition. Specifically, given a matrix A there is a unitary matrix Q such that
*
* <pre>
* QA = R
* </pre>
*
* where R is zero below its diagonal. In constructing this decomposition, Zhqrd represents Q as a product of Householder
* transformations with each transformation represented by a Z1. R is represented by a Zutmat. Methods are provided to apply the
* transformations to other matrices. <br>
* Comments: The routines to postmultiply by Q are soft coded and should ultimately be replaced.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zhqrd {
public class Zhqrd{
/** The number of rows in A */
public int nrow;
/** The number of rows in A */
public int nrow;
/** The number of columns in A */
public int ncol;
/** The number of columns in A*/
public int ncol;
/** The number of Householder transformations */
public int ntran;
/** The number of Householder transformations */
public int ntran;
/**
* An array containing the generating vectors for the Householder transformations.
*/
public Z1[] U;
/** An array containing the generating vectors for the
Householder transformations. */
public Z1[] U;
/**
	 * The R factor. If nrow&gt;ncol then R is square of order ncol. Otherwise R has the same dimensions as A.
*/
/** The R factor. If nrow&gt;ncol then R is square of order
ncol. Otherwise R has the same dimenstions as A. */
public Zutmat R;
public Zutmat R;
/**
Computes a Householder QR decomposition of a Zmat
/**
* Computes a Householder QR decomposition of a Zmat
*
* @param A
* A Zmat
* @return The Zhqrd of A
* @exception JampackException
* Passed from below.
*/
public Zhqrd(Zmat A) throws JampackException {
@param A A Zmat
@return The Zhqrd of A
@exception JampackException
Passed from below.
*/
public Zhqrd(Zmat A)
throws JampackException{
A.getProperties();
A.getProperties();
/* Initialize. */
/* Initialize. */
nrow = A.nr;
ncol = A.nc;
ntran = Math.min(A.nr, A.nc);
U = new Z1[ntran];
nrow = A.nr;
ncol = A.nc;
ntran = Math.min(A.nr, A.nc);
U = new Z1[ntran];
/* Perform the reduction in R */
/* Perform the reduction in R */
R = new Zutmat(A);
for (int k = A.bx; k < A.bx + ntran; k++) {
R = new Zutmat(A);
for (int k=A.bx; k<A.bx+ntran; k++){
U[k - A.bx] = House.genc(R, k, A.rx, k);
House.ua(U[k - A.bx], R, k, A.rx, k + 1, A.cx);
}
if (nrow > ncol) {// Chop off zeros at the bottom.
R = new Zutmat(R.get(R.bx, R.cx, R.bx, R.cx));
}
}
U[k-A.bx] = House.genc(R, k, A.rx, k);
House.ua(U[k-A.bx], R, k, A.rx, k+1, A.cx);
}
if (nrow > ncol){// Chop off zeros at the bottom.
R = new Zutmat(R.get(R.bx, R.cx, R.bx, R.cx));
}
}
/**
	 * Computes the product QB. Throws JampackException for inconsistent dimensions.
*
* @param B
* A Zmat
* @return QB
* @exception JampackException
* Thrown for inconsistent dimensions.
*/
/**
Computes the product QB. Throws JampackException for
inconsistent dimenstions.
public Zmat qb(Zmat B) throws JampackException {
@param B A Zmat
@return QB
@exception JampackException
Thrown for inconsistent dimensions.
*/
if (B.ncol != ncol) {
throw new JampackException("Inconsistent dimensions.");
}
public Zmat qb(Zmat B)
throws JampackException{
Zmat C = new Zmat(B);
for (int k = ntran - 1; k >= 0; k--) {
House.ua(U[k], C, C.bx + k, C.rx, C.bx, C.cx);
}
if (B.ncol != ncol){
throw new JampackException
("Inconsistent dimensions.");
}
return C;
Zmat C = new Zmat(B);
}
for (int k=ntran-1; k>=0; k--){
House.ua(U[k], C, C.bx+k, C.rx, C.bx, C.cx);
}
/**
	 * Computes the product Q<sup>H</sup>B. Throws JampackException for inconsistent dimensions.
*
* @param B
* A Zmat
* @return Q<sup>H</sup>B
* @exception JampackException
* Thrown for inconsistent dimensions.
*/
public Zmat qhb(Zmat B) throws JampackException {
return C;
if (B.ncol != ncol) {
throw new JampackException("Inconsistent dimensions.");
}
}
Zmat C = new Zmat(B);
/**
Computes the product Q<sup>H</sup>B. Throws JampackException for
inconsistent dimenstions.
for (int k = 0; k < ntran; k++) {
House.ua(U[k], C, C.bx + k, C.rx, C.bx, C.cx);
}
@param B A Zmat
@return Q<sup>H</sup>B
@exception JampackException
Thrown for inconsistent dimensions.
*/
public Zmat qhb(Zmat B)
throws JampackException{
return C;
if (B.ncol != ncol){
throw new JampackException
("Inconsistent dimensions.");
}
}
/**
	 * Computes the product BQ. Throws JampackException for inconsistent dimensions.
*
* @param B
* A Zmat
* @return BQ
* @exception JampackException
* Thrown for inconsistent dimensions.
*/
Zmat C = new Zmat(B);
public Zmat bq(Zmat B) throws JampackException {
for (int k=0; k<ntran; k++){
House.ua(U[k], C, C.bx+k, C.rx, C.bx, C.cx);
}
if (B.nrow != ncol) {
throw new JampackException("Inconsistent dimensions.");
}
return C;
return (H.o(qhb(H.o(B))));
}
}
/**
	 * Computes the product BQ<sup>H</sup>. Throws JampackException for inconsistent dimensions.
*
* @param B
* A Zmat
* @return BQ<sup>H</sup>
* @exception JampackException
* Thrown for inconsistent dimensions.
*/
/**
Computes the product BQ. Throws JampackException for
inconsistent dimenstions.
public Zmat bqh(Zmat A, Zmat B) throws JampackException {
@param B A Zmat
@return BQ
@exception JampackException
Thrown for inconsistent dimensions.
*/
if (B.nrow != ncol) {
throw new JampackException("Inconsistent dimensions.");
}
public Zmat bq(Zmat B)
throws JampackException{
if (B.nrow != ncol){
throw new JampackException
("Inconsistent dimensions.");
}
return(H.o(qhb(H.o(B))));
}
/**
Computes the product BQ<sup>H</sup>. Throws JampackException for
inconsistent dimenstions.
@param B A Zmat
@return BQ<sup>H</sup>
@exception JampackException
Thrown for inconsistent dimensions.
*/
public Zmat bqh(Zmat A, Zmat B)
throws JampackException{
if (B.nrow != ncol){
throw new JampackException
("Inconsistent dimensions.");
}
return(H.o(qb(H.o(B))));
}
return (H.o(qb(H.o(B))));
}
}
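
Illustrative only (not part of the commit): a sketch of the economy-size behaviour documented above -- for nrow > ncol the R factor is chopped to a square of order ncol, and ntran records how many Householder transformations were generated. ZhqrdShapeExample is a hypothetical class name; ZhqrdTest below checks the decomposition itself.

package Jampack;

class ZhqrdShapeExample {
	public static void main(String[] args) throws JampackException {
		Zmat A = new Zmat(6, 3); // tall matrix: nrow > ncol
		for (int i = A.bx; i <= A.rx; i++)
			for (int j = A.bx; j <= A.cx; j++)
				A.put(i, j, new Z(Math.sin(i * j + 1), Math.cos(i - j)));
		Zhqrd QR = new Zhqrd(A);
		Print.o(QR.R); // square of order ncol = 3
		System.out.println(QR.ntran + " Householder transformations were generated");
	}
}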


@ -1,26 +1,24 @@
package Jampack;
class ZhqrdTest {
class ZhqrdTest{
public static void main(String[] args) throws JampackException {
public static void main(String[] args)
throws JampackException{
Zmat v = new Zmat(4,1);
for (int i=0; i<4; i++){
v.re[i][0] = 1;
v.im[i][0] = i;
}
Z1 u = House.genc(v, v.bx, v.rx, v.bx);
Zmat uu = new Zmat(u);
Zmat A = new Zmat(Minus.o(Eye.o(4), Times.o(uu, H.o(uu))));
Zmat B = A.get(A.bx, A.rx-1, A.bx, A.cx);
Zhqrd QR = new Zhqrd(B);
Print.o(QR.R);
Print.o(Norm.fro(Minus.o(Times.o(H.o(B), B), Times.o(H.o(QR.R),QR.R))));
Print.o(Norm.fro(QR.R), 20, 16);
QR = new Zhqrd(A);
Print.o(Norm.fro(Minus.o(A, QR.qb(QR.R))));
Print.o(Norm.fro(Minus.o(QR.R, QR.qhb(A))));
}
Zmat v = new Zmat(4, 1);
for (int i = 0; i < 4; i++) {
v.re[i][0] = 1;
v.im[i][0] = i;
}
Z1 u = House.genc(v, v.bx, v.rx, v.bx);
Zmat uu = new Zmat(u);
Zmat A = new Zmat(Minus.o(Eye.o(4), Times.o(uu, H.o(uu))));
Zmat B = A.get(A.bx, A.rx - 1, A.bx, A.cx);
Zhqrd QR = new Zhqrd(B);
Print.o(QR.R);
Print.o(Norm.fro(Minus.o(Times.o(H.o(B), B), Times.o(H.o(QR.R), QR.R))));
Print.o(Norm.fro(QR.R), 20, 16);
QR = new Zhqrd(A);
Print.o(Norm.fro(Minus.o(A, QR.qb(QR.R))));
Print.o(Norm.fro(Minus.o(QR.R, QR.qhb(A))));
}
}


@ -1,40 +1,33 @@
package Jampack;
/**
* Zltmat is a tag class of Zmat, which tells Jampack to expect a lower triangular matrix. The user is entirely responsible for
* the matrix having the proper form, and Jampack programs do no checking. For the constructors, see the corresponding
* constructors for <a href="Zmat.html"> Zmat </a>.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
Zltmat is a tag class of Zmat, which tells Jampack to expect a
lower triangular matrix. The user is entirely responsible for the
matrix having the proper form, and Jampack programs do no checking.
For the constructors, see the corresponding constructors for <a
href="Zmat.html"> Zmat </a>.
public class Zltmat extends Zmat {
@version Pre-alpha
@author G. W. Stewart
public Zltmat(double re[][], double im[][]) throws JampackException {
super(re, im);
}
*/
public Zltmat(Z A[][]) {
super(A);
}
public Zltmat(double A[][]) {
super(A);
}
public class Zltmat extends Zmat{
public Zltmat(Zmat A) {
super(A);
}
public Zltmat(double re[][], double im[][])
throws JampackException{
super(re, im);
}
public Zltmat(Z A[][]){
super(A);
}
public Zltmat(double A[][]){
super(A);
}
public Zltmat(Zmat A){
super(A);
}
public Zltmat(int nrow, int ncol){
super(nrow, ncol);
}
public Zltmat(int nrow, int ncol) {
super(nrow, ncol);
}
}
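
A tiny illustrative sketch (not part of the commit) of what the tag-class contract above means in practice: the caller guarantees the triangular form, and Jampack does no checking. ZltmatTagExample is a hypothetical class name.

package Jampack;

class ZltmatTagExample {
	public static void main(String[] args) throws JampackException {
		double[][] lower = { { 2, 0, 0 }, { 1, 3, 0 }, { 4, 5, 6 } };
		Zltmat L = new Zltmat(lower); // accepted as-is; lower-triangular form is the caller's responsibility
		Print.o(L);
	}
}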


@ -1,152 +1,159 @@
package Jampack;
/**
Zludpp implements the LU decomposition with partial
pivoting. Specifically, given a matrix A, there
is a permunation matrix P, a unit lower triangular matrix
L whose subdiagonal elements are less than one in magnitude
and a upper triangular matrix U such that
<pre>
* A = PLU
</pre>
Zludpp represents P as a pivot array (see <a href="Jampack.Pivot.html">
Pivot.java </a>), L as a Zltmat, and U as a Zutmat.
 * Zludpp implements the LU decomposition with partial pivoting. Specifically, given a matrix A, there is a permutation matrix P,
 * a unit lower triangular matrix L whose subdiagonal elements are less than one in magnitude and an upper triangular matrix U such
* that
*
* <pre>
* A = PLU
* </pre>
*
* Zludpp represents P as a pivot array (see <a href="Jampack.Pivot.html"> Pivot.java </a>), L as a Zltmat, and U as a Zutmat.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zludpp {
public class Zludpp{
/** The number of rows in L */
public int nrl;
/** The number of columns in L */
public int ncl;
/** The number of rows in U */
int nru;
/** The number of columns in U */
int ncu;
/** The pivot array (see <a href="Jampack.Pivot.html"> Pivot.java </a>) */
public int pvt[];
/** The lower triangular matrix L */
public Zltmat L;
/** The upper triangular matrix U */
public Zutmat U;
/**
	 * Computes the partially pivoted LU decomposition.
*
* @param A
* A Zmat
* @return The Zludpp of A
* @exception JampackException
* Passed from below.
*/
/** The number of rows in L */
public int nrl;
/** The number of columns in L */
public int ncl;
/** The number of rows in U */
int nru;
/** The number of columns in U */
int ncu;
/** The pivot array (see <a href="Jampack.Pivot.html"> Pivot.java </a>) */
public int pvt[];
/** The lower triangular matrix L */
public Zltmat L;
/** The upper triangular matrix U */
public Zutmat U;
public Zludpp(Zmat A) throws JampackException {
int i, j, k, nr, nc;
double absi, mx, t;
Zmat T;
Z Tk[];
/**
Computes the partially pivoted LU decompostion.
A.getProperties();
@param A A Zmat
@return The Zludpp of A
@exception JampackException
Passed from below.
*/
/* Set up L and U */
public Zludpp(Zmat A)
throws JampackException{
int i, j, k, nr, nc;
double absi, mx, t;
Zmat T;
Z Tk[];
nr = A.nr;
nrl = nr;
nc = A.nc;
ncl = Math.min(A.nr, A.nc);
nru = ncl;
ncu = nc;
A.getProperties();
L = new Zltmat(nrl, ncl);
U = new Zutmat(nru, ncu);
pvt = new int[ncl];
/* Set up L and U */
/*
* Set up the matrix T in which the elimination will be performed and copy A to it.
*/
nr = A.nr;
nrl = nr;
nc = A.nc;
ncl = Math.min(A.nr, A.nc);
nru = ncl;
ncu = nc;
if (nrl >= ncu)
T = L;
else
T = U;
for (i = 0; i < nr; i++) {
for (j = 0; j < nc; j++) {
T.re[i][j] = A.re[i][j];
T.im[i][j] = A.im[i][j];
}
}
L = new Zltmat(nrl, ncl);
U = new Zutmat(nru, ncu);
pvt = new int[ncl];
/* Outer elimination loop. */
		Tk = new Z[nrl]; // This should be replaced by a Z1.
/* Set up the matrix T in which the elimination will be
performed and copy A to it.*/
for (k = 0; k < Math.min(nr, nc); k++) {
if (nrl>= ncu)
T = L;
else
T = U;
/* Find the pivot row. */
for (i=0; i<nr; i++){
for (j=0; j<nc; j++){
T.re[i][j] = A.re[i][j];
T.im[i][j] = A.im[i][j];
}
}
mx = 0.;
pvt[k] = k;
for (i = k; i < nr; i++) {
Tk[i] = T.get0(i, k);
if ((absi = Z.abs(Tk[i])) > mx) {
pvt[k] = i;
mx = absi;
}
}
if (mx == 0.0)
continue;
/* Outer elimination loop. */
/* Perform the exchange. */
Tk = new Z[nrl]; // This should be repaced by a Z1.
Tk[k].Exch(Tk[pvt[k]]);
for (j = 0; j < nc; j++) {
t = T.re[k][j];
T.re[k][j] = T.re[pvt[k]][j];
T.re[pvt[k]][j] = t;
t = T.im[k][j];
T.im[k][j] = T.im[pvt[k]][j];
T.im[pvt[k]][j] = t;
}
for (k=0; k< Math.min(nr,nc); k++){
/* Compute multipliers and eliminate. */
/* Find the pivot row. */
for (i = k + 1; i < nr; i++) {
T.put0(i, k, Tk[i].Div(Tk[i], Tk[k]));
for (j = k + 1; j < nc; j++) {
T.re[i][j] = T.re[i][j] - T.re[i][k] * T.re[k][j] + T.im[i][k] * T.im[k][j];
T.im[i][j] = T.im[i][j] - T.im[i][k] * T.re[k][j] - T.re[i][k] * T.im[k][j];
}
}
}
mx = 0.;
pvt[k] = k;
for (i=k; i<nr; i++){
Tk[i] = T.get0(i,k);
if ((absi = Z.abs(Tk[i])) > mx){
pvt[k] = i;
mx = absi;
}
}
if (mx == 0.0) continue;
/* Finalize L and U */
/* Perform the exchange. */
Tk[k].Exch(Tk[pvt[k]]);
for (j=0; j<nc; j++){
t=T.re[k][j]; T.re[k][j]=T.re[pvt[k]][j]; T.re[pvt[k]][j]=t;
t=T.im[k][j]; T.im[k][j]=T.im[pvt[k]][j]; T.im[pvt[k]][j]=t;
}
/* Compute multipliers and eliminate. */
for (i=k+1; i<nr; i++){
T.put0(i, k, Tk[i].Div(Tk[i],Tk[k]));
for (j=k+1; j<nc; j++){
T.re[i][j] = T.re[i][j]
- T.re[i][k]*T.re[k][j] + T.im[i][k]*T.im[k][j];
T.im[i][j] = T.im[i][j]
- T.im[i][k]*T.re[k][j] - T.re[i][k]*T.im[k][j];
}
}
}
/* Finalize L and U */
if (nr >= nc) // Copy U from T.
for (i=0; i<nc; i++){
for (j=0; j<nc; j++)
if (i > j){
U.re[i][j] = 0.0; U.im[i][j] = 0.0;
}
else{
U.re[i][j] = T.re[i][j]; U.im[i][j] = T.im[i][j];
L.re[i][j] = 0.0; L.im[i][j] = 0.0;
}
L.re[i][i] = 1.0; L.im[i][i] = 0.0;
}
else // Copy L from T.
for (i=0; i<nr; i++){
for (j=0; j<nr; j++)
if (i > j){
L.re[i][j] = T.re[i][j]; L.im[i][j] = T.im[i][j];
U.re[i][j] = 0.0; U.im[i][j] = 0.0;
}
else{
L.re[i][j] = 0.0; L.im[i][j] = 0.0;
}
L.re[i][i] = 1.0; L.im[i][i] = 0.0;
}
}
if (nr >= nc) // Copy U from T.
for (i = 0; i < nc; i++) {
for (j = 0; j < nc; j++)
if (i > j) {
U.re[i][j] = 0.0;
U.im[i][j] = 0.0;
} else {
U.re[i][j] = T.re[i][j];
U.im[i][j] = T.im[i][j];
L.re[i][j] = 0.0;
L.im[i][j] = 0.0;
}
L.re[i][i] = 1.0;
L.im[i][i] = 0.0;
}
else
// Copy L from T.
for (i = 0; i < nr; i++) {
for (j = 0; j < nr; j++)
if (i > j) {
L.re[i][j] = T.re[i][j];
L.im[i][j] = T.im[i][j];
U.re[i][j] = 0.0;
U.im[i][j] = 0.0;
} else {
L.re[i][j] = 0.0;
L.im[i][j] = 0.0;
}
L.re[i][i] = 1.0;
L.im[i][i] = 0.0;
}
}
}
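
For illustration only (not part of the commit): since A = PLU with P a permutation and L unit lower triangular, |det A| is the product of the moduli of U's diagonal entries. The sketch uses the public U factor and the zero-based accessors shown elsewhere in this commit; ZludppDetExample is a hypothetical class name, and ZludppTest below verifies the factorization.

package Jampack;

class ZludppDetExample {
	public static void main(String[] args) throws JampackException {
		int n = 4;
		Zmat A = new Zmat(n, n);
		for (int i = A.bx; i <= A.rx; i++)
			for (int j = A.bx; j <= A.cx; j++)
				A.put(i, j, new Z(Math.sin(i + 2 * j), Math.cos(i - j)));
		Zludpp LU = new Zludpp(A);
		double absdet = 1;
		for (int k = 0; k < n; k++)
			absdet *= Z.abs(LU.U.get0(k, k)); // |det A| = product of |u_kk|
		System.out.println("|det A| = " + absdet);
	}
}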


@ -1,31 +1,29 @@
package Jampack;
class ZludppTest{
class ZludppTest {
public static void main(String[] args)
throws JampackException{
int nr=5, nc=7;
public static void main(String[] args) throws JampackException {
int nr = 5, nc = 7;
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
Z B[][] = new Z[nr][nc];
for (int i=0; i<nr; i++){
for (int j=0; j<nc; j++)
B[i][j] = new Z(i+1,j+1);
if (i < nc)
B[i][i] = new Z(1,1);
}
Z B[][] = new Z[nr][nc];
for (int i = 0; i < nr; i++) {
for (int j = 0; j < nc; j++)
B[i][j] = new Z(i + 1, j + 1);
if (i < nc)
B[i][i] = new Z(1, 1);
}
Zmat A = new Zmat(B);
Zmat A = new Zmat(B);
Zludpp LU = new Zludpp(A);
Zludpp LU = new Zludpp(A);
Print.o(LU.pvt, 4);
Print.o(LU.pvt, 4);
Pivot.row(A, LU.pvt);
Pivot.row(A, LU.pvt);
Print.o(Norm.fro(Minus.o(A, Times.o(LU.L, LU.U))));
Print.o(Norm.fro(Minus.o(A, Times.o(LU.L, LU.U))));
}
}
}


@ -1,506 +1,547 @@
package Jampack;
/**
Zmat implements general complex matrix stored in a rectangular
array class Z.
@version Pre-alpha
@author G. W. Stewart
*/
public class Zmat{
/** The number of rows */
protected int nrow;
/** The number of columns */
protected int ncol;
/** The base index */
protected int basex;
/** The real part of the matrix */
protected double re[][];
/** The imaginary part of the matrix */
protected double im[][];
/** True if the matrix has been altered */
protected boolean dirty;
/** Points to an LU decompoistion of the matrix
provided one exists */
protected Zludpp LU;
/** Points to a Householder QR decompoistion of the matrix
provided one exists */
protected Zhqrd HQR;
/** Points to a Cholesky decompoistion of the matrix
provided one exists */
protected Zchol CHOL;
/** The base index */
public int bx;
/** The upper row index */
public int rx;
/** The number of rows */
public int nr;
/** The upper column index */
public int cx;
/** The number of columns */
public int nc;
/**
Creates a Zmat and initializes its real and imaginary
parts to a pair of arrays.
 * Zmat implements a general complex matrix stored in a rectangular array of class Z.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@param re Contains the real part.
@param im Contains the imaginary part.
@exception JampackException if the dimensions of re and im
do not match
*/
public class Zmat {
public Zmat(double re[][], double im[][])
throws JampackException{
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = re.length;
ncol = re[0].length;
if (nrow != im.length || ncol != im[0].length)
throw new JampackException
("Inconsistent array dimensions");
getProperties();
this.re = new double[nr][nc];
this.im = new double[nr][nc];
for (int i=0; i<nr; i++)
for (int j=0; j<nc; j++){
this.re[i][j] = re[i][j];
this.im[i][j] = im[i][j];
}
}
/** The number of rows */
protected int nrow;
/** The number of columns */
protected int ncol;
/**
Creates a Zmat and initializes it to an array of class Z.
*/
/** The base index */
protected int basex;
public Zmat(Z A[][]){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.length;
ncol = A[0].length;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++)
for (int j=0; j<nc; j++){
re[i][j] = A[i][j].re;
im[i][j] = A[i][j].im;
}
}
/**
Creates a Zmat and initializes its real part to
to an array of class double. The imaginary part is
set to zero.
*/
/** The real part of the matrix */
protected double re[][];
public Zmat(double A[][]){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.length;
ncol = A[0].length;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++)
for (int j=0; j<nc; j++){
re[i][j] = A[i][j];
im[i][j] = 0;
}
}
/** The imaginary part of the matrix */
protected double im[][];
/**
Creates a Zmat and intitializes it to a Zmat.
*/
/** True if the matrix has been altered */
protected boolean dirty;
public Zmat(Zmat A){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.nrow;
ncol = A.ncol;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++)
for (int j=0; j<nc; j++){
re[i][j] = A.re[i][j];
im[i][j] = A.im[i][j];
}
}
/**
	 * Points to an LU decomposition of the matrix provided one exists
*/
protected Zludpp LU;
/**
Creates a Zmat and initialize it to a Z1.
*/
/**
	 * Points to a Householder QR decomposition of the matrix provided one exists
*/
protected Zhqrd HQR;
public Zmat(Z1 A){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.n;
ncol = 1;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++){
re[i][0] = A.re[i];
im[i][0] = A.im[i];
}
}
/**
	 * Points to a Cholesky decomposition of the matrix provided one exists
*/
protected Zchol CHOL;
/**
Creates a Zmat and initialize it to a Zdiagmat.
*/
/** The base index */
public int bx;
public Zmat(Zdiagmat D){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = D.n;
ncol = D.n;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++){
re[i][i] = D.re[i];
im[i][i] = D.im[i];
}
}
/** The upper row index */
public int rx;
/**
Creates a Zmat and initializes it to zero.
*/
/** The number of rows */
public int nr;
public Zmat(int nrow, int ncol){
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.nrow = nrow;
this.ncol = ncol;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i=0; i<nr; i++)
for (int j=0; j<nc; j++){
re[i][j] = 0;
im[i][j] = 0;
}
}
/** The upper column index */
public int cx;
/** The number of columns */
public int nc;
/**
Sets the public parameters.
*/
/**
* Creates a Zmat and initializes its real and imaginary parts to a pair of arrays.
*
* @param re
* Contains the real part.
* @param im
* Contains the imaginary part.
* @exception JampackException
* if the dimensions of re and im do not match
*/
public void getProperties(){
bx = basex;
rx = bx + nrow - 1;
cx = bx + ncol - 1;
nr = nrow;
nc = ncol;
}
public Zmat(double re[][], double im[][]) throws JampackException {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = re.length;
ncol = re[0].length;
if (nrow != im.length || ncol != im[0].length)
throw new JampackException("Inconsistent array dimensions");
getProperties();
this.re = new double[nr][nc];
this.im = new double[nr][nc];
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++) {
this.re[i][j] = re[i][j];
this.im[i][j] = im[i][j];
}
}
/**
Returns a copy of the real part of a Zmat.
*/
/**
* Creates a Zmat and initializes it to an array of class Z.
*/
public double[][] getRe(){
public Zmat(Z A[][]) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.length;
ncol = A[0].length;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++) {
re[i][j] = A[i][j].re;
im[i][j] = A[i][j].im;
}
}
double[][] A = new double[nrow][ncol];
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++)
A[i][j] = re[i][j];
return A;
}
/**
Returns a copy of the imaginary part of a Zmat.
*/
/**
	 * Creates a Zmat and initializes its real part to an array of class double. The imaginary part is set to zero.
*/
public double[][] getIm(){
public Zmat(double A[][]) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.length;
ncol = A[0].length;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++) {
re[i][j] = A[i][j];
im[i][j] = 0;
}
}
double[][] A = new double[nrow][ncol];
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++)
A[i][j] = im[i][j];
return A;
}
/**
Returns a copy of the real and imaginary parts as a complex array.
*/
/**
	 * Creates a Zmat and initializes it to a Zmat.
*/
public Z[][] getZ(){
public Zmat(Zmat A) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.nrow;
ncol = A.ncol;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++) {
re[i][j] = A.re[i][j];
im[i][j] = A.im[i][j];
}
}
Z[][] A = new Z[nrow][ncol];
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++)
A[i][j] = new Z(re[i][j], im[i][j]);
return A;
}
/**
Returns the (ii,jj)-element of a Zmat.
@param ii The row index of the element
@param jj The column index of the element
*/
/**
	 * Creates a Zmat and initializes it to a Z1.
*/
public Z get(int ii, int jj){
public Zmat(Z1 A) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = A.n;
ncol = 1;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++) {
re[i][0] = A.re[i];
im[i][0] = A.im[i];
}
}
return new Z(re[ii-basex][jj-basex],im[ii-basex][jj-basex]);
}
/**
	 * Creates a Zmat and initializes it to a Zdiagmat.
*/
/**
Returns the zero-based (i,j)-element of a Zmat.
@param i The row index of the element
@param j The column index of the element
*/
public Zmat(Zdiagmat D) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
nrow = D.n;
ncol = D.n;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++) {
re[i][i] = D.re[i];
im[i][i] = D.im[i];
}
}
public Z get0(int i, int j){
/**
* Creates a Zmat and initializes it to zero.
*/
return new Z(re[i][j],im[i][j]);
}
public Zmat(int nrow, int ncol) {
Parameters.BaseIndexNotChangeable = true;
basex = Parameters.BaseIndex;
this.nrow = nrow;
this.ncol = ncol;
getProperties();
re = new double[nr][nc];
im = new double[nr][nc];
for (int i = 0; i < nr; i++)
for (int j = 0; j < nc; j++) {
re[i][j] = 0;
im[i][j] = 0;
}
}
/**
Writes the (ii,jj) element of a Zmat.
@param ii The row index of the element
@param jj The column index of the element
@param a The new value of the element
*/
public void put(int ii, int jj, Z a){
/**
* Sets the public parameters.
*/
dirty = true;
re[ii-basex][jj-basex] = a.re;
im[ii-basex][jj-basex] = a.im;
}
public void getProperties() {
bx = basex;
rx = bx + nrow - 1;
cx = bx + ncol - 1;
nr = nrow;
nc = ncol;
}
/**
Writes the zero-based (i,j)-element of a Zmat.
@param i The row index of the element
@param j The column index of the element
@param a The new value of the element
*/
public void put0(int i, int j, Z a){
/**
* Returns a copy of the real part of a Zmat.
*/
dirty = true;
re[i][j] = a.re;
im[i][j] = a.im;
}
public double[][] getRe() {
/**
Returns the submatrix (ii1:ii2, jj1:jj2).
@param ii1 The lower column index
@param ii2 The upper column index
@param jj1 The lower row index
@param jj2 The upper row index
*/
double[][] A = new double[nrow][ncol];
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++)
A[i][j] = re[i][j];
return A;
}
public Zmat get(int ii1, int ii2, int jj1, int jj2){
int nrow = ii2-ii1+1;
int ncol = jj2-jj1+1;
Zmat A = new Zmat(nrow, ncol);
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
A.re[i][j] = re[i+ii1-basex][j+jj1-basex];
A.im[i][j] = im[i+ii1-basex][j+jj1-basex];
}
return A;
}
/**
* Returns a copy of the imaginary part of a Zmat.
*/
/**
Overwrites the submatrix (ii1:ii2, jj1:jj2) with a Zmat.
@param ii1 The lower column index
@param ii2 The upper column index
@param jj1 The lower row index
@param jj2 The upper row index
@param A The new value of the submatrix
*/
public double[][] getIm() {
public void put(int ii1, int ii2, int jj1, int jj2, Zmat A){
dirty = true;
int nrow = ii2-ii1+1;
int ncol = jj2-jj1+1;
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
re[i+ii1-basex][j+jj1-basex]= A.re[i][j];
im[i+ii1-basex][j+jj1-basex]= A.im[i][j];
}
}
double[][] A = new double[nrow][ncol];
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++)
A[i][j] = im[i][j];
return A;
}
/**
Returns the submatrix (ii[], jj1:jj2).
@param i[] Contains the row indices of the submatrix
@param jj1 The lower column index
@param jj2 The upper column index
*/
/**
* Returns a copy of the real and imaginary parts as a complex array.
*/
public Zmat get(int ii[], int jj1, int jj2){
int nrow = ii.length;
int ncol = jj2-jj1+1;
Zmat A = new Zmat(nrow, ncol);
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
A.re[i][j] = re[ii[i]-basex][j+jj1-basex];
A.im[i][j] = im[ii[i]-basex][j+jj1-basex];
}
return A;
}
public Z[][] getZ() {
/**
Overwrites the submatrix (ii[], jj1:jj2) with a Zmat.
@param i[] Contains the row indices of the submatrix
@param jj1 The lower column index
@param jj2 The upper column index
@param A The new value of the submatrix.
*/
Z[][] A = new Z[nrow][ncol];
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++)
A[i][j] = new Z(re[i][j], im[i][j]);
return A;
}
public void put(int ii[], int jj1, int jj2, Zmat A){
dirty = true;
int nrow = ii.length;
int ncol = jj2-jj1+1;
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
re[ii[i]-basex][j+jj1-basex] = A.re[i][j];
im[ii[i]-basex][j+jj1-basex] = A.im[i][j];
}
}
/**
 * Returns the (ii,jj)-element of a Zmat.
 *
 * @param ii
 *            The row index of the element
 * @param jj
 *            The column index of the element
 */
public Z get(int ii, int jj) {
return new Z(re[ii - basex][jj - basex], im[ii - basex][jj - basex]);
}
/**
Returns the submatrix (ii1:ii2, jj[]).
@param ii1 The lower row index
@param ii2 The upper row index
@param jj[] Contains the column indices of the submatrix
*/
public Zmat get(int ii1, int ii2, int jj[]){
int nrow = ii2-ii1+1;
int ncol = jj.length;
Zmat A = new Zmat(nrow, ncol);
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
A.re[i][j] = re[i+ii1-basex][jj[j]-basex];
A.im[i][j] = im[i+ii1-basex][jj[j]-basex];
}
return A;
}
/**
Overwrites the submatrix (ii1:ii2, jj[]) with a Zmat.
@param ii1 The lower row index
@param ii2 The upper row index
@param jj[] Contains the column indices of the submatrix
@param A The new value of the submatrix
*/
public void put(int ii1, int ii2, int jj[], Zmat A){
dirty = true;
int nrow = ii2-ii1+1;
int ncol = jj.length;
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
re[i+ii1-basex][jj[j]-basex] = A.re[i][j];
im[i+ii1-basex][jj[j]-basex] = A.im[i][j];
}
}
/**
 * Returns the zero-based (i,j)-element of a Zmat.
 *
 * @param i
 *            The row index of the element
 * @param j
 *            The column index of the element
 */
public Z get0(int i, int j) {
return new Z(re[i][j], im[i][j]);
}
/**
Returns the submatrix (ii[], jj[]).
@param ii[] Contains the row indices of the submatrix
@param jj[] Contains the column indices of the submatrix
*/
public Zmat get(int ii[] , int jj[]){
int nrow = ii.length;
int ncol = jj.length;
Zmat A = new Zmat(nrow, ncol);
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
A.re[i][j] = re[ii[i]-basex][jj[j]-basex];
A.im[i][j] = im[ii[i]-basex][jj[j]-basex];
}
return A;
}
/**
* Writes the (ii,jj) element of a Zmat.
*
* @param ii
* The row index of the element
* @param jj
* The column index of the element
* @param a
* The new value of the element
*/
public void put(int ii, int jj, Z a) {
dirty = true;
re[ii - basex][jj - basex] = a.re;
im[ii - basex][jj - basex] = a.im;
}
/**
Overwrites the submatrix (ii[], jj[]) with a Zmat.
@param ii[] Contains the row indices of the submatrix
@param jj[] Contains the column indices of the submatrix
@param A The value of the new submatrix
*/
public void put(int ii[] , int jj[], Zmat A){
dirty = true;
int nrow = ii.length;
int ncol = jj.length;
for (int i=0; i<nrow; i++)
for (int j=0; j<ncol; j++){
re[ii[i]-basex][jj[j]-basex] = A.re[i][j];
im[ii[i]-basex][jj[j]-basex] = A.im[i][j];
}
}
/**
* Writes the zero-based (i,j)-element of a Zmat.
*
* @param i
* The row index of the element
* @param j
* The column index of the element
* @param a
* The new value of the element
*/
public void put0(int i, int j, Z a) {
dirty = true;
re[i][j] = a.re;
im[i][j] = a.im;
}
/**
Returns an LU decomposition if a valid one exists. Otherwise
returns null.
*/
public Zludpp getLU(){
clean();
Zludpp temp = LU;
LU = null;
return temp;
}
/**
 * Returns the submatrix (ii1:ii2, jj1:jj2).
 *
 * @param ii1
 *            The lower row index
 * @param ii2
 *            The upper row index
 * @param jj1
 *            The lower column index
 * @param jj2
 *            The upper column index
 */
public Zmat get(int ii1, int ii2, int jj1, int jj2) {
int nrow = ii2 - ii1 + 1;
int ncol = jj2 - jj1 + 1;
Zmat A = new Zmat(nrow, ncol);
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
A.re[i][j] = re[i + ii1 - basex][j + jj1 - basex];
A.im[i][j] = im[i + ii1 - basex][j + jj1 - basex];
}
return A;
}
/**
Returns a Householder QR decomposition if a valid one exists.
Otherwise returns null.
*/
public Zhqrd getHQR(){
clean();
Zhqrd temp = HQR;
HQR = null;
return temp;
}
/**
* Overwrites the submatrix (ii1:ii2, jj1:jj2) with a Zmat.
*
 * @param ii1
 *            The lower row index
 * @param ii2
 *            The upper row index
 * @param jj1
 *            The lower column index
 * @param jj2
 *            The upper column index
* @param A
* The new value of the submatrix
*/
public void put(int ii1, int ii2, int jj1, int jj2, Zmat A) {
dirty = true;
int nrow = ii2 - ii1 + 1;
int ncol = jj2 - jj1 + 1;
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
re[i + ii1 - basex][j + jj1 - basex] = A.re[i][j];
im[i + ii1 - basex][j + jj1 - basex] = A.im[i][j];
}
}
/**
Returns a Cholesky decomposition if a valid one exists.
Otherwise returns null.
*/
public Zchol getCHOL(){
clean();
Zchol temp = CHOL;
CHOL = null;
return temp;
}
/**
 * Returns the submatrix (ii[], jj1:jj2).
 *
 * @param ii
 *            [] Contains the row indices of the submatrix
 * @param jj1
 *            The lower column index
 * @param jj2
 *            The upper column index
 */
public Zmat get(int ii[], int jj1, int jj2) {
int nrow = ii.length;
int ncol = jj2 - jj1 + 1;
Zmat A = new Zmat(nrow, ncol);
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
A.re[i][j] = re[ii[i] - basex][j + jj1 - basex];
A.im[i][j] = im[ii[i] - basex][j + jj1 - basex];
}
return A;
}
/**
 * Overwrites the submatrix (ii[], jj1:jj2) with a Zmat.
 *
 * @param ii
 *            [] Contains the row indices of the submatrix
 * @param jj1
 *            The lower column index
 * @param jj2
 *            The upper column index
 * @param A
 *            The new value of the submatrix.
 */
public void put(int ii[], int jj1, int jj2, Zmat A) {
dirty = true;
int nrow = ii.length;
int ncol = jj2 - jj1 + 1;
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
re[ii[i] - basex][j + jj1 - basex] = A.re[i][j];
im[ii[i] - basex][j + jj1 - basex] = A.im[i][j];
}
}
/**
Nullifies the history pointers if the matrix is dirty
and sets the dirty flag to false.
*/
protected void clean(){
if (dirty){
LU = null;
HQR = null;
CHOL = null;
dirty = false;
}
}
/**
* Returns the submatrix (ii1:ii2, jj[]).
*
* @param ii1
* The lower row index
* @param ii2
* The upper row index
* @param jj
* [] Contains the column indices of the submatrix
*/
public Zmat get(int ii1, int ii2, int jj[]) {
int nrow = ii2 - ii1 + 1;
int ncol = jj.length;
Zmat A = new Zmat(nrow, ncol);
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
A.re[i][j] = re[i + ii1 - basex][jj[j] - basex];
A.im[i][j] = im[i + ii1 - basex][jj[j] - basex];
}
return A;
}
/**
* Overwrites the submatrix (ii1:ii2, jj[]) with a Zmat.
*
* @param ii1
* The lower row index
* @param ii2
* The upper row index
* @param jj
* [] Contains the column indices of the submatrix
* @param A
* The new value of the submatrix
*/
public void put(int ii1, int ii2, int jj[], Zmat A) {
dirty = true;
int nrow = ii2 - ii1 + 1;
int ncol = jj.length;
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
re[i + ii1 - basex][jj[j] - basex] = A.re[i][j];
im[i + ii1 - basex][jj[j] - basex] = A.im[i][j];
}
}
/**
* Returns the submatrix (ii[], jj[]).
*
* @param ii
* [] Contains the row indices of the submatrix
* @param jj
* [] Contains the column indices of the submatrix
*/
public Zmat get(int ii[], int jj[]) {
int nrow = ii.length;
int ncol = jj.length;
Zmat A = new Zmat(nrow, ncol);
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
A.re[i][j] = re[ii[i] - basex][jj[j] - basex];
A.im[i][j] = im[ii[i] - basex][jj[j] - basex];
}
return A;
}
/**
 * Overwrites the submatrix (ii[], jj[]) with a Zmat.
*
* @param ii
* [] Contains the row indices of the submatrix
* @param jj
* [] Contains the column indices of the submatrix
* @param A
* The value of the new submatrix
*/
public void put(int ii[], int jj[], Zmat A) {
dirty = true;
int nrow = ii.length;
int ncol = jj.length;
for (int i = 0; i < nrow; i++)
for (int j = 0; j < ncol; j++) {
re[ii[i] - basex][jj[j] - basex] = A.re[i][j];
im[ii[i] - basex][jj[j] - basex] = A.im[i][j];
}
}
/**
* Returns an LU decomposition if a valid one exists. Otherwise returns null.
*/
public Zludpp getLU() {
clean();
Zludpp temp = LU;
LU = null;
return temp;
}
/**
* Returns a Householder QR decomposition if a valid one exists. Otherwise returns null.
*/
public Zhqrd getHQR() {
clean();
Zhqrd temp = HQR;
HQR = null;
return temp;
}
/**
* Returns a Cholesky decomposition if a valid one exists. Otherwise returns null.
*/
public Zchol getCHOL() {
clean();
Zchol temp = CHOL;
CHOL = null;
return temp;
}
/**
* Nullifies the history pointers if the matrix is dirty and sets the dirty flag to false.
*/
protected void clean() {
if (dirty) {
LU = null;
HQR = null;
CHOL = null;
dirty = false;
}
}
}
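The element and submatrix accessors above are easiest to read next to a concrete call sequence. The following sketch is not part of the commit; it assumes the default one-based indexing and follows the style of the test classes in this diff.

package Jampack;

class ZmatGetPutSketch {

	public static void main(String[] args) throws JampackException {
		Zmat A = new Zmat(3, 3);
		A.put(1, 1, new Z(2.0, 0.0)); // one-based write
		A.put0(2, 2, new Z(0.0, 1.0)); // zero-based write to the one-based (3,3) slot
		Z a11 = A.get(1, 1);
		Zmat topLeft = A.get(1, 2, 1, 2); // copy of the leading 2x2 submatrix
		System.out.println(a11.re + " " + topLeft.get(2, 2).im);
		// Every put() marks the matrix dirty, so clean() drops any cached
		// LU/QR/Cholesky history before getLU()/getHQR()/getCHOL() hand it out;
		// with no decomposition attached yet, getLU() simply returns null.
		System.out.println(A.getLU() == null);
	}
}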


@ -1,138 +1,137 @@
package Jampack;
class ZmatTest{
class ZmatTest {
public static void main(String[] args)
throws JampackException{
Z B[][] = new Z[4][3];
for (int i=0; i<4; i++)
for (int j=0; j<3; j++)
B[i][j] = new Z(i,j);
public static void main(String[] args) throws JampackException {
Z B[][] = new Z[4][3];
for (int i = 0; i < 4; i++)
for (int j = 0; j < 3; j++)
B[i][j] = new Z(i, j);
// Parameters.SetBaseIndex(0);
// Parameters.SetBaseIndex(0);
if (args[0].equals("t1")){
if (args[0].equals("t1")) {
Zmat A = new Zmat(4,5);
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(A);
}
Zmat A = new Zmat(4, 5);
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(A);
}
else if (args[0].equals("t2")){
Zmat A = new Zmat(B);
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(A);
}
else if (args[0].equals("t2")) {
else if (args[0].equals("t3")){
Zmat A = new Zmat(B);
Zmat C = new Zmat(A);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
Zmat A = new Zmat(B);
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(A);
}
else if (args[0].equals("t4")){
Zmat A = new Zmat(B);
Zmat C = new Zmat(A.re, A.im);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
else if (args[0].equals("t3")) {
Zmat A = new Zmat(B);
Zmat C = new Zmat(A);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
else if (args[0].equals("t5")){
Zmat A = new Zmat(B);
Zmat C = new Zmat(A.re);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
else if (args[0].equals("t4")) {
Zmat A = new Zmat(B);
Zmat C = new Zmat(A.re, A.im);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
else if (args[0].equals("t6")){
Zmat A = new Zmat(B);
double[][] C = A.getRe();
double[][] D = A.getIm();
System.out.print(A.nrow + " ");
System.out.print(A.ncol + " ");
Print.o(C);
Print.o(D);
}
else if (args[0].equals("t5")) {
Zmat A = new Zmat(B);
Zmat C = new Zmat(A.re);
System.out.print(C.nrow + " ");
System.out.print(C.ncol + " ");
System.out.print(C.basex);
Print.o(C);
}
else if (args[0].equals("t7")){
Zmat A = new Zmat(B);
Z C[][] = A.getZ();
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(C);
}
else if (args[0].equals("t6")) {
Zmat A = new Zmat(B);
double[][] C = A.getRe();
double[][] D = A.getIm();
System.out.print(A.nrow + " ");
System.out.print(A.ncol + " ");
Print.o(C);
Print.o(D);
}
else if (args[0].equals("t8")){
Zmat A = new Zmat(B);
Z ell = new Z();
ell.Eq(A.get(3,2));
System.out.print(ell.re + " ");
System.out.print(ell.im);
System.out.print("\n");
A.put(3, 2, Z.ZERO);
ell.Eq(A.get(3,2));
System.out.print(ell.re + " ");
System.out.print(ell.im);
System.out.print("\n");
Print.o(A);
}
else if (args[0].equals("t7")) {
Zmat A = new Zmat(B);
Z C[][] = A.getZ();
System.out.print(A.nrow + " ");
System.out.print(A.ncol);
Print.o(C);
}
else if (args[0].equals("t9")){
Zmat A = new Zmat(B);
Zmat C = A.get(1,3,1,2);
Print.o(C);
C = new Zmat(3,2);
A.put(1, 3, 1, 2, C);
C = A.get(1, 3, 1, 2);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t8")) {
Zmat A = new Zmat(B);
Z ell = new Z();
ell.Eq(A.get(3, 2));
System.out.print(ell.re + " ");
System.out.print(ell.im);
System.out.print("\n");
A.put(3, 2, Z.ZERO);
ell.Eq(A.get(3, 2));
System.out.print(ell.re + " ");
System.out.print(ell.im);
System.out.print("\n");
Print.o(A);
}
else if (args[0].equals("t10")){
Zmat A = new Zmat(B);
int[] ir = new int[] {A.rx, A.bx};
Zmat C = A.get(ir,1,2);
Print.o(C);
C = new Zmat(2,2);
A.put(ir, 1, 2, C);
C = A.get(ir, 1, 2);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t9")) {
Zmat A = new Zmat(B);
Zmat C = A.get(1, 3, 1, 2);
Print.o(C);
C = new Zmat(3, 2);
A.put(1, 3, 1, 2, C);
C = A.get(1, 3, 1, 2);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t11")){
Zmat A = new Zmat(B);
int[] jr = new int[] {A.cx, A.bx};
Zmat C = A.get(1, 3, jr);
Print.o(C);
C = new Zmat(3,2);
A.put(1, 3, jr, C);
C = A.get(1, 3, jr);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t10")) {
Zmat A = new Zmat(B);
int[] ir = new int[] { A.rx, A.bx };
Zmat C = A.get(ir, 1, 2);
Print.o(C);
C = new Zmat(2, 2);
A.put(ir, 1, 2, C);
C = A.get(ir, 1, 2);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t12")){
Zmat A = new Zmat(B);
int[] ir = new int[] {A.rx,A.bx};
int[] jr = new int[] {A.cx,A.bx};
Zmat C = A.get(ir, jr);
Print.o(C);
C = new Zmat(2,2);
A.put(ir, jr, C);
C = A.get(ir, jr);
Print.o(C);
Print.o(A);
}
}
else if (args[0].equals("t11")) {
Zmat A = new Zmat(B);
int[] jr = new int[] { A.cx, A.bx };
Zmat C = A.get(1, 3, jr);
Print.o(C);
C = new Zmat(3, 2);
A.put(1, 3, jr, C);
C = A.get(1, 3, jr);
Print.o(C);
Print.o(A);
}
else if (args[0].equals("t12")) {
Zmat A = new Zmat(B);
int[] ir = new int[] { A.rx, A.bx };
int[] jr = new int[] { A.cx, A.bx };
Zmat C = A.get(ir, jr);
Print.o(C);
C = new Zmat(2, 2);
A.put(ir, jr, C);
C = A.get(ir, jr);
Print.o(C);
Print.o(A);
}
}
}


@ -1,38 +1,33 @@
package Jampack;
/**
* Zpsdmat is a tag class of Zmat, which tells Jampack to expect a (Hermitian) positive semidefinite matrix. The user is entirely
* responsible for the matrix having the proper form, and Jampack programs do no checking. For the constructors, see the
* corresponding constructors for <a href="Zmat.html"> Zmat </a>.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
Zpsdmat is a tag class of Zmat, which tells Jampack to expect a
(Hermitian) positive semidefinite matrix. The user is entirely
responsible for the matrix having the proper form, and Jampack
programs do no checking. For the constructors, see the
corresponding constructors for <a href="Zmat.html"> Zmat </a>.
public class Zpsdmat extends Zmat {
@version Pre-alpha
@author G. W. Stewart
*/
public Zpsdmat(double re[][], double im[][]) throws JampackException {
super(re, im);
}
public class Zpsdmat extends Zmat{
public Zpsdmat(Z A[][]) {
super(A);
}
public Zpsdmat(double A[][]) {
super(A);
}
public Zpsdmat(double re[][], double im[][])
throws JampackException{
super(re, im);
}
public Zpsdmat(Zmat A) {
super(A);
}
public Zpsdmat(Z A[][]){
super(A);
}
public Zpsdmat(double A[][]){
super(A);
}
public Zpsdmat(Zmat A){
super(A);
}
public Zpsdmat(int nrow, int ncol){
super(nrow, ncol);
}
public Zpsdmat(int nrow, int ncol) {
super(nrow, ncol);
}
}
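Because Zpsdmat adds no behaviour of its own, a two-line sketch (not part of the diff) is enough to show its intent: the caller wraps data it believes to be Hermitian positive semidefinite, and Jampack takes that claim on trust.

package Jampack;

class ZpsdmatSketch {

	public static void main(String[] args) throws JampackException {
		// A real symmetric positive definite 2x2 matrix; Jampack performs no
		// check, the tag class merely records the caller's claim.
		double[][] a = { { 4.0, 1.0 }, { 1.0, 3.0 } };
		Zpsdmat A = new Zpsdmat(a);
		Print.o(A);
	}
}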


@ -1,65 +1,62 @@
package Jampack;
/**
Implements a QR decomposition. Specifically,
given a matrix A there are is a unitary matrix Q
such that
<pre>
* Q<sup>H</sup>A = R
</pre>
where R is zero below its diagonal. In constructing
this decomposition, Zqrd represents Q as a Zmat.
R is represented by a Zutmat.
<p>
At a later stage an economical version of the decomposition
will be implemented, in which only A.nc columns of Q
are returned.
 * Implements a QR decomposition. Specifically, given a matrix A there is a unitary matrix Q such that
*
* <pre>
* Q<sup>H</sup>A = R
* </pre>
*
* where R is zero below its diagonal. In constructing this decomposition, Zqrd represents Q as a Zmat. R is represented by a
* Zutmat.
* <p>
* At a later stage an economical version of the decomposition will be implemented, in which only A.nc columns of Q are returned.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
@version Pre-alpha
@author G. W. Stewart
*/
public class Zqrd {
public class Zqrd{
/** The unitary matrix Q */
/** The unitary matrix Q */
public Zmat Q;
public Zmat Q;
/** The R factor. If A.nr&gt;A.nc then R is square of order
A.nc. Otherwise R has the same dimensions as A. */
public Zutmat R;
/**
* The R factor. If A.nr&gt;A.nc then R is square of order A.nc. Otherwise R has the same dimensions as A.
*/
/**
Constructs a Zqrd from a Zmat.
public Zutmat R;
@param A A Zmat
@return The Zqrd of A
@exception JampackException
Passed from below.
*/
/**
* Constructs a Zqrd from a Zmat.
*
* @param A
* A Zmat
* @return The Zqrd of A
* @exception JampackException
* Passed from below.
*/
public Zqrd(Zmat A)
throws JampackException{
public Zqrd(Zmat A) throws JampackException {
Zhqrd hqr;
Zhqrd hqr;
A.getProperties();
if(A.HQR == null){
hqr = new Zhqrd(A);
}
else{
hqr = A.HQR;
}
A.getProperties();
if (A.HQR == null) {
hqr = new Zhqrd(A);
} else {
hqr = A.HQR;
}
R = hqr.R;
if (A.nr > A.nc){
R = new Zutmat(Merge.o21(R, new Zmat(A.nr-A.nc, A.nc)));
}
Q = Eye.o(A.nr);
for (int k=hqr.ntran-1; k>=0; k--){
House.ua(hqr.U[k], Q, k+A.bx, A.rx, k+A.bx, A.rx);
}
}
R = hqr.R;
if (A.nr > A.nc) {
R = new Zutmat(Merge.o21(R, new Zmat(A.nr - A.nc, A.nc)));
}
Q = Eye.o(A.nr);
for (int k = hqr.ntran - 1; k >= 0; k--) {
House.ua(hqr.U[k], Q, k + A.bx, A.rx, k + A.bx, A.rx);
}
}
}


@ -1,30 +1,26 @@
package Jampack;
class ZqrdTest{
class ZqrdTest {
public static void main(String[] args) throws JampackException {
public static void main(String[] args)
throws JampackException{
int i, j, m = 10, n = 10;
int i, j, m=10, n=10;
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
Z Aa[][] = new Z[m][n];
for (i = 0; i < m; i++) {
for (j = 0; j < n; j++) {
Aa[i][j] = new Z(i + 1, j + 1);
}
}
Z Aa[][] = new Z[m][n];
for (i=0; i<m; i++){
for (j=0; j<n; j++){
Aa[i][j] = new Z(i+1, j+1);
}
}
Zmat A = new Zmat(Aa);
Zmat A = new Zmat(Aa);
Zqrd X = new Zqrd(A);
Zqrd X = new Zqrd(A);
Print.o(Norm.fro(Minus.o(Eye.o(X.Q.nc), Times.o(H.o(X.Q), X.Q))));
Print.o(Norm.fro(Minus.o(Eye.o(X.Q.nc), Times.o(H.o(X.Q), X.Q))));
Print.o(Norm.fro(Minus.o(A, Times.o(X.Q, X.R))));
}
Print.o(Norm.fro(Minus.o(A, Times.o(X.Q, X.R))));
}
}


@ -1,84 +1,74 @@
package Jampack;
/**
Zspec implements the spectral (eigenvalue-eigenvector) decomposition
of a Hermitian matrix. Specifically, given a Hermitian matrix
A there is a unitary matrix A and a real diagonal matrix D
such that
* Zspec implements the spectral (eigenvalue-eigenvector) decomposition of a Hermitian matrix. Specifically, given a Hermitian
 * matrix A there is a unitary matrix U and a real diagonal matrix D such that
*
* <pre>
* D = U<sup>H</sup>AU.
* </pre>
*
* Zspec implements U as a Zmat and D as a Zdiagmat. It returns a JampackException if A is not Hermitian.
*
* <p>
* Comments: The decomposition is computed using <a href="Jampack.Schur.html">. Schur. </a> Eventually, there will be code that
* takes advantage of symmetry.
*
* <br>
* Since the diagonal matrix is real, it will be reimplemented as a Ddiagmat later.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
<pre>
* D = U<sup>H</sup>AU.
</pre>
public class Zspec {
Zspec implements U as a Zmat and D as a Zdiagmat. It returns
a JampackException if A is not Hermitian.
/** The matrix of eigenvectors */
public Zmat U;
<p>
Comments: The decomposition is computed using <a
href="Jampack.Schur.html">. Schur. </a> Eventually, there will be
code that takes advantage of symmetry.
/** The matrix of eigenvalues */
public Zdiagmat D;
<br>
Since the diagonal matrix is real, it will be reimplemented as a
Ddiagmat later.
/**
* Creates a Zspec from Zmat. Throws a JampackException if the matrix is not Hermitian.
*
* @param AA
* A Zmat
* @return The spectral decomposition of A
* @exception JampackException
 * Thrown if AA is not Hermitian.<br>
* Passed from below.
*/
@version Pre-alpha
@author G. W. Stewart
*/
public Zspec(Zmat AA) throws JampackException {
public class Zspec{
int i, j;
/** The matrix of eigenvectors */
public Zmat U;
if (AA.nrow != AA.ncol) {
throw new RuntimeException("Matrix not square.");
}
/** The matrix of eigenvalues */
public Zdiagmat D;
Zmat A = new Zmat(AA);
/**
Creates a Zspec from Zmat. Throws a JampackException if the
matrix is not Hermitian.
/* Check for A Hermitian. */
@param AA A Zmat
@return The spectral decomposition of A
@exception JampackException
Thown if AA is not Hermitian.<br>
Passed from below.
*/
for (i = 0; i < A.nrow; i++) {
if (A.im[i][i] != 0) {
throw new JampackException("Matrix not Hermitian");
}
for (j = 0; j < i; j++) {
if (A.re[i][j] != A.re[j][i] || A.im[i][j] != -A.im[j][i]) {
throw new JampackException("Matrix not Hermitian");
}
}
}
public Zspec(Zmat AA)
throws JampackException{
Schur S = new Schur(A);
int i, j;
if (AA.nrow != AA.ncol){
throw new RuntimeException
("Matrix not square.");
}
Zmat A = new Zmat(AA);
/* Check for A Hermitian. */
for (i=0; i<A.nrow; i++){
if (A.im[i][i] != 0){
throw new JampackException("Matrix not Hermitian");
}
for (j=0; j<i; j++){
if (A.re[i][j]!=A.re[j][i] || A.im[i][j]!=-A.im[j][i]){
throw new JampackException("Matrix not Hermitian");
}
}
}
Schur S = new Schur(A);
D = new Zdiagmat(S.T);
for (i=0; i<D.n; i++){
D.im[i] = 0.;
}
U = S.U;
}
D = new Zdiagmat(S.T);
for (i = 0; i < D.n; i++) {
D.im[i] = 0.;
}
U = S.U;
}
}


@ -1,32 +1,31 @@
package Jampack;
class ZspecTest{
class ZspecTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
int i, j;
int n = 5;
Z t = new Z();
int i, j;
int n = 5;
Z t = new Z();
Z Ary[][] = new Z[n][n];
Z Ary[][] = new Z[n][n];
for (i=0; i<n; i++){
for (j=i; j<n; j++){
Ary[i][j] = new Z(1./(i+j+1), i-j);
Ary[j][i] = new Z(Ary[i][j].re, -Ary[i][j].im);
}
Ary[i][i].im = 0.;
Ary[i][i].re = 1.;
}
Zmat A = new Zmat(Ary);
Zspec B = new Zspec(A);
Zmat C = Times.o(Times.o(H.o(B.U), A), B.U);
for (i = 0; i < n; i++) {
for (j = i; j < n; j++) {
Ary[i][j] = new Z(1. / (i + j + 1), i - j);
Ary[j][i] = new Z(Ary[i][j].re, -Ary[i][j].im);
}
Ary[i][i].im = 0.;
Ary[i][i].re = 1.;
}
Zmat A = new Zmat(Ary);
Zspec B = new Zspec(A);
Zmat C = Times.o(Times.o(H.o(B.U), A), B.U);
for (i=0; i<A.nr; i++){
C.re[i][i] = C.re[i][i] - B.D.re[i];
C.im[i][i] = C.im[i][i] - B.D.im[i];
}
Print.o(Norm.fro(C));
}
for (i = 0; i < A.nr; i++) {
C.re[i][i] = C.re[i][i] - B.D.re[i];
C.im[i][i] = C.im[i][i] - B.D.im[i];
}
Print.o(Norm.fro(C));
}
}


@ -1,295 +1,286 @@
package Jampack;
/**
Zsvd implements the singular value decomposion of a Zmat.
Specifically if X is an mxn matrix with m&gt;=n there are unitary
matrices U and V such that
<pre>
* U^H*X*V = | S |
* | 0 |
</pre>
where S = diag(s1,...,sm) with
<pre>
* s1 >= s2 >= ... >= sn >=0.
</pre>
If m&lt;n the decomposition has the form
<pre>
* U^H*X*V = | S 0 |,
</pre>
 * Zsvd implements the singular value decomposition of a Zmat. Specifically if X is an mxn matrix with m&gt;=n there are unitary
* matrices U and V such that
*
* <pre>
* U^H*X*V = | S |
* | 0 |
* </pre>
*
* where S = diag(s1,...,sm) with
*
* <pre>
* s1 >= s2 >= ... >= sn >=0.
* </pre>
*
* If m&lt;n the decomposition has the form
*
* <pre>
* U^H*X*V = | S 0 |,
* </pre>
*
* where S is diagonal of order m. The diagonals of S are the singular values of A. The columns of U are the left singular vectors
* of A and the columns of V are the right singular vectors.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
where S is diagonal of order m. The diagonals of S are the
singular values of A. The columns of U are the left singular
vectors of A and the columns of V are the right singular vectors.
public class Zsvd {
@version Pre-alpha
@author G. W. Stewart
*/
/** Limits the number of iterations in the SVD algorithm */
public static int MAXITER = 30;
public class Zsvd{
/** The matrix of left singular vectors */
public Zmat U;
/** Limits the number of iterations in the SVD algorithm */
public static int MAXITER = 30;
/** The matrix of right singular vectors */
public Zmat V;
/** The matrix of left singular vectors */
public Zmat U;
/** The diagonal matrix of singular values */
/** The matrix of right singular vectore */
public Zmat V;
public Zdiagmat S;
/** The diagonal matrix of singular values */
/**
* Computes the SVD of a Zmat XX. Throws a JampackException if the maximum number of iterations is exceeded.
*
* @param XX
* A Zmat
* @return The Zsvd of XX
* @exception JampackException
 * Thrown if maximum number of iterations is exceeded.<br>
* Passed from below.
*/
public Zdiagmat S;
public Zsvd(Zmat XX) throws JampackException {
/**
Computes the SVD of a Zmat XX. Throws a JampackException
if the maximum number of iterations is exceeded.
int i, il, iu, iter, j, k, kk, m, mc;
@param XX A Zmat
@return The Zsvd of XX
@exception JampackException
Thrown if maximimum number of iterations is
exceeded.<br>
Passed from below.
*/
double as, at, au, axkk, axkk1, dmax, dmin, ds, ea, es, shift, ss, t, tre;
public Zsvd(Zmat XX)
throws JampackException{
Z xkk, xkk1, xk1k1, ukj, vik1;
int i, il, iu, iter, j, k, kk, m, mc;
Rot P = new Rot();
double as, at, au, axkk, axkk1, dmax, dmin, ds, ea,
es, shift, ss, t, tre;
/* Initialization */
Z xkk, xkk1, xk1k1, ukj, vik1;
Z scale = new Z();
Z zr = new Z();
Rot P = new Rot();
Zmat X = new Zmat(XX);
/* Initialization */
Z1 h;
Z1 temp = new Z1(Math.max(X.nr, X.nc));
Z scale = new Z();
Z zr = new Z();
mc = Math.min(X.nr, X.nc);
double d[] = new double[mc];
double e[] = new double[mc];
Zmat X = new Zmat(XX);
Z1 h;
Z1 temp = new Z1(Math.max(X.nr,X.nc));
S = new Zdiagmat(mc);
U = Eye.o(X.nr);
V = Eye.o(X.nc);
mc = Math.min(X.nr, X.nc);
double d[] = new double[mc];
double e[] = new double[mc];
m = Math.min(X.rx, X.cx);
S = new Zdiagmat(mc);
U = Eye.o(X.nr);
V = Eye.o(X.nc);
/*
* Reduction to Bidiagonal form.
*/
m = Math.min(X.rx, X.cx);
for (k = X.bx; k <= m; k++) {
/*
Reduction to Bidiagonal form.
*/
h = House.genc(X, k, X.rx, k);
House.ua(h, X, k, X.rx, k + 1, X.cx, temp);
House.au(U, h, U.bx, U.rx, k, U.cx, temp);
for (k=X.bx; k<=m; k++){
if (k != X.cx) {
h = House.genr(X, k, k + 1, X.cx);
House.au(X, h, k + 1, X.rx, k + 1, X.cx, temp);
House.au(V, h, V.bx, V.rx, k + 1, V.cx, temp);
}
}
h = House.genc(X, k, X.rx, k);
House.ua(h, X, k, X.rx, k+1, X.cx, temp);
House.au(U, h, U.bx, U.rx, k, U.cx, temp);
if (k != X.cx){
h = House.genr(X, k, k+1, X.cx);
House.au(X, h, k+1, X.rx, k+1, X.cx, temp);
House.au(V, h, V.bx, V.rx, k+1, V.cx, temp);
}
}
/*
* Scale the bidiagonal matrix so that its elements are real.
*/
/*
Scale the bidiagonal matrix so that its elements are
real.
*/
for (k = X.bx; k <= m; k++) {
kk = k - X.bx;
xkk = X.get(k, k);
axkk = Z.abs(xkk);
X.put(k, k, new Z(axkk));
d[kk] = axkk;
scale.Div(scale.Conj(xkk), axkk);
if (k < X.cx) {
xkk1 = X.get(k, k + 1);
X.put(k, k + 1, xkk1.Times(scale, xkk1));
}
scale.Conj(scale);
for (i = U.bx; i <= U.rx; i++) {
U.put(i, k, zr.Times(U.get(i, k), scale));
}
for (k=X.bx; k<=m; k++){
kk = k-X.bx;
xkk = X.get(k,k);
axkk = Z.abs(xkk);
X.put(k, k, new Z(axkk));
d[kk] = axkk;
scale.Div(scale.Conj(xkk), axkk);
if (k<X.cx){
xkk1 = X.get(k,k+1);
X.put(k, k+1, xkk1.Times(scale, xkk1));
}
scale.Conj(scale);
for (i=U.bx; i<=U.rx; i++){
U.put(i, k, zr.Times(U.get(i, k), scale));
}
if (k < X.cx) {
if (k<X.cx){
xkk1 = X.get(k, k + 1);
axkk1 = Z.abs(xkk1);
X.put(k, k + 1, new Z(axkk1));
e[kk] = axkk1;
scale.Div(scale.Conj(xkk1), axkk1);
if (k < X.rx) {
xk1k1 = X.get(k + 1, k + 1);
X.put(k + 1, k + 1, xk1k1.Times(scale, xk1k1));
}
for (i = V.bx; i <= V.rx; i++) {
V.put(i, k + 1, zr.Times(V.get(i, k + 1), scale));
}
}
}
xkk1 = X.get(k,k+1);
axkk1 = Z.abs(xkk1);
X.put(k, k+1, new Z(axkk1));
e[kk] = axkk1;
scale.Div(scale.Conj(xkk1), axkk1);
if (k<X.rx){
xk1k1 = X.get(k+1,k+1);
X.put(k+1, k+1, xk1k1.Times(scale, xk1k1));
}
for (i=V.bx; i<=V.rx; i++){
V.put(i, k+1, zr.Times(V.get(i, k+1), scale));
}
}
}
m = m - X.bx; // Zero based loops from here on.
/*
* If X has more columns than rows, rotate out the extra superdiagonal element.
*/
if (X.nr < X.nc) {
t = e[m];
for (k = m; k >= 0; k--) {
Rot.genr(d[k], t, P);
d[k] = P.zr;
if (k != 0) {
t = P.sr * e[k - 1];
e[k - 1] = P.c * e[k - 1];
}
Rot.ap(V, P, V.bx, V.rx, k + V.bx, X.rx + 1);
Rot.ap(X, P, X.bx, X.rx, k + X.bx, X.rx + 1);
}
}
/*
 * Calculate the singular values of the bidiagonal matrix.
*/
iu = m;
iter = 0;
while (true) {
/*
* These two loops determine the rows (il to iu) to iterate on.
*/
while (iu > 0) {
if (Math.abs(e[iu - 1]) > 1.0e-16 * (Math.abs(d[iu]) + Math.abs(d[iu - 1])))
break;
e[iu - 1] = 0.;
iter = 0;
iu = iu - 1;
}
iter = iter + 1;
if (iter > MAXITER) {
throw new JampackException("Maximum number of iterations exceeded.");
}
if (iu == 0)
break;
m = m - X.bx; // Zero based loops from here on.
/*
If X has more columns than rows, rotate out the extra
superdiagonal element.
*/
if (X.nr < X.nc){
t = e[m];
for (k=m; k>=0; k--){
Rot.genr(d[k], t, P);
d[k] = P.zr;
if (k != 0){
t = P.sr*e[k-1];
e[k-1] = P.c*e[k-1];
}
Rot.ap(V, P, V.bx, V.rx, k+V.bx, X.rx+1);
Rot.ap(X, P, X.bx, X.rx, k+X.bx, X.rx+1);
}
}
/*
Caculate the singular values of the bidiagonal matrix.
*/
iu = m;
iter = 0;
while (true){
/*
These two loops determine the rows (il to iu) to
iterate on.
*/
while (iu > 0){
if (Math.abs(e[iu-1]) >
1.0e-16*(Math.abs(d[iu]) + Math.abs(d[iu-1])))
break;
e[iu-1] = 0.;
iter = 0;
iu = iu - 1;
}
iter = iter+1;
if (iter > MAXITER){
throw new JampackException
("Maximum number of iterations exceeded.");
}
if (iu == 0) break;
il = iu - 1;
while (il > 0) {
if (Math.abs(e[il - 1]) <= 1.0e-16 * (Math.abs(d[il]) + Math.abs(d[il - 1])))
break;
il = il - 1;
}
if (il != 0) {
e[il - 1] = 0.;
}
/*
* Compute the shift (formulas adapted from LAPACK).
*/
dmax = Math.max(Math.abs(d[iu]), Math.abs(d[iu - 1]));
dmin = Math.min(Math.abs(d[iu]), Math.abs(d[iu - 1]));
ea = Math.abs(e[iu - 1]);
if (dmin == 0.) {
shift = 0.;
} else if (ea < dmax) {
as = 1. + dmin / dmax;
at = (dmax - dmin) / dmax;
au = ea / dmax;
au = au * au;
shift = dmin * (2. / (Math.sqrt(as * as + au) + Math.sqrt(at * at + au)));
} else {
au = dmax / ea;
if (au == 0.) {
shift = (dmin * dmax) / ea;
} else {
as = 1. + dmin / dmax;
at = (dmax - dmin) / dmax;
t = 1. / (Math.sqrt(1. + (as * au) * (as * au)) + Math.sqrt(1. + (at * au) * (at * au)));
shift = (t * dmin) * au;
}
}
/*
* Perform the implicitly shifted QR step.
*/
t = Math.max(Math.max(Math.abs(d[il]), Math.abs(e[il])), shift);
ds = d[il] / t;
es = e[il] / t;
ss = shift / t;
Rot.genr((ds - ss) * (ds + ss), ds * es, P);
for (i = il; i < iu; i++) {
t = P.c * d[i] - P.sr * e[i];
e[i] = P.sr * d[i] + P.c * e[i];
d[i] = t;
t = -P.sr * d[i + 1];
d[i + 1] = P.c * d[i + 1];
Rot.ap(V, P, V.bx, V.rx, V.bx + i, V.bx + i + 1);
Rot.genc(d[i], t, P);
d[i] = P.zr;
t = P.c * e[i] + P.sr * d[i + 1];
d[i + 1] = P.c * d[i + 1] - P.sr * e[i];
e[i] = t;
Rot.aph(U, P, U.bx, U.rx, U.bx + i, U.bx + i + 1);
if (i != iu - 1) {
t = P.sr * e[i + 1];
e[i + 1] = P.c * e[i + 1];
Rot.genr(e[i], t, P);
e[i] = P.zr;
}
}
}
il = iu-1;
while(il > 0){
if(Math.abs(e[il-1]) <=
1.0e-16*(Math.abs(d[il]) + Math.abs(d[il-1])))
break;
il = il-1;
}
if (il != 0){
e[il-1] = 0.;
}
/*
Compute the shift (formulas adapted from LAPACK).
*/
dmax = Math.max(Math.abs(d[iu]), Math.abs(d[iu-1]));
dmin = Math.min(Math.abs(d[iu]), Math.abs(d[iu-1]));
ea = Math.abs(e[iu-1]);
if (dmin == 0.){
shift = 0.;
}
else if(ea < dmax){
as = 1. + dmin/dmax;
at = (dmax-dmin)/dmax;
au = ea/dmax;
au = au*au;
shift =dmin*(2./(Math.sqrt(as*as+au) + Math.sqrt(at*at+au)));
}
else{
au = dmax/ea;
if (au == 0.){
shift = (dmin*dmax)/ea;
}
else{
as = 1. + dmin/dmax;
at = (dmax-dmin)/dmax;
t = 1./(Math.sqrt(1.+(as*au)*(as*au))+
Math.sqrt(1.+(at*au)*(at*au)));
shift = (t*dmin)*au;
}
}
/*
Perform the implicitly shifted QR step.
*/
t = Math.max(Math.max(Math.abs(d[il]),Math.abs(e[il])), shift);
ds = d[il]/t; es=e[il]/t; ss = shift/t;
Rot.genr((ds-ss)*(ds+ss), ds*es, P);
for (i=il; i<iu; i++){
t = P.c*d[i] - P.sr*e[i];
e[i] = P.sr*d[i] + P.c*e[i];
d[i] = t;
t = -P.sr*d[i+1];
d[i+1] = P.c*d[i+1];
Rot.ap(V, P, V.bx, V.rx, V.bx+i, V.bx+i+1);
Rot.genc(d[i], t, P);
d[i] = P.zr;
t = P.c*e[i] + P.sr*d[i+1];
d[i+1] = P.c*d[i+1] - P.sr*e[i];
e[i] = t;
Rot.aph(U, P, U.bx, U.rx, U.bx+i, U.bx+i+1);
if (i != iu-1){
t = P.sr*e[i+1];
e[i+1] = P.c*e[i+1];
Rot.genr(e[i], t, P);
e[i] = P.zr;
}
}
}
/*
Sort the singular values, setting negative values of d
to positive.
*/
for (k=m; k>=0; k--){
if (d[k] < 0){
d[k] = -d[k];
for (i=0; i<X.nc; i++){
V.re[i][k] = -V.re[i][k];
V.im[i][k] = -V.im[i][k];
}
}
for (j=k; j<m; j++){
if(d[j] < d[j+1]){
t = d[j];
d[j] = d[j+1];
d[j+1] = t;
for (i=0; i<X.nr; i++){
t = U.re[i][j];
U.re[i][j] = U.re[i][j+1];
U.re[i][j+1] = t;
t = U.im[i][j];
U.im[i][j] = U.im[i][j+1];
U.im[i][j+1] = t;
}
for (i=0; i<X.nc; i++){
t = V.re[i][j];
V.re[i][j] = V.re[i][j+1];
V.re[i][j+1] = t;
t = V.im[i][j];
V.im[i][j] = V.im[i][j+1];
V.im[i][j+1] = t;
}
}
}
}
/*
Return the decompostion;
*/
S.re = d;
return;
}
/*
* Sort the singular values, setting negative values of d to positive.
*/
for (k = m; k >= 0; k--) {
if (d[k] < 0) {
d[k] = -d[k];
for (i = 0; i < X.nc; i++) {
V.re[i][k] = -V.re[i][k];
V.im[i][k] = -V.im[i][k];
}
}
for (j = k; j < m; j++) {
if (d[j] < d[j + 1]) {
t = d[j];
d[j] = d[j + 1];
d[j + 1] = t;
for (i = 0; i < X.nr; i++) {
t = U.re[i][j];
U.re[i][j] = U.re[i][j + 1];
U.re[i][j + 1] = t;
t = U.im[i][j];
U.im[i][j] = U.im[i][j + 1];
U.im[i][j + 1] = t;
}
for (i = 0; i < X.nc; i++) {
t = V.re[i][j];
V.re[i][j] = V.re[i][j + 1];
V.re[i][j + 1] = t;
t = V.im[i][j];
V.im[i][j] = V.im[i][j + 1];
V.im[i][j + 1] = t;
}
}
}
}
/*
 * Return the decomposition.
*/
S.re = d;
return;
}
}


@ -1,37 +1,36 @@
package Jampack;
class ZsvdTest{
class ZsvdTest {
public static void main(String[] args)
throws JampackException{
public static void main(String[] args) throws JampackException {
Parameters.setBaseIndex(0);
Parameters.setBaseIndex(0);
// Zsvd.MAXITER = 2;
// Zsvd.MAXITER = 2;
int m = 8;
int n = 10;
int m = 8;
int n = 10;
Z Ary[][] = new Z[m][n];
for (int i=0; i<m; i++){
for (int j=0; j<n; j++){
double di = i+1;
double dj = j+1;
Ary[i][j] = new Z(di/dj, i-j);
}
if (i < n){
Ary[i][i].re = Ary[i][i].re + 2*i + 1;
}
}
Z Ary[][] = new Z[m][n];
for (int i = 0; i < m; i++) {
for (int j = 0; j < n; j++) {
double di = i + 1;
double dj = j + 1;
Ary[i][j] = new Z(di / dj, i - j);
}
if (i < n) {
Ary[i][i].re = Ary[i][i].re + 2 * i + 1;
}
}
Zmat X = new Zmat(Ary);
Zsvd SVD = new Zsvd(X);
Zmat X = new Zmat(Ary);
Zsvd SVD = new Zsvd(X);
Zmat XX = Times.o(Times.o(H.o(SVD.U),X),SVD.V);
m = Math.min(XX.rx, XX.cx);
Zmat Xl = XX.get(XX.bx, m, XX.bx, m);
Xl = Minus.o(Xl, SVD.S);
XX.put(XX.bx, m, XX.bx, m, Xl);
Print.o(Norm.fro(XX));
}
Zmat XX = Times.o(Times.o(H.o(SVD.U), X), SVD.V);
m = Math.min(XX.rx, XX.cx);
Zmat Xl = XX.get(XX.bx, m, XX.bx, m);
Xl = Minus.o(Xl, SVD.S);
XX.put(XX.bx, m, XX.bx, m, Xl);
Print.o(Norm.fro(XX));
}
}


@ -1,57 +1,54 @@
package Jampack;
class Ztest {
class Ztest{
public static void main(String[] args) throws JampackException {
Z z1 = new Z();
System.out.println(z1.re); // 0
System.out.println(z1.im); // 0
z1 = new Z(1, -1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -1
z1 = new Z(z1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -1
System.out.println(Z.abs(z1)); // sqrt(2)
Z z2 = new Z(2, -5);
z2.Eq(z1.Plus(z1, z2));
System.out.println(z1.re); // 3
System.out.println(z1.im); // -6
System.out.println(z2.re); // 3
System.out.println(z2.im); // -6
z2.Minus(z1, z2.Eq(1, -1));
System.out.println(z2.re); // 2
System.out.println(z2.im); // -5
z2.Minus(z2);
System.out.println(z2.re); // -2
System.out.println(z2.im); // 5
Z z3 = new Z();
z3.Times(z1, z2.Conj(z1));
System.out.println(z3.re); // 45
System.out.println(z3.im); // 0
z1.Eq(2, 1);
z2.Eq(1, -1);
z1.Times(z1, z2);
z3.Div(z1, z2);
System.out.println(z3.re); // 2
System.out.println(z3.im); // 1
z1.Eq(1, -2);
z1.Times(z1, z1);
z1.Sqrt(z1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -2
z1.Eq(-2.1, 1.2e-9);
z1.Times(z1, z1);
z1.Sqrt(z1);
Print.o(z1.re, 22, 15);
Print.o(z1.im, 22, 15);
z1.Eq(-1, 0);
z1.Sqrt(z1);
System.out.println(z1.re); // 0
System.out.println(z1.im); // 1
public static void main(String[] args)
throws JampackException
{
Z z1 = new Z();
System.out.println(z1.re); // 0
System.out.println(z1.im); // 0
z1 = new Z(1,-1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -1
z1 = new Z(z1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -1
System.out.println(Z.abs(z1)); // sqrt(2)
Z z2 = new Z(2,-5);
z2.Eq(z1.Plus(z1,z2));
System.out.println(z1.re); // 3
System.out.println(z1.im); // -6
System.out.println(z2.re); // 3
System.out.println(z2.im); // -6
z2.Minus(z1,z2.Eq(1,-1));
System.out.println(z2.re); // 2
System.out.println(z2.im); // -5
z2.Minus(z2);
System.out.println(z2.re); // -2
System.out.println(z2.im); // 5
Z z3 = new Z();
z3.Times(z1,z2.Conj(z1));
System.out.println(z3.re); // 45
System.out.println(z3.im); // 0
z1.Eq(2,1);
z2.Eq(1,-1);
z1.Times(z1,z2);
z3.Div(z1,z2);
System.out.println(z3.re); // 2
System.out.println(z3.im); // 1
z1.Eq(1, -2);
z1.Times(z1,z1);
z1.Sqrt(z1);
System.out.println(z1.re); // 1
System.out.println(z1.im); // -2
z1.Eq(-2.1, 1.2e-9);
z1.Times(z1,z1);
z1.Sqrt(z1);
Print.o(z1.re, 22, 15);
Print.o(z1.im, 22,15);
z1.Eq(-1,0);
z1.Sqrt(z1);
System.out.println(z1.re); // 0
System.out.println(z1.im); // 1
}
}
}


@ -1,39 +1,33 @@
package Jampack;
/**
* Zutmat is a tag class of Zmat, which tells Jampack to expect an upper triangular matrix. The user is entirely responsible for
* the matrix having the proper form, and Jampack programs do no checking. For the constructors, see the corresponding
* constructors for <a href="Zmat.html"> Zmat </a>.
*
* @version Pre-alpha
* @author G. W. Stewart
*/
Zutmat is a tag class of Zmat, which tells Jampack to expect an
upper triangular matrix. The user is entirely responsible for the
matrix having the proper form, and Jampack programs do no checking.
For the constructors, see the corresponding constructors for <a
href="Zmat.html"> Zmat </a>.
public class Zutmat extends Zmat {
@version Pre-alpha
@author G. W. Stewart
public Zutmat(double re[][], double im[][]) throws JampackException {
super(re, im);
}
*/
public Zutmat(Z A[][]) {
super(A);
}
public class Zutmat extends Zmat{
public Zutmat(double A[][]) {
super(A);
}
public Zutmat(Zmat A) {
super(A);
}
public Zutmat(double re[][], double im[][])
throws JampackException{
super(re, im);
}
public Zutmat(Z A[][]){
super(A);
}
public Zutmat(double A[][]){
super(A);
}
public Zutmat(Zmat A){
super(A);
}
public Zutmat(int nrow, int ncol){
super(nrow, ncol);
}
public Zutmat(int nrow, int ncol) {
super(nrow, ncol);
}
}
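Zutmat is the same kind of tag as Zpsdmat, but for upper triangular matrices; the Zqrd class earlier in this diff publishes its R factor as one. A small sketch, not part of the commit:

package Jampack;

class ZutmatSketch {

	public static void main(String[] args) throws JampackException {
		// QR-factor a tall 3x2 real matrix; the R factor comes back as a
		// Zutmat because it is upper triangular by construction.
		Zmat A = new Zmat(new double[][] { { 2.0, 1.0 }, { 0.0, 3.0 }, { 1.0, 1.0 } });
		Zqrd qr = new Zqrd(A);
		Zutmat R = qr.R;
		Print.o(R);
	}
}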


@ -24,85 +24,70 @@ import java.util.Arrays;
/**
*
* Implements a cluster center that has a mean vector and a covariance matrix (and its inverse)
*
*
 * @author Oytun T&uuml;rk
*/
public class Cluster {
public double[] meanVector;
public double[][] covMatrix;
public double[][] invCovMatrix; //This is not supported yet (requires matrix inversion)
public boolean isDiagonalCovariance;
public Cluster()
{
this(0, true);
}
public Cluster(int dimension, boolean isDiagonalCovariance)
{
allocate(dimension, isDiagonalCovariance);
}
public void allocate(int dimension, boolean isDiagonalCovarianceIn)
{
if (dimension>0)
{
isDiagonalCovariance = isDiagonalCovarianceIn;
meanVector = new double[dimension];
Arrays.fill(meanVector, 0.0);
if (isDiagonalCovariance)
{
covMatrix = new double[1][];
covMatrix[0] = new double[dimension];
Arrays.fill(covMatrix[0], 0.0);
invCovMatrix = new double[1][];
invCovMatrix[0] = new double[dimension];
Arrays.fill(invCovMatrix[0], 0.0);
}
else
{
covMatrix = new double[dimension][];
for (int i=0; i<dimension; i++)
{
covMatrix[i] = new double[dimension];
Arrays.fill(covMatrix[i], 0.0);
}
invCovMatrix = new double[dimension][];
for (int i=0; i<dimension; i++)
{
invCovMatrix[i] = new double[dimension];
Arrays.fill(invCovMatrix[i], 0.0);
}
}
}
else
{
meanVector = null;
covMatrix = null;
invCovMatrix = null;
}
}
public double[] getCovarianceDiagonal()
{
double[] diagonal = null;
if (covMatrix!=null && covMatrix[0]!=null && covMatrix[0].length>0)
{
diagonal = new double[covMatrix[0].length];
if (isDiagonalCovariance)
System.arraycopy(covMatrix[0], 0, diagonal, 0, covMatrix[0].length);
else
{
for (int i=0; i<covMatrix.length; i++)
diagonal[i] = covMatrix[i][i];
}
}
return diagonal;
}
}
public double[] meanVector;
public double[][] covMatrix;
public double[][] invCovMatrix; // This is not supported yet (requires matrix inversion)
public boolean isDiagonalCovariance;
public Cluster() {
this(0, true);
}
public Cluster(int dimension, boolean isDiagonalCovariance) {
allocate(dimension, isDiagonalCovariance);
}
public void allocate(int dimension, boolean isDiagonalCovarianceIn) {
if (dimension > 0) {
isDiagonalCovariance = isDiagonalCovarianceIn;
meanVector = new double[dimension];
Arrays.fill(meanVector, 0.0);
if (isDiagonalCovariance) {
covMatrix = new double[1][];
covMatrix[0] = new double[dimension];
Arrays.fill(covMatrix[0], 0.0);
invCovMatrix = new double[1][];
invCovMatrix[0] = new double[dimension];
Arrays.fill(invCovMatrix[0], 0.0);
} else {
covMatrix = new double[dimension][];
for (int i = 0; i < dimension; i++) {
covMatrix[i] = new double[dimension];
Arrays.fill(covMatrix[i], 0.0);
}
invCovMatrix = new double[dimension][];
for (int i = 0; i < dimension; i++) {
invCovMatrix[i] = new double[dimension];
Arrays.fill(invCovMatrix[i], 0.0);
}
}
} else {
meanVector = null;
covMatrix = null;
invCovMatrix = null;
}
}
public double[] getCovarianceDiagonal() {
double[] diagonal = null;
if (covMatrix != null && covMatrix[0] != null && covMatrix[0].length > 0) {
diagonal = new double[covMatrix[0].length];
if (isDiagonalCovariance)
System.arraycopy(covMatrix[0], 0, diagonal, 0, covMatrix[0].length);
else {
for (int i = 0; i < covMatrix.length; i++)
diagonal[i] = covMatrix[i][i];
}
}
return diagonal;
}
}
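A minimal sketch of how Cluster is meant to be used (not part of the commit; it assumes the class sits in the marytts.machinelearning package like its neighbours in this diff):

import java.util.Arrays;

import marytts.machinelearning.Cluster;

class ClusterSketch {

	public static void main(String[] args) {
		// Two-dimensional cluster with a diagonal covariance: the variances
		// are stored in a single row, covMatrix[0].
		Cluster c = new Cluster(2, true);
		c.meanVector = new double[] { 1.0, -0.5 };
		c.covMatrix[0] = new double[] { 0.25, 0.5 };
		System.out.println(Arrays.toString(c.getCovarianceDiagonal())); // [0.25, 0.5]
	}
}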


@ -23,7 +23,6 @@ import java.util.Arrays;
import marytts.util.math.MathUtils;
/**
*
* Generates clustered data for testing machine learning algorithms
@ -31,97 +30,84 @@ import marytts.util.math.MathUtils;
 * @author Oytun T&uuml;rk
*/
public class ClusteredDataGenerator {
public static final int DEFAULT_NUM_SAMPLES_IN_CLUSTERS = 50;
public static final int DEFAULT_NUM_CLUSTERS = 10;
public static final double DEFAULT_INIT_MEAN = 10.0;
public static final double DEFAULT_VARIANCE = 1.0;
public double[] data;
public ClusteredDataGenerator()
{
double[] clusterMeans = new double[DEFAULT_NUM_CLUSTERS];
for (int i=0; i<DEFAULT_NUM_CLUSTERS; i++)
clusterMeans[i] = (i+1)*10.0;
init(clusterMeans);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters)
{
this(numClusters, numSamplesInClusters, DEFAULT_INIT_MEAN);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters, double initMean)
{
this(numClusters, numSamplesInClusters, initMean, DEFAULT_VARIANCE);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters, double initMean, double variance)
{
double[] clusterMeans = new double[numClusters];
for (int i=0; i<numClusters; i++)
clusterMeans[i] = (i+1)*initMean;
init(clusterMeans, variance, numSamplesInClusters);
}
public ClusteredDataGenerator(double[] clusterMeans)
{
this(clusterMeans, DEFAULT_VARIANCE);
}
public ClusteredDataGenerator(double[] clusterMeans, double variance)
{
init(clusterMeans, variance);
}
public ClusteredDataGenerator(double[] clusterMeans, double[] variances)
{
init(clusterMeans, variances, DEFAULT_NUM_SAMPLES_IN_CLUSTERS);
}
public ClusteredDataGenerator(double[] clusterMeans, double[] variances, int numSamplesPerCluster)
{
init(clusterMeans, variances, numSamplesPerCluster);
}
public void init(double[] clusterMeans)
{
init(clusterMeans, DEFAULT_VARIANCE);
}
public void init(double[] clusterMeans, double variance)
{
init(clusterMeans, variance, DEFAULT_NUM_SAMPLES_IN_CLUSTERS);
}
public void init(double[] clusterMeans, double variance, int numClusters)
{
double[] variances = new double[clusterMeans.length];
Arrays.fill(variances, variance);
init(clusterMeans, variances, numClusters);
}
public void init(double[] clusterMeans, double[] variances, int numSamplesPerCluster)
{
data = new double[numSamplesPerCluster*clusterMeans.length];
for (int i=0; i<clusterMeans.length; i++)
{
double[] tmp = MathUtils.random(numSamplesPerCluster);
MathUtils.adjustMean(tmp, clusterMeans[i]);
MathUtils.adjustVariance(tmp, variances[i]);
System.arraycopy(tmp, 0, data, i*numSamplesPerCluster, numSamplesPerCluster);
System.out.println("Target mean=" + String.valueOf(clusterMeans[i]) + " Target variance=" + String.valueOf(variances[i]) + " - Mean=" + String.valueOf(MathUtils.mean(tmp)) + " Variance=" + String.valueOf(MathUtils.variance(tmp)));
}
double m = MathUtils.mean(data);
double v = MathUtils.variance(data, m);
System.out.println(String.valueOf(m) + " " + String.valueOf(v));
}
public static void main(String[] args)
{
ClusteredDataGenerator c = new ClusteredDataGenerator();
}
}
public static final int DEFAULT_NUM_SAMPLES_IN_CLUSTERS = 50;
public static final int DEFAULT_NUM_CLUSTERS = 10;
public static final double DEFAULT_INIT_MEAN = 10.0;
public static final double DEFAULT_VARIANCE = 1.0;
public double[] data;
public ClusteredDataGenerator() {
double[] clusterMeans = new double[DEFAULT_NUM_CLUSTERS];
for (int i = 0; i < DEFAULT_NUM_CLUSTERS; i++)
clusterMeans[i] = (i + 1) * 10.0;
init(clusterMeans);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters) {
this(numClusters, numSamplesInClusters, DEFAULT_INIT_MEAN);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters, double initMean) {
this(numClusters, numSamplesInClusters, initMean, DEFAULT_VARIANCE);
}
public ClusteredDataGenerator(int numClusters, int numSamplesInClusters, double initMean, double variance) {
double[] clusterMeans = new double[numClusters];
for (int i = 0; i < numClusters; i++)
clusterMeans[i] = (i + 1) * initMean;
init(clusterMeans, variance, numSamplesInClusters);
}
public ClusteredDataGenerator(double[] clusterMeans) {
this(clusterMeans, DEFAULT_VARIANCE);
}
public ClusteredDataGenerator(double[] clusterMeans, double variance) {
init(clusterMeans, variance);
}
public ClusteredDataGenerator(double[] clusterMeans, double[] variances) {
init(clusterMeans, variances, DEFAULT_NUM_SAMPLES_IN_CLUSTERS);
}
public ClusteredDataGenerator(double[] clusterMeans, double[] variances, int numSamplesPerCluster) {
init(clusterMeans, variances, numSamplesPerCluster);
}
public void init(double[] clusterMeans) {
init(clusterMeans, DEFAULT_VARIANCE);
}
public void init(double[] clusterMeans, double variance) {
init(clusterMeans, variance, DEFAULT_NUM_SAMPLES_IN_CLUSTERS);
}
public void init(double[] clusterMeans, double variance, int numClusters) {
double[] variances = new double[clusterMeans.length];
Arrays.fill(variances, variance);
init(clusterMeans, variances, numClusters);
}
public void init(double[] clusterMeans, double[] variances, int numSamplesPerCluster) {
data = new double[numSamplesPerCluster * clusterMeans.length];
for (int i = 0; i < clusterMeans.length; i++) {
double[] tmp = MathUtils.random(numSamplesPerCluster);
MathUtils.adjustMean(tmp, clusterMeans[i]);
MathUtils.adjustVariance(tmp, variances[i]);
System.arraycopy(tmp, 0, data, i * numSamplesPerCluster, numSamplesPerCluster);
System.out.println("Target mean=" + String.valueOf(clusterMeans[i]) + " Target variance="
+ String.valueOf(variances[i]) + " - Mean=" + String.valueOf(MathUtils.mean(tmp)) + " Variance="
+ String.valueOf(MathUtils.variance(tmp)));
}
double m = MathUtils.mean(data);
double v = MathUtils.variance(data, m);
System.out.println(String.valueOf(m) + " " + String.valueOf(v));
}
public static void main(String[] args) {
ClusteredDataGenerator c = new ClusteredDataGenerator();
}
}
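Beyond the default constructor exercised in main() above, the generator can be driven with explicit cluster means; a short sketch (not part of the commit, package assumed as above):

import marytts.machinelearning.ClusteredDataGenerator;

class ClusteredDataSketch {

	public static void main(String[] args) {
		// Three clusters centred near 5, 10 and 15 with unit variance and the
		// default number of samples per cluster; the samples are concatenated
		// cluster by cluster into the public data array.
		double[] means = { 5.0, 10.0, 15.0 };
		ClusteredDataGenerator g = new ClusteredDataGenerator(means, 1.0);
		System.out.println(g.data.length); // 3 * DEFAULT_NUM_SAMPLES_IN_CLUSTERS
	}
}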

(Diff for this file not shown because of its large size.)


@ -20,10 +20,9 @@
package marytts.machinelearning;
public interface Discretizer {
public int discretize(int aValue);
public int[] getPossibleValues();
public int discretize(int aValue);
public int[] getPossibleValues();
}
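The interface is small enough that a toy implementation shows the contract directly. The class below is purely hypothetical (not part of MaryTTS); it snaps values to a fixed grid.

package marytts.machinelearning;

/**
 * Hypothetical example implementation of Discretizer: rounds to a fixed step.
 */
class StepDiscretizer implements Discretizer {

	private final int step;

	StepDiscretizer(int step) {
		this.step = step;
	}

	public int discretize(int aValue) {
		// Round to the nearest multiple of the step size.
		return Math.round(aValue / (float) step) * step;
	}

	public int[] getPossibleValues() {
		// Illustrative only: the ten smallest non-negative grid points.
		int[] values = new int[10];
		for (int i = 0; i < values.length; i++)
			values[i] = i * step;
		return values;
	}
}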


@ -24,7 +24,6 @@ import java.io.IOException;
import marytts.util.io.MaryRandomAccessFile;
import marytts.util.math.MathUtils;
/**
*
* Wrapper for a Gaussian Mixture Model
@ -32,197 +31,170 @@ import marytts.util.math.MathUtils;
 * @author Oytun T&uuml;rk
*/
public class GMM {
public double[] weights;
public GaussianComponent[] components;
public String info;
public double[] weights;
public GaussianComponent[] components;
public String info;
public int featureDimension;
public int totalComponents;
public boolean isDiagonalCovariance;
public GMM()
{
this(0, 0);
}
public GMM(int featureDimensionIn, int totalMixturesIn)
{
init(featureDimensionIn, totalMixturesIn, true);
}
public GMM(int featureDimensionIn, int totalComponentsIn, boolean isDiagonalCovarIn)
{
init(featureDimensionIn, totalComponentsIn, isDiagonalCovarIn);
}
public int featureDimension;
public int totalComponents;
public boolean isDiagonalCovariance;
public GMM(KMeansClusteringTrainer kmeansClusterer)
{
init(kmeansClusterer.getFeatureDimension(), kmeansClusterer.getTotalClusters(), kmeansClusterer.isDiagonalCovariance());
int i;
for (i=0; i<kmeansClusterer.getTotalClusters(); i++)
components[i] = new GaussianComponent(kmeansClusterer.clusters[i]);
}
public GMM(GMM existing)
{
featureDimension = existing.featureDimension;
totalComponents = existing.totalComponents;
isDiagonalCovariance = existing.isDiagonalCovariance;
if (existing.totalComponents>0 && existing.components!=null)
{
components = new GaussianComponent[totalComponents];
for (int i=0; i<totalComponents; i++)
components[i] = new GaussianComponent(existing.components[i]);
}
else
{
components = null;
totalComponents = 0;
}
if (existing.weights!=null)
{
weights = new double[existing.weights.length];
System.arraycopy(existing.weights, 0, weights, 0, existing.weights.length);
}
else
weights = null;
info = existing.info;
}
public GMM(String gmmFile)
{
try {
read(gmmFile);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void init(int featureDimensionIn, int totalMixturesIn, boolean isDiagonalCovarIn)
{
featureDimension = featureDimensionIn;
totalComponents = totalMixturesIn;
isDiagonalCovariance = isDiagonalCovarIn;
if (totalComponents>0)
{
components = new GaussianComponent[totalComponents];
weights = new double[totalComponents];
for (int i=0; i<totalComponents; i++)
{
components[i] = new GaussianComponent(featureDimensionIn, isDiagonalCovarIn);
weights[i] = 1.0/totalComponents;
}
}
else
{
components = null;
weights = null;
totalComponents = 0;
if (featureDimension<0)
featureDimension=0;
}
info = "";
}
public GMM() {
this(0, 0);
}
//P(x)
public double probability(double[] x)
{
double score = 0.0;
int i;
public GMM(int featureDimensionIn, int totalMixturesIn) {
init(featureDimensionIn, totalMixturesIn, true);
}
for (i=0; i<totalComponents; i++)
score += weights[i]*components[i].probability(x);
public GMM(int featureDimensionIn, int totalComponentsIn, boolean isDiagonalCovarIn) {
init(featureDimensionIn, totalComponentsIn, isDiagonalCovarIn);
}
return score;
}
//P(Ci|x)
public double[] componentProbabilities(double[] x)
{
double[] probs = new double[totalComponents];
int i;
double totalProb = 0.0;
if (isDiagonalCovariance)
{
for (i=0; i<totalComponents; i++)
{
probs[i] = weights[i]*MathUtils.getGaussianPdfValue(x, components[i].meanVector, components[i].covMatrix[0], components[i].getConstantTerm());
totalProb += probs[i];
}
}
else
{
for (i=0; i<totalComponents; i++)
{
probs[i] = weights[i]*MathUtils.getGaussianPdfValue(x, components[i].meanVector, components[i].getDetCovMatrix(), components[i].getInvCovMatrix());
totalProb += probs[i];
}
}
for (i=0; i<totalComponents; i++)
probs[i] /= totalProb;
return probs;
}
public void write(String gmmFile) throws IOException
{
MaryRandomAccessFile stream = new MaryRandomAccessFile(gmmFile, "rw");
write(stream);
stream.close();
}
public void write(MaryRandomAccessFile stream) throws IOException
{
stream.writeIntEndian(featureDimension);
stream.writeIntEndian(totalComponents);
stream.writeBooleanEndian(isDiagonalCovariance);
if (info!=null && info.length()>0)
{
stream.writeIntEndian(info.length());
stream.writeCharEndian(info.toCharArray());
}
else
stream.writeIntEndian(0);
stream.writeDoubleEndian(weights);
for (int i=0; i<totalComponents; i++)
components[i].write(stream);
}
public void read(String gmmFile) throws IOException
{
MaryRandomAccessFile stream = new MaryRandomAccessFile(gmmFile, "r");
read(stream);
stream.close();
}
public void read(MaryRandomAccessFile stream) throws IOException
{
featureDimension = stream.readIntEndian();
totalComponents = stream.readIntEndian();
isDiagonalCovariance = stream.readBooleanEndian();
int tmpLen = stream.readIntEndian();
if (tmpLen>0)
info = String.copyValueOf(stream.readCharEndian(tmpLen));
weights = stream.readDoubleEndian(totalComponents);
components = new GaussianComponent[totalComponents];
for (int i=0; i<totalComponents; i++)
{
components[i] = new GaussianComponent();
components[i].read(stream);
}
}
public GMM(KMeansClusteringTrainer kmeansClusterer) {
init(kmeansClusterer.getFeatureDimension(), kmeansClusterer.getTotalClusters(), kmeansClusterer.isDiagonalCovariance());
int i;
for (i = 0; i < kmeansClusterer.getTotalClusters(); i++)
components[i] = new GaussianComponent(kmeansClusterer.clusters[i]);
}
public GMM(GMM existing) {
featureDimension = existing.featureDimension;
totalComponents = existing.totalComponents;
isDiagonalCovariance = existing.isDiagonalCovariance;
if (existing.totalComponents > 0 && existing.components != null) {
components = new GaussianComponent[totalComponents];
for (int i = 0; i < totalComponents; i++)
components[i] = new GaussianComponent(existing.components[i]);
} else {
components = null;
totalComponents = 0;
}
if (existing.weights != null) {
weights = new double[existing.weights.length];
System.arraycopy(existing.weights, 0, weights, 0, existing.weights.length);
} else
weights = null;
info = existing.info;
}
public GMM(String gmmFile) {
try {
read(gmmFile);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
public void init(int featureDimensionIn, int totalMixturesIn, boolean isDiagonalCovarIn) {
featureDimension = featureDimensionIn;
totalComponents = totalMixturesIn;
isDiagonalCovariance = isDiagonalCovarIn;
if (totalComponents > 0) {
components = new GaussianComponent[totalComponents];
weights = new double[totalComponents];
for (int i = 0; i < totalComponents; i++) {
components[i] = new GaussianComponent(featureDimensionIn, isDiagonalCovarIn);
weights[i] = 1.0 / totalComponents;
}
} else {
components = null;
weights = null;
totalComponents = 0;
if (featureDimension < 0)
featureDimension = 0;
}
info = "";
}
// P(x)
public double probability(double[] x) {
double score = 0.0;
int i;
for (i = 0; i < totalComponents; i++)
score += weights[i] * components[i].probability(x);
return score;
}
// P(Ci|x)
public double[] componentProbabilities(double[] x) {
double[] probs = new double[totalComponents];
int i;
double totalProb = 0.0;
if (isDiagonalCovariance) {
for (i = 0; i < totalComponents; i++) {
probs[i] = weights[i]
* MathUtils.getGaussianPdfValue(x, components[i].meanVector, components[i].covMatrix[0],
components[i].getConstantTerm());
totalProb += probs[i];
}
} else {
for (i = 0; i < totalComponents; i++) {
probs[i] = weights[i]
* MathUtils.getGaussianPdfValue(x, components[i].meanVector, components[i].getDetCovMatrix(),
components[i].getInvCovMatrix());
totalProb += probs[i];
}
}
for (i = 0; i < totalComponents; i++)
probs[i] /= totalProb;
return probs;
}
public void write(String gmmFile) throws IOException {
MaryRandomAccessFile stream = new MaryRandomAccessFile(gmmFile, "rw");
write(stream);
stream.close();
}
public void write(MaryRandomAccessFile stream) throws IOException {
stream.writeIntEndian(featureDimension);
stream.writeIntEndian(totalComponents);
stream.writeBooleanEndian(isDiagonalCovariance);
if (info != null && info.length() > 0) {
stream.writeIntEndian(info.length());
stream.writeCharEndian(info.toCharArray());
} else
stream.writeIntEndian(0);
stream.writeDoubleEndian(weights);
for (int i = 0; i < totalComponents; i++)
components[i].write(stream);
}
public void read(String gmmFile) throws IOException {
MaryRandomAccessFile stream = new MaryRandomAccessFile(gmmFile, "r");
read(stream);
stream.close();
}
public void read(MaryRandomAccessFile stream) throws IOException {
featureDimension = stream.readIntEndian();
totalComponents = stream.readIntEndian();
isDiagonalCovariance = stream.readBooleanEndian();
int tmpLen = stream.readIntEndian();
if (tmpLen > 0)
info = String.copyValueOf(stream.readCharEndian(tmpLen));
weights = stream.readDoubleEndian(totalComponents);
components = new GaussianComponent[totalComponents];
for (int i = 0; i < totalComponents; i++) {
components[i] = new GaussianComponent();
components[i].read(stream);
}
}
}
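A structural sketch of the class above (not part of the commit): init() allocates the component array and uniform weights, after which the model can be filled in and evaluated.

import marytts.machinelearning.GMM;

class GMMSketch {

	public static void main(String[] args) {
		// A 3-dimensional, 2-component, diagonal-covariance model.
		GMM gmm = new GMM(3, 2, true);
		System.out.println(gmm.featureDimension); // 3
		System.out.println(gmm.totalComponents); // 2
		System.out.println(gmm.weights[0]); // 0.5
		// Once the component means and covariances are set (for example by
		// GMMTrainer below), probability(x) returns p(x) and
		// componentProbabilities(x) returns p(component | x) for a feature vector x.
	}
}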


@ -20,12 +20,11 @@
package marytts.machinelearning;
/**
* TO DO: Implement a GMM based classifier that takes as input several GMMs and data
* and outputs the probability of each GMM generating the data, the most likely GMM, etc
*
* TO DO: Implement a GMM based classifier that takes as input several GMMs and data and outputs the probability of each GMM
* generating the data, the most likely GMM, etc
*
* @author Oytun Türk
*/
public class GMMClassifier {
}
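A minimal sketch of the classifier described in the TO DO above, relying only on GMM.probability() from
this package; the method name and signature are hypothetical and not part of the existing (empty) class:

// Hypothetical helper that could live in GMMClassifier: index of the GMM most likely
// to have generated the observation x, by comparing log-likelihoods log P(x | GMM k).
public static int mostLikelyGmm(double[] x, GMM[] gmms) {
    int best = -1;
    double bestScore = Double.NEGATIVE_INFINITY;
    for (int k = 0; k < gmms.length; k++) {
        double score = Math.log(gmms[k].probability(x)); // log P(x | GMM k)
        if (score > bestScore) {
            bestScore = score;
            best = k;
        }
    }
    return best;
}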


@@ -32,33 +32,30 @@ import marytts.util.string.StringUtils;
*
* Expectation-Maximization (EM) based GMM training
*
* Reference:
* A. P. Dempster, N. M. Laird, and D. B. Rubin. Maximum likelihood from in-
* complete data via the em algorithm. Journal of the Royal Statistical Society:
* Series B, 39(1):1-38, November 1977.
* Reference: A. P. Dempster, N. M. Laird, and D. B. Rubin. Maximum likelihood from incomplete data via the EM algorithm.
* Journal of the Royal Statistical Society: Series B, 39(1):1-38, November 1977.
*
* @author Oytun Türk
*/
public class GMMTrainer {
public double[] logLikelihoods;
public double[] logLikelihoods;
public GMMTrainer()
{
logLikelihoods = null;
}
//This function calls the Expectation-Maximization (EM) algorithm
// to fit a Gaussian Mixture Model (GMM) to multi-dimensional data in x.
// Each row of x, i.e. x[0], x[1], ... corresponds to an observation vector.
// The dimension of each vector should be identical.
// Either a java implementation or a native C implementation (in Windows OS only) can be used.
// Note that native C implementation (GMMTrainer.exe) works 5 to 10 times faster.
// All training parameters are given by gmmParams (See GMMTrainerParams.java for details)
// Training consists of two steps:
// (a) Initialization using K-Means clustering
// (b) EM iterations to increase total log-likelihood of the model given the data
public GMM train(double[][] x, GMMTrainerParams gmmParams)
public GMMTrainer() {
logLikelihoods = null;
}
// This function calls the Expectation-Maximization (EM) algorithm
// to fit a Gaussian Mixture Model (GMM) to multi-dimensional data in x.
// Each row of x, i.e. x[0], x[1], ... corresponds to an observation vector.
// The dimension of each vector should be identical.
// Either a Java implementation or a native C implementation (on Windows only) can be used.
// Note that the native C implementation (GMMTrainer.exe) works 5 to 10 times faster.
// All training parameters are given by gmmParams (See GMMTrainerParams.java for details)
// Training consists of two steps:
// (a) Initialization using K-Means clustering
// (b) EM iterations to increase total log-likelihood of the model given the data
public GMM train(double[][] x, GMMTrainerParams gmmParams)
{
long startTime, endTime;
@@ -162,25 +159,23 @@ public class GMMTrainer {
return gmm;
}
/* EM algorithm to fit a GMM to multi-dimensional data
* x: Data matrix (Each row is another observation vector)
* initialGMM: Initial GMM model (can be initialized using K-Means clustering (See function train)
* emMinimumIterations: Minimum number of EM iterations for which the algorithm will not quit
* even when the total likelihood does not change much with additional iterations)
* emMaximumIterations: Maximum number of EM iterations for which the algorithm will quit even when total likelihood
* has not settled yet
* isUpdateCovariances: Update covariance matrices in EM iterations?
* tinyLogLikelihoodChangePercent: Threshold to compare percent decrease in total log-likelihood to stop iterations automatically
* minimumCovarianceAllowed: Minimum covariance value allowed - should be a small positive number to avoid ill-conditioned training
*
* Reference:
* A. P. Dempster, N. M. Laird, and D. B. Rubin. Maximum likelihood from incomplete data via the em algorithm.
* Journal of the Royal Statistical Society: Series B, 39(1):1-38, November 1977.
*
* Many practical tutorials for EM training of GMMs exist on the web, i.e.:
* http://bengio.abracadoudou.com/lectures/old/tex_gmm.pdf
*/
public GMM expectationMaximization(double[][] x,
/*
* EM algorithm to fit a GMM to multi-dimensional data x: Data matrix (Each row is another observation vector) initialGMM:
* Initial GMM model (can be initialized using K-Means clustering (See function train) emMinimumIterations: Minimum number of
* EM iterations for which the algorithm will not quit even when the total likelihood does not change much with additional
* iterations) emMaximumIterations: Maximum number of EM iterations for which the algorithm will quit even when total
* likelihood has not settled yet isUpdateCovariances: Update covariance matrices in EM iterations?
* tinyLogLikelihoodChangePercent: Threshold to compare percent decrease in total log-likelihood to stop iterations
* automatically minimumCovarianceAllowed: Minimum covariance value allowed - should be a small positive number to avoid
* ill-conditioned training
*
* Reference: A. P. Dempster, N. M. Laird, and D. B. Rubin. Maximum likelihood from incomplete data via the em algorithm.
* Journal of the Royal Statistical Society: Series B, 39(1):1-38, November 1977.
*
* Many practical tutorials for EM training of GMMs exist on the web, i.e.:
* http://bengio.abracadoudou.com/lectures/old/tex_gmm.pdf
*/
public GMM expectationMaximization(double[][] x,
GMM initialGmm,
int emMinimumIterations,
int emMaximumIterations,
@@ -410,142 +405,128 @@ public class GMMTrainer {
return gmm;
}
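// For reference, the standard EM updates that the parameter descriptions above refer to (textbook form,
// not transcribed from this implementation); x_t are the T observation rows, i = 1..M the components:
//   E-step:  gamma_{t,i} = w_i * N(x_t; mu_i, Sigma_i) / sum_j w_j * N(x_t; mu_j, Sigma_j)
//   M-step:  w_i     <- (1/T) * sum_t gamma_{t,i}
//            mu_i    <- sum_t gamma_{t,i} * x_t / sum_t gamma_{t,i}
//            Sigma_i <- sum_t gamma_{t,i} * (x_t - mu_i)(x_t - mu_i)^T / sum_t gamma_{t,i}
// Convergence is monitored through the total log-likelihood L = sum_t log sum_i w_i * N(x_t; mu_i, Sigma_i);
// iterations stop between emMinimumIterations and emMaximumIterations once the percent change in L drops
// below tinyLogLikelihoodChangePercent.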
public static void testEndianFileIO() throws IOException
{
boolean b1 = true;
char c1 = 'c';
short s1 = 111;
int i1 = 222;
double d1 = 33.3;
float f1 = 44.4f;
long l1 = 555;
String javaFile = "d:/endianJava.tmp";
MaryRandomAccessFile fp = new MaryRandomAccessFile(javaFile, "rw");
if (fp!=null)
{
fp.writeBooleanEndian(b1);
fp.writeCharEndian(c1);
fp.writeShortEndian(s1);
fp.writeIntEndian(i1);
fp.writeDoubleEndian(d1);
fp.writeFloatEndian(f1);
fp.writeLongEndian(l1);
fp.close();
}
public static void testEndianFileIO() throws IOException {
boolean b1 = true;
char c1 = 'c';
short s1 = 111;
int i1 = 222;
double d1 = 33.3;
float f1 = 44.4f;
long l1 = 555;
boolean b2;
char c2;
short s2;
int i2;
double d2;
float f2;
long l2;
String javaFile = "d:/endianJava.tmp";
MaryRandomAccessFile fp = new MaryRandomAccessFile(javaFile, "rw");
if (fp != null) {
fp.writeBooleanEndian(b1);
fp.writeCharEndian(c1);
fp.writeShortEndian(s1);
fp.writeIntEndian(i1);
fp.writeDoubleEndian(d1);
fp.writeFloatEndian(f1);
fp.writeLongEndian(l1);
String cFile = "d:/endianC.tmp";
if (FileUtils.exists(cFile))
{
MaryRandomAccessFile fp2 = new MaryRandomAccessFile(cFile, "r");
if (fp2!=null)
{
b2 = fp2.readBooleanEndian();
c2 = fp2.readCharEndian();
s2 = fp2.readShortEndian();
i2 = fp2.readIntEndian();
d2 = fp2.readDoubleEndian();
f2 = fp2.readFloatEndian();
l2 = fp2.readLongEndian();
fp.close();
}
fp2.close();
boolean b2;
char c2;
short s2;
int i2;
double d2;
float f2;
long l2;
if (b1!=b2)
System.out.println("Error in bool!\n");
if (c1!=c2)
System.out.println("Error in char!\n");
if (s1!=s2)
System.out.println("Error in short!\n");
if (i1!=i2)
System.out.println("Error in int!\n");
if (d1!=d2)
System.out.println("Error in double!\n");
if (f1!=f2)
System.out.println("Error in float!\n");
if (l1!=l2)
System.out.println("Error in long!\n");
}
else
System.out.println("C generated file cannot be opened...\n");
}
else
System.out.println("C generated file not found...\n");
}
public static void main(String[] args)
{
int numClusters = 20;
int numSamplesInClusters = 2000;
double[] variances = {0.01};
int vectorDim = 10;
ClusteredDataGenerator[] c = new ClusteredDataGenerator[vectorDim];
int i, j, n;
int totalVectors = 0;
for (i=0; i<vectorDim; i++)
{
if (i<variances.length)
c[i] = new ClusteredDataGenerator(numClusters, numSamplesInClusters, 10.0*(i+1), variances[i]);
else
c[i] = new ClusteredDataGenerator(numClusters, numSamplesInClusters, 10.0*(i+1), variances[0]);
}
totalVectors = c[0].data.length;
double[][] x = new double[totalVectors][vectorDim];
int counter=0;
for (n=0; n<c.length; n++)
{
for (i=0; i<c[n].data.length; i++)
x[i][n] = c[n].data[i];
}
x = MathUtils.randomSort(x);
double[] m = MathUtils.mean(x);
double[] v = MathUtils.variance(x, m);
System.out.println(String.valueOf(m[0]) + " " + String.valueOf(v[0]));
GMMTrainerParams gmmParams = new GMMTrainerParams();
gmmParams.totalComponents = numClusters;
gmmParams.isDiagonalCovariance = true;
gmmParams.kmeansMaxIterations = 100;
gmmParams.kmeansMinClusterChangePercent = 0.01;
gmmParams.kmeansMinSamplesInOneCluster = 10;
gmmParams.emMinIterations = 100;
gmmParams.emMaxIterations = 2000;
gmmParams.isUpdateCovariances = true;
gmmParams.tinyLogLikelihoodChangePercent = 0.001;
gmmParams.minCovarianceAllowed = 1e-5;
gmmParams.useNativeCLibTrainer = true;
GMMTrainer g = new GMMTrainer();
GMM gmm = g.train(x, gmmParams);
if (gmm!=null)
{
for (i=0; i<gmm.totalComponents; i++)
System.out.println("Gaussian #" + String.valueOf(i+1) + " mean=" + String.valueOf(gmm.components[i].meanVector[0]) + " variance=" + String.valueOf(gmm.components[i].covMatrix[0][0])+" prior=" + gmm.weights[i]);
}
/*
try {
testEndianFileIO();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
*/
}
String cFile = "d:/endianC.tmp";
if (FileUtils.exists(cFile)) {
MaryRandomAccessFile fp2 = new MaryRandomAccessFile(cFile, "r");
if (fp2 != null) {
b2 = fp2.readBooleanEndian();
c2 = fp2.readCharEndian();
s2 = fp2.readShortEndian();
i2 = fp2.readIntEndian();
d2 = fp2.readDoubleEndian();
f2 = fp2.readFloatEndian();
l2 = fp2.readLongEndian();
fp2.close();
if (b1 != b2)
System.out.println("Error in bool!\n");
if (c1 != c2)
System.out.println("Error in char!\n");
if (s1 != s2)
System.out.println("Error in short!\n");
if (i1 != i2)
System.out.println("Error in int!\n");
if (d1 != d2)
System.out.println("Error in double!\n");
if (f1 != f2)
System.out.println("Error in float!\n");
if (l1 != l2)
System.out.println("Error in long!\n");
} else
System.out.println("C generated file cannot be opened...\n");
} else
System.out.println("C generated file not found...\n");
}
public static void main(String[] args) {
int numClusters = 20;
int numSamplesInClusters = 2000;
double[] variances = { 0.01 };
int vectorDim = 10;
ClusteredDataGenerator[] c = new ClusteredDataGenerator[vectorDim];
int i, j, n;
int totalVectors = 0;
for (i = 0; i < vectorDim; i++) {
if (i < variances.length)
c[i] = new ClusteredDataGenerator(numClusters, numSamplesInClusters, 10.0 * (i + 1), variances[i]);
else
c[i] = new ClusteredDataGenerator(numClusters, numSamplesInClusters, 10.0 * (i + 1), variances[0]);
}
totalVectors = c[0].data.length;
double[][] x = new double[totalVectors][vectorDim];
int counter = 0;
for (n = 0; n < c.length; n++) {
for (i = 0; i < c[n].data.length; i++)
x[i][n] = c[n].data[i];
}
x = MathUtils.randomSort(x);
double[] m = MathUtils.mean(x);
double[] v = MathUtils.variance(x, m);
System.out.println(String.valueOf(m[0]) + " " + String.valueOf(v[0]));
GMMTrainerParams gmmParams = new GMMTrainerParams();
gmmParams.totalComponents = numClusters;
gmmParams.isDiagonalCovariance = true;
gmmParams.kmeansMaxIterations = 100;
gmmParams.kmeansMinClusterChangePercent = 0.01;
gmmParams.kmeansMinSamplesInOneCluster = 10;
gmmParams.emMinIterations = 100;
gmmParams.emMaxIterations = 2000;
gmmParams.isUpdateCovariances = true;
gmmParams.tinyLogLikelihoodChangePercent = 0.001;
gmmParams.minCovarianceAllowed = 1e-5;
gmmParams.useNativeCLibTrainer = true;
GMMTrainer g = new GMMTrainer();
GMM gmm = g.train(x, gmmParams);
if (gmm != null) {
for (i = 0; i < gmm.totalComponents; i++)
System.out.println("Gaussian #" + String.valueOf(i + 1) + " mean="
+ String.valueOf(gmm.components[i].meanVector[0]) + " variance="
+ String.valueOf(gmm.components[i].covMatrix[0][0]) + " prior=" + gmm.weights[i]);
}
/*
* try { testEndianFileIO(); } catch (IOException e) { // TODO Auto-generated catch block e.printStackTrace(); }
*/
}
}


@@ -23,237 +23,232 @@ import java.io.IOException;
import marytts.util.io.MaryRandomAccessFile;
/**
* Wrapper class for GMM training parameters
*
*
* @author Oytun Türk
*/
public class GMMTrainerParams {
//A set of default values for GMM training parameters
public static final int EM_TOTAL_COMPONENTS_DEFAULT = 1;
public static final boolean EM_IS_DIAGONAL_COVARIANCE_DEFAULT = true;
public static final int EM_MIN_ITERATIONS_DEFAULT = 500;
public static final int EM_MAX_ITERATIONS_DEFAULT = 2000;
public static final boolean EM_IS_UPDATE_COVARIANCES_DEFAULT = true;
public static final double EM_TINY_LOGLIKELIHOOD_CHANGE_PERCENT_DEFAULT = 0.0001;
public static final double EM_MIN_COVARIANCE_ALLOWED_DEFAULT = 1e-4;
public static final boolean EM_USE_NATIVE_C_LIB_TRAINER_DEFAULT = false;
//
public int totalComponents; //Total number of Gaussians in the GMM
public boolean isDiagonalCovariance; //Estimate diagonal covariance matrices?
// Full-covariance training is likely to result in ill-conditioned training due to insufficient training data
public int kmeansMaxIterations; //Maximum number of K-Means iterations to initialize the GMM
public double kmeansMinClusterChangePercent; //Stop K-Means iterations when the percentage of changed cluster assignments falls below this threshold
public int kmeansMinSamplesInOneCluster; //Minimum number of observations in one cluster while initializing the GMM with K-Means
public int emMinIterations; //Minimum number of EM iterations for which the algorithm will not quit
// even when the total likelihood does not change much with additional iterations
public int emMaxIterations; //Maximum number of EM iterations for which the algorithm will quit
// even when total likelihood has not settled yet
public boolean isUpdateCovariances; //Update covariance matrices in EM iterations?
public double tinyLogLikelihoodChangePercent; //Threshold to compare percent decrease in total log-likelihood to stop iterations automatically
public double minCovarianceAllowed; //Minimum covariance value allowed - should be a small positive number to avoid ill-conditioned training
public boolean useNativeCLibTrainer; //Use native C library trainer (Windows OS only)
//Default constructor
public GMMTrainerParams()
{
totalComponents = EM_TOTAL_COMPONENTS_DEFAULT;
isDiagonalCovariance = EM_IS_DIAGONAL_COVARIANCE_DEFAULT;
kmeansMaxIterations = KMeansClusteringTrainerParams.KMEANS_MAX_ITERATIONS_DEFAULT;
kmeansMinClusterChangePercent = KMeansClusteringTrainerParams.KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT;
kmeansMinSamplesInOneCluster = KMeansClusteringTrainerParams.KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT;
emMinIterations = EM_MIN_ITERATIONS_DEFAULT;
emMaxIterations = EM_MAX_ITERATIONS_DEFAULT;
isUpdateCovariances = EM_IS_UPDATE_COVARIANCES_DEFAULT;
tinyLogLikelihoodChangePercent = EM_TINY_LOGLIKELIHOOD_CHANGE_PERCENT_DEFAULT;
minCovarianceAllowed = EM_MIN_COVARIANCE_ALLOWED_DEFAULT;
useNativeCLibTrainer = EM_USE_NATIVE_C_LIB_TRAINER_DEFAULT;
}
//Constructor using an existing parameter set
public GMMTrainerParams(GMMTrainerParams existing)
{
totalComponents = existing.totalComponents;
isDiagonalCovariance = existing.isDiagonalCovariance;
kmeansMaxIterations = existing.kmeansMaxIterations;
kmeansMinClusterChangePercent = existing.kmeansMinClusterChangePercent;
kmeansMinSamplesInOneCluster = existing.kmeansMinSamplesInOneCluster;
emMinIterations = existing.emMinIterations;
emMaxIterations = existing.emMaxIterations;
isUpdateCovariances = existing.isUpdateCovariances;
tinyLogLikelihoodChangePercent = existing.tinyLogLikelihoodChangePercent;
minCovarianceAllowed = existing.minCovarianceAllowed;
useNativeCLibTrainer = existing.useNativeCLibTrainer;
}
//Constructor that reads GMM training parameters from a binary file stream
public GMMTrainerParams(MaryRandomAccessFile stream)
{
read(stream);
}
//Function to write GMM training parameters to a binary file stream
public void write(MaryRandomAccessFile stream)
{
if (stream!=null)
{
try {
stream.writeInt(totalComponents);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeBoolean(isDiagonalCovariance);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(kmeansMaxIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeDouble(kmeansMinClusterChangePercent);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(kmeansMinSamplesInOneCluster);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(emMinIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(emMaxIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeBoolean(isUpdateCovariances);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeDouble(tinyLogLikelihoodChangePercent);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
// A set of default values for GMM training parameters
public static final int EM_TOTAL_COMPONENTS_DEFAULT = 1;
public static final boolean EM_IS_DIAGONAL_COVARIANCE_DEFAULT = true;
public static final int EM_MIN_ITERATIONS_DEFAULT = 500;
public static final int EM_MAX_ITERATIONS_DEFAULT = 2000;
public static final boolean EM_IS_UPDATE_COVARIANCES_DEFAULT = true;
public static final double EM_TINY_LOGLIKELIHOOD_CHANGE_PERCENT_DEFAULT = 0.0001;
public static final double EM_MIN_COVARIANCE_ALLOWED_DEFAULT = 1e-4;
public static final boolean EM_USE_NATIVE_C_LIB_TRAINER_DEFAULT = false;
//
try {
stream.writeDouble(minCovarianceAllowed);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
public int totalComponents; // Total number of Gaussians in the GMM
public boolean isDiagonalCovariance; // Estimate diagonal covariance matrices?
// Full-covariance training is likely to result in ill-conditioned training due to
// insufficient training data
public int kmeansMaxIterations; // Maximum number of K-Means iterations to initialize the GMM
public double kmeansMinClusterChangePercent; // Stop K-Means iterations when the percentage of changed cluster assignments falls below this threshold
public int kmeansMinSamplesInOneCluster; // Minimum number of observations in one cluster while initializing the GMM with
// K-Means
public int emMinIterations; // Minimum number of EM iterations for which the algorithm will not quit
// even when the total likelihood does not change much with additional iterations
public int emMaxIterations; // Maximum number of EM iterations for which the algorithm will quit
// even when total likelihood has not settled yet
public boolean isUpdateCovariances; // Update covariance matrices in EM iterations?
public double tinyLogLikelihoodChangePercent; // Threshold to compare percent decrease in total log-likelihood to stop
// iterations automatically
public double minCovarianceAllowed; // Minimum covariance value allowed - should be a small positive number to avoid
// ill-conditioned training
public boolean useNativeCLibTrainer; // Use native C library trainer (Windows OS only)
try {
stream.writeBoolean(useNativeCLibTrainer);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
//Function that reads GMM training parameters from a binary file stream
public void read(MaryRandomAccessFile stream)
{
if (stream!=null)
{
try {
totalComponents = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
isDiagonalCovariance = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMaxIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMinClusterChangePercent = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMinSamplesInOneCluster = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
emMinIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
emMaxIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
isUpdateCovariances = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
tinyLogLikelihoodChangePercent = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
minCovarianceAllowed = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
useNativeCLibTrainer = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
// Default constructor
public GMMTrainerParams() {
totalComponents = EM_TOTAL_COMPONENTS_DEFAULT;
isDiagonalCovariance = EM_IS_DIAGONAL_COVARIANCE_DEFAULT;
kmeansMaxIterations = KMeansClusteringTrainerParams.KMEANS_MAX_ITERATIONS_DEFAULT;
kmeansMinClusterChangePercent = KMeansClusteringTrainerParams.KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT;
kmeansMinSamplesInOneCluster = KMeansClusteringTrainerParams.KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT;
emMinIterations = EM_MIN_ITERATIONS_DEFAULT;
emMaxIterations = EM_MAX_ITERATIONS_DEFAULT;
isUpdateCovariances = EM_IS_UPDATE_COVARIANCES_DEFAULT;
tinyLogLikelihoodChangePercent = EM_TINY_LOGLIKELIHOOD_CHANGE_PERCENT_DEFAULT;
minCovarianceAllowed = EM_MIN_COVARIANCE_ALLOWED_DEFAULT;
useNativeCLibTrainer = EM_USE_NATIVE_C_LIB_TRAINER_DEFAULT;
}
// Constructor using an existing parameter set
public GMMTrainerParams(GMMTrainerParams existing) {
totalComponents = existing.totalComponents;
isDiagonalCovariance = existing.isDiagonalCovariance;
kmeansMaxIterations = existing.kmeansMaxIterations;
kmeansMinClusterChangePercent = existing.kmeansMinClusterChangePercent;
kmeansMinSamplesInOneCluster = existing.kmeansMinSamplesInOneCluster;
emMinIterations = existing.emMinIterations;
emMaxIterations = existing.emMaxIterations;
isUpdateCovariances = existing.isUpdateCovariances;
tinyLogLikelihoodChangePercent = existing.tinyLogLikelihoodChangePercent;
minCovarianceAllowed = existing.minCovarianceAllowed;
useNativeCLibTrainer = existing.useNativeCLibTrainer;
}
// Constructor that reads GMM training parameters from a binary file stream
public GMMTrainerParams(MaryRandomAccessFile stream) {
read(stream);
}
// Function to write GMM training parameters to a binary file stream
public void write(MaryRandomAccessFile stream) {
if (stream != null) {
try {
stream.writeInt(totalComponents);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeBoolean(isDiagonalCovariance);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(kmeansMaxIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeDouble(kmeansMinClusterChangePercent);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(kmeansMinSamplesInOneCluster);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(emMinIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeInt(emMaxIterations);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeBoolean(isUpdateCovariances);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeDouble(tinyLogLikelihoodChangePercent);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeDouble(minCovarianceAllowed);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
stream.writeBoolean(useNativeCLibTrainer);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
// Function that reads GMM training parameters from a binary file stream
public void read(MaryRandomAccessFile stream) {
if (stream != null) {
try {
totalComponents = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
isDiagonalCovariance = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMaxIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMinClusterChangePercent = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
kmeansMinSamplesInOneCluster = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
emMinIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
emMaxIterations = stream.readInt();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
isUpdateCovariances = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
tinyLogLikelihoodChangePercent = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
minCovarianceAllowed = stream.readDouble();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
try {
useNativeCLibTrainer = stream.readBoolean();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
}


@@ -24,64 +24,55 @@ import java.io.IOException;
import marytts.util.io.MaryRandomAccessFile;
import marytts.util.math.MathUtils;
/**
*
* Implements a single Gaussian component with a mean vector and a covariance matrix
* It also computes terms for pdf computation out of this Gaussian component once the mean
* and covariance is specified
* Implements a single Gaussian component with a mean vector and a covariance matrix. It also computes the terms needed for pdf
* computation from this Gaussian component once the mean and covariance are specified.
*
* @author Oytun Türk
*/
public class GaussianComponent {
public double[] meanVector;
public double[][] covMatrix;
//These are used in pdf computation
private double[][] invCovMatrix;
private double detCovMatrix;
private double constantTerm;
private double constantTermLog;
//
public GaussianComponent()
{
this(0, true);
}
public GaussianComponent(int featureDimensionIn, boolean isDiagonal)
{
init(featureDimensionIn, isDiagonal);
}
public GaussianComponent(GaussianComponent existing)
{
init(existing.meanVector, existing.covMatrix);
}
public GaussianComponent(Cluster c)
{
init(c.meanVector, c.covMatrix);
}
public void init(int featureDimensionIn, boolean isDiagonal)
{
if (featureDimensionIn>0)
{
meanVector = new double[featureDimensionIn];
if (isDiagonal)
covMatrix = new double[1][featureDimensionIn];
else
covMatrix = new double[featureDimensionIn][featureDimensionIn];
}
else
{
meanVector = null;
covMatrix = null;
}
}
public void init(double[] meanVectorIn, double[][] covMatrixIn)
public double[] meanVector;
public double[][] covMatrix;
// These are used in pdf computation
private double[][] invCovMatrix;
private double detCovMatrix;
private double constantTerm;
private double constantTermLog;
//
public GaussianComponent() {
this(0, true);
}
public GaussianComponent(int featureDimensionIn, boolean isDiagonal) {
init(featureDimensionIn, isDiagonal);
}
public GaussianComponent(GaussianComponent existing) {
init(existing.meanVector, existing.covMatrix);
}
public GaussianComponent(Cluster c) {
init(c.meanVector, c.covMatrix);
}
public void init(int featureDimensionIn, boolean isDiagonal) {
if (featureDimensionIn > 0) {
meanVector = new double[featureDimensionIn];
if (isDiagonal)
covMatrix = new double[1][featureDimensionIn];
else
covMatrix = new double[featureDimensionIn][featureDimensionIn];
} else {
meanVector = null;
covMatrix = null;
}
}
public void init(double[] meanVectorIn, double[][] covMatrixIn)
{
setMeanVector(meanVectorIn);
setCovMatrix(covMatrixIn);
@@ -89,263 +80,222 @@ public class GaussianComponent {
for (int i=0; i<covMatrix.length; i++)
assert meanVector.length == covMatrix[i].length;
}
public void setMeanVector(double[] meanVectorIn)
{
setMeanVector(meanVectorIn, 0, meanVectorIn.length);
}
public void setMeanVector(double[] bigVector, int startIndex, int meanLength)
{
if (bigVector!=null && meanLength>0)
{
if (startIndex+meanLength>bigVector.length)
meanLength = bigVector.length-startIndex;
if (meanVector==null || meanLength!=meanVector.length)
meanVector = new double[meanLength];
public void setMeanVector(double[] meanVectorIn) {
setMeanVector(meanVectorIn, 0, meanVectorIn.length);
}
for (int i=0; i<meanLength; i++)
meanVector[i] = bigVector[startIndex+i];
}
else
meanVector = null;
}
public void setCovMatrix(double[][] covMatrixIn)
{
if (covMatrixIn.length==1)
setCovMatrix(covMatrixIn, 0, 0, covMatrixIn[0].length);
else
setCovMatrix(covMatrixIn, 0, 0, covMatrixIn.length);
}
public void setCovMatrix(double[][] bigCovMatrix, int rowStartIndex, int colStartIndex, int covLength)
{
if (bigCovMatrix!=null && covLength>0)
{
if (bigCovMatrix.length==1) //Diagonal
{
int startIndex = Math.max(rowStartIndex, colStartIndex);
if (startIndex+covLength>bigCovMatrix[0].length)
covLength = bigCovMatrix[0].length-startIndex;
if (covMatrix==null || covMatrix.length>1 || covMatrix[0].length!=covLength)
covMatrix = new double[1][covLength];
System.arraycopy(bigCovMatrix[0], startIndex, covMatrix[0], 0, covLength);
}
else //Full
{
int i, j;
for (i=0; i<bigCovMatrix.length; i++)
{
if (colStartIndex+covLength>bigCovMatrix[i].length)
covLength = bigCovMatrix[i].length-colStartIndex;
}
if (rowStartIndex+covLength>bigCovMatrix.length)
covLength = bigCovMatrix.length-rowStartIndex;
if (covMatrix==null)
covMatrix = new double[covLength][];
for (i=rowStartIndex; i<rowStartIndex+covLength; i++)
{
if (covMatrix[i-rowStartIndex]==null || covMatrix[i-rowStartIndex].length!=covLength)
covMatrix[i-rowStartIndex] = new double[covLength];
for (j=colStartIndex; j<colStartIndex+covLength; j++)
covMatrix[i-rowStartIndex][j-colStartIndex] = bigCovMatrix[i][j];
}
}
}
else
covMatrix = null;
public void setMeanVector(double[] bigVector, int startIndex, int meanLength) {
if (bigVector != null && meanLength > 0) {
if (startIndex + meanLength > bigVector.length)
meanLength = bigVector.length - startIndex;
setDerivedValues();
}
//Computes the inverse covariance, determinant, constant term to be used in pdf evaluation
public void setDerivedValues()
{
if (covMatrix!=null)
{
invCovMatrix = MathUtils.inverse(covMatrix);
detCovMatrix = MathUtils.determinant(covMatrix);
constantTerm = MathUtils.getGaussianPdfValueConstantTerm(covMatrix[0].length, detCovMatrix);
constantTermLog = MathUtils.getGaussianPdfValueConstantTermLog(covMatrix[0].length, detCovMatrix);
}
else
{
invCovMatrix = null;
detCovMatrix = 0.0;
constantTerm = 0.0;
constantTermLog = 0.0;
}
}
public boolean isDiagonalCovariance()
{
if (meanVector!=null && covMatrix!=null)
{
if (covMatrix.length==1 && meanVector.length>1 && covMatrix[0].length==meanVector.length)
return true;
}
return false;
}
public double[] getCovMatrixDiagonal()
{
if (covMatrix!=null)
return covMatrix[0];
else
return null;
}
public double[][] getInvCovMatrix()
{
return invCovMatrix;
}
public double getDetCovMatrix()
{
return detCovMatrix;
}
public double getConstantTerm()
{
return constantTerm;
}
public double getConstantTermLog()
{
return constantTermLog;
}
public void write(MaryRandomAccessFile stream) throws IOException
{
boolean isDiagonal = isDiagonalCovariance();
stream.writeBooleanEndian(isDiagonal);
if (meanVector!=null)
{
stream.writeIntEndian(meanVector.length);
stream.writeDoubleEndian(meanVector);
}
else
stream.writeIntEndian(0);
int i;
if (covMatrix!=null)
stream.writeIntEndian(covMatrix.length);
else
stream.writeIntEndian(0);
if (covMatrix!=null)
{
for (i=0; i<covMatrix.length; i++)
{
if (covMatrix[i]!=null)
{
stream.writeIntEndian(covMatrix[i].length);
stream.writeDoubleEndian(covMatrix[i]);
}
else
stream.writeIntEndian(0);
}
}
if (invCovMatrix!=null)
stream.writeIntEndian(invCovMatrix.length);
else
stream.writeIntEndian(0);
if (invCovMatrix!=null)
{
for (i=0; i<invCovMatrix.length; i++)
{
if (invCovMatrix[i]!=null)
{
stream.writeIntEndian(invCovMatrix[i].length);
stream.writeDoubleEndian(invCovMatrix[i]);
}
else
stream.writeIntEndian(0);
}
}
stream.writeDoubleEndian(detCovMatrix);
stream.writeDoubleEndian(constantTerm);
stream.writeDoubleEndian(constantTermLog);
}
public void read(MaryRandomAccessFile stream) throws IOException
{
boolean isDiagonal = stream.readBooleanEndian(); //This is for compatibility with C version
int tmpLen, tmpLen2;
tmpLen = stream.readIntEndian();
if (tmpLen>0)
meanVector = stream.readDoubleEndian(tmpLen);
else
meanVector = null;
int i;
tmpLen = stream.readIntEndian();
if (tmpLen>0)
{
covMatrix = new double[tmpLen][];
for (i=0; i<tmpLen; i++)
{
tmpLen2 = stream.readIntEndian();
if (tmpLen2>0)
covMatrix[i] = stream.readDoubleEndian(tmpLen2);
else
covMatrix[i] = null;
}
}
else
covMatrix = null;
tmpLen = stream.readIntEndian();
if (tmpLen>0)
{
invCovMatrix = new double[tmpLen][];
if (meanVector == null || meanLength != meanVector.length)
meanVector = new double[meanLength];
for (i=0; i<tmpLen; i++)
{
tmpLen2 = stream.readIntEndian();
if (tmpLen2>0)
invCovMatrix[i] = stream.readDoubleEndian(tmpLen2);
else
invCovMatrix[i] = null;
}
}
else
invCovMatrix = null;
detCovMatrix = stream.readDoubleEndian();
constantTerm = stream.readDoubleEndian();
constantTermLog = stream.readDoubleEndian();
}
public double probability(double[] x)
{
double P;
if (covMatrix.length==1) //Diagonal
P = MathUtils.getGaussianPdfValue(x, meanVector, covMatrix[0], getConstantTerm());
else //Full-covariance
P = MathUtils.getGaussianPdfValue(x, meanVector, getDetCovMatrix(), getInvCovMatrix());
return P;
}
for (int i = 0; i < meanLength; i++)
meanVector[i] = bigVector[startIndex + i];
} else
meanVector = null;
}
public void setCovMatrix(double[][] covMatrixIn) {
if (covMatrixIn.length == 1)
setCovMatrix(covMatrixIn, 0, 0, covMatrixIn[0].length);
else
setCovMatrix(covMatrixIn, 0, 0, covMatrixIn.length);
}
public void setCovMatrix(double[][] bigCovMatrix, int rowStartIndex, int colStartIndex, int covLength) {
if (bigCovMatrix != null && covLength > 0) {
if (bigCovMatrix.length == 1) // Diagonal
{
int startIndex = Math.max(rowStartIndex, colStartIndex);
if (startIndex + covLength > bigCovMatrix[0].length)
covLength = bigCovMatrix[0].length - startIndex;
if (covMatrix == null || covMatrix.length > 1 || covMatrix[0].length != covLength)
covMatrix = new double[1][covLength];
System.arraycopy(bigCovMatrix[0], startIndex, covMatrix[0], 0, covLength);
} else // Full
{
int i, j;
for (i = 0; i < bigCovMatrix.length; i++) {
if (colStartIndex + covLength > bigCovMatrix[i].length)
covLength = bigCovMatrix[i].length - colStartIndex;
}
if (rowStartIndex + covLength > bigCovMatrix.length)
covLength = bigCovMatrix.length - rowStartIndex;
if (covMatrix == null)
covMatrix = new double[covLength][];
for (i = rowStartIndex; i < rowStartIndex + covLength; i++) {
if (covMatrix[i - rowStartIndex] == null || covMatrix[i - rowStartIndex].length != covLength)
covMatrix[i - rowStartIndex] = new double[covLength];
for (j = colStartIndex; j < colStartIndex + covLength; j++)
covMatrix[i - rowStartIndex][j - colStartIndex] = bigCovMatrix[i][j];
}
}
} else
covMatrix = null;
setDerivedValues();
}
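// Usage note (indices illustrative): given a concatenated parameter vector, setMeanVector(big, 5, 3)
// copies big[5..7] into meanVector, and setCovMatrix(bigCov, 5, 5, 3) extracts the matching 3x3 block
// (or the 1x3 diagonal slice when bigCov has a single row) and then refreshes the derived pdf terms.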
// Computes the inverse covariance, determinant, constant term to be used in pdf evaluation
public void setDerivedValues() {
if (covMatrix != null) {
invCovMatrix = MathUtils.inverse(covMatrix);
detCovMatrix = MathUtils.determinant(covMatrix);
constantTerm = MathUtils.getGaussianPdfValueConstantTerm(covMatrix[0].length, detCovMatrix);
constantTermLog = MathUtils.getGaussianPdfValueConstantTermLog(covMatrix[0].length, detCovMatrix);
} else {
invCovMatrix = null;
detCovMatrix = 0.0;
constantTerm = 0.0;
constantTermLog = 0.0;
}
}
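// For reference, the derived values above correspond to the standard multivariate Gaussian density
// (d = feature dimension; assuming the MathUtils helpers follow this convention):
//   N(x; mu, Sigma)  = (2*pi)^(-d/2) * |Sigma|^(-1/2) * exp(-0.5 * (x - mu)^T * Sigma^(-1) * (x - mu))
//   constantTerm     = (2*pi)^(-d/2) * |Sigma|^(-1/2)
//   constantTermLog  = -(d/2) * log(2*pi) - 0.5 * log|Sigma|
// For a diagonal covariance, |Sigma| is the product of the stored variances and Sigma^(-1) holds their reciprocals.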
public boolean isDiagonalCovariance() {
if (meanVector != null && covMatrix != null) {
if (covMatrix.length == 1 && meanVector.length > 1 && covMatrix[0].length == meanVector.length)
return true;
}
return false;
}
public double[] getCovMatrixDiagonal() {
if (covMatrix != null)
return covMatrix[0];
else
return null;
}
public double[][] getInvCovMatrix() {
return invCovMatrix;
}
public double getDetCovMatrix() {
return detCovMatrix;
}
public double getConstantTerm() {
return constantTerm;
}
public double getConstantTermLog() {
return constantTermLog;
}
public void write(MaryRandomAccessFile stream) throws IOException {
boolean isDiagonal = isDiagonalCovariance();
stream.writeBooleanEndian(isDiagonal);
if (meanVector != null) {
stream.writeIntEndian(meanVector.length);
stream.writeDoubleEndian(meanVector);
} else
stream.writeIntEndian(0);
int i;
if (covMatrix != null)
stream.writeIntEndian(covMatrix.length);
else
stream.writeIntEndian(0);
if (covMatrix != null) {
for (i = 0; i < covMatrix.length; i++) {
if (covMatrix[i] != null) {
stream.writeIntEndian(covMatrix[i].length);
stream.writeDoubleEndian(covMatrix[i]);
} else
stream.writeIntEndian(0);
}
}
if (invCovMatrix != null)
stream.writeIntEndian(invCovMatrix.length);
else
stream.writeIntEndian(0);
if (invCovMatrix != null) {
for (i = 0; i < invCovMatrix.length; i++) {
if (invCovMatrix[i] != null) {
stream.writeIntEndian(invCovMatrix[i].length);
stream.writeDoubleEndian(invCovMatrix[i]);
} else
stream.writeIntEndian(0);
}
}
stream.writeDoubleEndian(detCovMatrix);
stream.writeDoubleEndian(constantTerm);
stream.writeDoubleEndian(constantTermLog);
}
public void read(MaryRandomAccessFile stream) throws IOException {
boolean isDiagonal = stream.readBooleanEndian(); // This is for compatibility with C version
int tmpLen, tmpLen2;
tmpLen = stream.readIntEndian();
if (tmpLen > 0)
meanVector = stream.readDoubleEndian(tmpLen);
else
meanVector = null;
int i;
tmpLen = stream.readIntEndian();
if (tmpLen > 0) {
covMatrix = new double[tmpLen][];
for (i = 0; i < tmpLen; i++) {
tmpLen2 = stream.readIntEndian();
if (tmpLen2 > 0)
covMatrix[i] = stream.readDoubleEndian(tmpLen2);
else
covMatrix[i] = null;
}
} else
covMatrix = null;
tmpLen = stream.readIntEndian();
if (tmpLen > 0) {
invCovMatrix = new double[tmpLen][];
for (i = 0; i < tmpLen; i++) {
tmpLen2 = stream.readIntEndian();
if (tmpLen2 > 0)
invCovMatrix[i] = stream.readDoubleEndian(tmpLen2);
else
invCovMatrix[i] = null;
}
} else
invCovMatrix = null;
detCovMatrix = stream.readDoubleEndian();
constantTerm = stream.readDoubleEndian();
constantTermLog = stream.readDoubleEndian();
}
public double probability(double[] x) {
double P;
if (covMatrix.length == 1) // Diagonal
P = MathUtils.getGaussianPdfValue(x, meanVector, covMatrix[0], getConstantTerm());
else
// Full-covariance
P = MathUtils.getGaussianPdfValue(x, meanVector, getDetCovMatrix(), getInvCovMatrix());
return P;
}
}


@@ -22,368 +22,327 @@ package marytts.machinelearning;
import marytts.signalproc.analysis.distance.DistanceComputer;
import marytts.util.math.MathUtils;
/**
* K-Means clustering training algorithm
*
* Reference:
* J. MacQueen, 1967, "Some methods for classification and analysis of multivariate observations",
* Proc. Fifth Berkeley Symp. on Math. Statist. and Prob., Vol. 1 (Univ. of Calif. Press, 1967), pp. 281-297.
*
* Reference: J. MacQueen, 1967, "Some methods for classification and analysis of multivariate observations", Proc. Fifth Berkeley
* Symp. on Math. Statist. and Prob., Vol. 1 (Univ. of Calif. Press, 1967), pp. 281-297.
*
* @author Oytun Türk
*/
public class KMeansClusteringTrainer {
public Cluster[] clusters; //Parameters of each cluster
public int[] totalObservationsInClusters; //Total number of observations in each cluster
public int[] clusterIndices; //Assigned cluster for each observation vector
public double[][] covMatrixGlobal; //Global covariance matrix of data
public double[][] invCovMatrixGlobal; //Inverse of global covariance matrix of data
//This function clusters multi-dimensional feature vectors using K-Means clustering procedure
// Each row of x, i.e. x[0], x[1], ... corresponds to an observation vector.
// The dimension of each vector should be identical.
// All training parameters are given by kmeansParams (See KMeansClusteringTrainerParams.java for details)
// Training consists of four steps:
// (a) Initialization (random assignment of cluster means using data points that are far away from each other + slight random shifts)
// (b) Hard clustering of samples according to new cluster means
// (c) Update of cluster means using assigned samples
// (d) Re-iteration of (b) and (c) until convergence, i.e. when overall cluster occupancy does not change much
public void train(double[][] x, KMeansClusteringTrainerParams kmeansParams)
{
if (kmeansParams.globalVariances==null)
{
double[] meanVector = MathUtils.mean(x, true);
kmeansParams.globalVariances = MathUtils.variance(x, meanVector, true);
}
int observations = x.length;
int dimension = x[0].length;
int c, k, k2, d, t, iter, i, j, totChanged;
int ind = -1;
boolean bCont;
double rnd, tmpDist;
double minDist = Double.MIN_VALUE;
double[][] m_new = new double[kmeansParams.numClusters][];
for (k=0; k<kmeansParams.numClusters; k++)
m_new[k] = new double[dimension];
int[][] b = new int[observations][];
for (t=0; t<observations; t++)
b[t] = new int[kmeansParams.numClusters];
int[][] b_old = new int[observations][];
for (t=0; t<observations; t++)
b_old[t] = new int[kmeansParams.numClusters];
int[] prev_totals = new int[kmeansParams.numClusters];
double changedPerc;
public class KMeansClusteringTrainer {
public Cluster[] clusters; // Parameters of each cluster
public int[] totalObservationsInClusters; // Total number of observations in each cluster
public int[] clusterIndices; // Assigned cluster for each observation vector
public double[][] covMatrixGlobal; // Global covariance matrix of data
public double[][] invCovMatrixGlobal; // Inverse of global covariance matrix of data
double[] mAll = new double[dimension];
clusters = new Cluster[kmeansParams.numClusters];
for (k=0; k<kmeansParams.numClusters; k++)
clusters[k] = new Cluster(dimension, kmeansParams.isDiagonalOutputCovariance);
for (k=1; k<=kmeansParams.numClusters; k++)
{
for (d=1; d<=dimension; d++)
clusters[k-1].meanVector[d-1] = 0.0;
for (t=1; t<=observations; t++)
b[t-1][k-1] = 0;
}
//Select initial cluster centers
mAll = MathUtils.mean(x, true);
k = 1;
double[] dists = new double[observations];
double[] tmp = new double[kmeansParams.numClusters+1];
double maxD = Double.MAX_VALUE;
int maxInd = -1;
while(k<=kmeansParams.numClusters)
{
for (t=1; t<=observations; t++)
{
if (k>1)
{
for (i=1; i<=k-1; i++)
tmp[i-1] = DistanceComputer.getNormalizedEuclideanDistance(clusters[i-1].meanVector, x[t-1], kmeansParams.globalVariances);
tmp[k-1] = DistanceComputer.getNormalizedEuclideanDistance(mAll, x[t-1], kmeansParams.globalVariances);
dists[t-1] = MathUtils.mean(tmp, 0, k-1);
}
else
{
dists[t-1] = DistanceComputer.getNormalizedEuclideanDistance(mAll, x[t-1], kmeansParams.globalVariances);
}
}
// This function clusters multi-dimensional feature vectors using the K-Means clustering procedure
// Each row of x, i.e. x[0], x[1], ... corresponds to an observation vector.
// The dimension of each vector should be identical.
// All training parameters are given by kmeansParams (See KMeansClusteringTrainerParams.java for details)
// Training consists of four steps:
// (a) Initialization (random assignment of cluster means using data points that are far away from each other + slight random
// shifts)
// (b) Hard clustering of samples according to new cluster means
// (c) Update of cluster means using assigned samples
// (d) Re-iteration of (b) and (c) until convergence, i.e. when overall cluster occupancy does not change much
public void train(double[][] x, KMeansClusteringTrainerParams kmeansParams) {
if (kmeansParams.globalVariances == null) {
double[] meanVector = MathUtils.mean(x, true);
kmeansParams.globalVariances = MathUtils.variance(x, meanVector, true);
}
for (t=1; t<=observations; t++)
{
if (t==1 || dists[t-1]>maxD)
{
maxD = dists[t-1];
maxInd = t;
}
}
for (d=0; d<dimension; d++)
clusters[k-1].meanVector[d] = x[maxInd-1][d];
//System.out.println("Cluster center " + String.valueOf(k) + " initialized...");
k++;
}
//
int observations = x.length;
int dimension = x[0].length;
int[] tinyClusterInds = new int[kmeansParams.numClusters];
int numTinyClusters = 0;
double[] tmps = new double[kmeansParams.numClusters];
int[] inds;
totalObservationsInClusters = new int[kmeansParams.numClusters];
clusterIndices = new int[observations];
int c, k, k2, d, t, iter, i, j, totChanged;
int ind = -1;
boolean bCont;
double rnd, tmpDist;
double minDist = Double.MIN_VALUE;
iter = 0;
bCont = true;
while(bCont)
{
for (t=1; t<=observations; t++) //Overall observations
{
for (i=1; i<=kmeansParams.numClusters; i++) //Overall classes
{
tmpDist = DistanceComputer.getNormalizedEuclideanDistance(clusters[i-1].meanVector, x[t-1], kmeansParams.globalVariances);
b[t-1][i-1] = 0;
if (i==1 || tmpDist<minDist)
{
minDist = tmpDist;
ind = i;
}
}
for (i=1; i<=kmeansParams.numClusters; i++) //Overall classes
{
if (i==ind)
b[t-1][i-1] = 1;
}
}
//Update means
for (i=1; i<=kmeansParams.numClusters; i++)
{
totalObservationsInClusters[i-1] = 0;
tinyClusterInds[i-1] = 0;
}
c=1;
for (i=1; i<=kmeansParams.numClusters; i++)
{
for (d=1; d<=dimension; d++)
m_new[i-1][d-1]=0.0f;
for (t=1; t<=observations; t++)
{
if (b[t-1][i-1]==1)
{
for (d=1; d<=dimension; d++)
m_new[i-1][d-1] = m_new[i-1][d-1] + x[t-1][d-1];
clusterIndices[t-1] = i-1; // zero-based
(totalObservationsInClusters[i-1])++;
}
}
//Do something if totalObservationsInClusters[i-1] is less than some value
// (i.e. there are too few observations for the cluster)
if ((double)totalObservationsInClusters[i-1]<kmeansParams.minSamplesInOneCluster)
{
tinyClusterInds[c-1] = i;
numTinyClusters++;
c++;
}
}
//
c=0;
for (i=0; i<totalObservationsInClusters.length; i++)
tmps[i] = totalObservationsInClusters[i];
inds = MathUtils.quickSort(tmps, 0, kmeansParams.numClusters-1);
for (i=1; i<=kmeansParams.numClusters; i++)
{
if (totalObservationsInClusters[i-1]>=kmeansParams.minSamplesInOneCluster)
{
for (d=1; d<=dimension; d++)
clusters[i-1].meanVector[d-1] = m_new[i-1][d-1]/totalObservationsInClusters[i-1];
}
else
{
for (d=1; d<=dimension; d++)
{
rnd = Math.random()*Math.abs(clusters[inds[kmeansParams.numClusters-c-1]].meanVector[d-1])*0.01;
clusters[i-1].meanVector[d-1] = clusters[inds[kmeansParams.numClusters-c-1]].meanVector[d-1] + rnd;
}
c++;
}
}
for (i=1; i<=kmeansParams.numClusters; i++)
prev_totals[i-1] = totalObservationsInClusters[i-1];
iter++;
totChanged = 0;
if (iter>1)
{
if (iter>=kmeansParams.maxIterations)
bCont=false;
for (t=1; t<=observations; t++)
{
for (i=1; i<=kmeansParams.numClusters; i++)
{
if (b_old[t-1][i-1] != b[t-1][i-1])
{
totChanged++;
break; //Count each difference once
}
}
}
changedPerc = (double)totChanged/observations*100.0;
if (changedPerc < kmeansParams.minClusterChangePercent) //stop if number of clusters changed is less than %MIN_CHANGE_PERCENT of total observation
bCont = false;
//System.out.println("K-Means iteration: " + String.valueOf(iter) + " with " + String.valueOf(changedPerc) + " percent of cluster assignments updated");
}
//else
// System.out.println("K-Means iteration: " + String.valueOf(iter) + " K-means initialized");
for (t=1; t<=observations; t++)
{
for (k2=1; k2<=kmeansParams.numClusters; k2++)
b_old[t-1][k2-1] = b[t-1][k2-1];
}
}
double[][] m_new = new double[kmeansParams.numClusters][];
for (k = 0; k < kmeansParams.numClusters; k++)
m_new[k] = new double[dimension];
//Finally, calculate the cluster covariances
double[][] tmpCov = null;
double[] diag = null;
int d1, d2;
for (i=0; i<kmeansParams.numClusters; i++)
{
if (totalObservationsInClusters[i]>0)
{
int[] indices = new int[totalObservationsInClusters[i]];
int count = 0;
for (t=0; t<observations; t++)
{
if (clusterIndices[t]==i)
indices[count++] = t;
}
int[][] b = new int[observations][];
for (t = 0; t < observations; t++)
b[t] = new int[kmeansParams.numClusters];
if (kmeansParams.isDiagonalOutputCovariance)
{
tmpCov = MathUtils.covariance(x, clusters[i].meanVector, true, indices);
diag = MathUtils.diagonal(tmpCov);
for (d1=0; d1<diag.length; d1++)
diag[d1] = Math.max(diag[d1], kmeansParams.minCovarianceAllowed);
System.arraycopy(diag, 0, clusters[i].covMatrix[0], 0, diag.length);
clusters[i].invCovMatrix[0] = MathUtils.inverse(clusters[i].covMatrix[0]);
}
else
{
clusters[i].covMatrix = MathUtils.covariance(x, clusters[i].meanVector, true, indices);
for (d1=0; d1<clusters[i].covMatrix.length; d1++)
{
for (d2=0; d2<clusters[i].covMatrix[d1].length; d2++)
clusters[i].covMatrix[d1][d2] = Math.max(clusters[i].covMatrix[d1][d2], kmeansParams.minCovarianceAllowed);
}
clusters[i].invCovMatrix = MathUtils.inverse(clusters[i].covMatrix);
}
}
}
//There can be no observations for some clusters, i.e. when the number of clusters is large as compared to the actual clusters in data
//In this case, assign largest cluster's mean, covariance, and inverse covariance to these empty clusters
for (i=0; i<kmeansParams.numClusters; i++)
tmps[i] = totalObservationsInClusters[i];
int[][] b_old = new int[observations][];
for (t = 0; t < observations; t++)
b_old[t] = new int[kmeansParams.numClusters];
inds = MathUtils.quickSort(tmps, 0, kmeansParams.numClusters-1);
int largestClusterInd = inds[kmeansParams.numClusters-1];
for (i=0; i<kmeansParams.numClusters; i++)
{
if (totalObservationsInClusters[i]<kmeansParams.minSamplesInOneCluster)
{
System.arraycopy(clusters[largestClusterInd].meanVector, 0, clusters[i].meanVector, 0, dimension);
if (kmeansParams.isDiagonalOutputCovariance)
{
System.arraycopy(clusters[largestClusterInd].covMatrix[0], 0, clusters[i].covMatrix[0], 0, dimension);
System.arraycopy(clusters[largestClusterInd].invCovMatrix[0], 0, clusters[i].invCovMatrix[0], 0, dimension);
}
else
{
for (j=0; j<dimension; j++)
{
System.arraycopy(clusters[largestClusterInd].covMatrix[j], 0, clusters[i].covMatrix[j], 0, dimension);
System.arraycopy(clusters[largestClusterInd].invCovMatrix[j], 0, clusters[i].invCovMatrix[j], 0, dimension);
}
}
}
}
//
if (kmeansParams.isDiagonalOutputCovariance)
{
tmpCov = MathUtils.covariance(x, true);
covMatrixGlobal = new double[1][tmpCov.length];
covMatrixGlobal[0] = MathUtils.diagonal(tmpCov);
for (d1=0; d1<covMatrixGlobal[0].length; d1++)
covMatrixGlobal[0][d1] = Math.max(covMatrixGlobal[0][d1], kmeansParams.minCovarianceAllowed);
invCovMatrixGlobal = new double[1][tmpCov.length];
invCovMatrixGlobal[0] = MathUtils.inverse(covMatrixGlobal[0]);
}
else
{
covMatrixGlobal = MathUtils.covariance(x);
for (d1=0; d1<covMatrixGlobal[0].length; d1++)
{
for (d2=0; d2<covMatrixGlobal[d1].length; d2++)
covMatrixGlobal[d1][d2] = Math.max(covMatrixGlobal[d1][d2], kmeansParams.minCovarianceAllowed);
}
invCovMatrixGlobal = MathUtils.inverse(covMatrixGlobal);
}
//System.out.println("K-Means clustering completed...");
}
public int getFeatureDimension()
{
if (clusters!=null && clusters[0].meanVector!=null)
return clusters[0].meanVector.length;
else
return 0;
}
public int getTotalClusters()
{
if (clusters!=null)
return clusters.length;
else
return 0;
}
public boolean isDiagonalCovariance()
{
if (clusters!=null)
return clusters[0].isDiagonalCovariance;
else
return false;
}
int[] prev_totals = new int[kmeansParams.numClusters];
double changedPerc;
double[] mAll = new double[dimension];
clusters = new Cluster[kmeansParams.numClusters];
for (k = 0; k < kmeansParams.numClusters; k++)
clusters[k] = new Cluster(dimension, kmeansParams.isDiagonalOutputCovariance);
for (k = 1; k <= kmeansParams.numClusters; k++) {
for (d = 1; d <= dimension; d++)
clusters[k - 1].meanVector[d - 1] = 0.0;
for (t = 1; t <= observations; t++)
b[t - 1][k - 1] = 0;
}
// Select initial cluster centers
mAll = MathUtils.mean(x, true);
k = 1;
double[] dists = new double[observations];
double[] tmp = new double[kmeansParams.numClusters + 1];
double maxD = Double.MAX_VALUE;
int maxInd = -1;
while (k <= kmeansParams.numClusters) {
for (t = 1; t <= observations; t++) {
if (k > 1) {
for (i = 1; i <= k - 1; i++)
tmp[i - 1] = DistanceComputer.getNormalizedEuclideanDistance(clusters[i - 1].meanVector, x[t - 1],
kmeansParams.globalVariances);
tmp[k - 1] = DistanceComputer.getNormalizedEuclideanDistance(mAll, x[t - 1], kmeansParams.globalVariances);
dists[t - 1] = MathUtils.mean(tmp, 0, k - 1);
} else {
dists[t - 1] = DistanceComputer.getNormalizedEuclideanDistance(mAll, x[t - 1], kmeansParams.globalVariances);
}
}
for (t = 1; t <= observations; t++) {
if (t == 1 || dists[t - 1] > maxD) {
maxD = dists[t - 1];
maxInd = t;
}
}
for (d = 0; d < dimension; d++)
clusters[k - 1].meanVector[d] = x[maxInd - 1][d];
// System.out.println("Cluster center " + String.valueOf(k) + " initialized...");
k++;
}
//
int[] tinyClusterInds = new int[kmeansParams.numClusters];
int numTinyClusters = 0;
double[] tmps = new double[kmeansParams.numClusters];
int[] inds;
totalObservationsInClusters = new int[kmeansParams.numClusters];
clusterIndices = new int[observations];
iter = 0;
bCont = true;
while (bCont) {
for (t = 1; t <= observations; t++) // Overall observations
{
for (i = 1; i <= kmeansParams.numClusters; i++) // Overall classes
{
tmpDist = DistanceComputer.getNormalizedEuclideanDistance(clusters[i - 1].meanVector, x[t - 1],
kmeansParams.globalVariances);
b[t - 1][i - 1] = 0;
if (i == 1 || tmpDist < minDist) {
minDist = tmpDist;
ind = i;
}
}
for (i = 1; i <= kmeansParams.numClusters; i++) // Overall classes
{
if (i == ind)
b[t - 1][i - 1] = 1;
}
}
// Update means
for (i = 1; i <= kmeansParams.numClusters; i++) {
totalObservationsInClusters[i - 1] = 0;
tinyClusterInds[i - 1] = 0;
}
c = 1;
for (i = 1; i <= kmeansParams.numClusters; i++) {
for (d = 1; d <= dimension; d++)
m_new[i - 1][d - 1] = 0.0f;
for (t = 1; t <= observations; t++) {
if (b[t - 1][i - 1] == 1) {
for (d = 1; d <= dimension; d++)
m_new[i - 1][d - 1] = m_new[i - 1][d - 1] + x[t - 1][d - 1];
clusterIndices[t - 1] = i - 1; // zero-based
(totalObservationsInClusters[i - 1])++;
}
}
// Do something if totalObservationsInClusters[i-1] is less than some value
// (i.e. there are too few observations for the cluster)
if ((double) totalObservationsInClusters[i - 1] < kmeansParams.minSamplesInOneCluster) {
tinyClusterInds[c - 1] = i;
numTinyClusters++;
c++;
}
}
//
c = 0;
for (i = 0; i < totalObservationsInClusters.length; i++)
tmps[i] = totalObservationsInClusters[i];
inds = MathUtils.quickSort(tmps, 0, kmeansParams.numClusters - 1);
for (i = 1; i <= kmeansParams.numClusters; i++) {
if (totalObservationsInClusters[i - 1] >= kmeansParams.minSamplesInOneCluster) {
for (d = 1; d <= dimension; d++)
clusters[i - 1].meanVector[d - 1] = m_new[i - 1][d - 1] / totalObservationsInClusters[i - 1];
} else {
for (d = 1; d <= dimension; d++) {
rnd = Math.random() * Math.abs(clusters[inds[kmeansParams.numClusters - c - 1]].meanVector[d - 1]) * 0.01;
clusters[i - 1].meanVector[d - 1] = clusters[inds[kmeansParams.numClusters - c - 1]].meanVector[d - 1]
+ rnd;
}
c++;
}
}
for (i = 1; i <= kmeansParams.numClusters; i++)
prev_totals[i - 1] = totalObservationsInClusters[i - 1];
iter++;
totChanged = 0;
if (iter > 1) {
if (iter >= kmeansParams.maxIterations)
bCont = false;
for (t = 1; t <= observations; t++) {
for (i = 1; i <= kmeansParams.numClusters; i++) {
if (b_old[t - 1][i - 1] != b[t - 1][i - 1]) {
totChanged++;
break; // Count each difference once
}
}
}
changedPerc = (double) totChanged / observations * 100.0;
if (changedPerc < kmeansParams.minClusterChangePercent) // stop if the percentage of changed cluster
// assignments falls below minClusterChangePercent
bCont = false;
// System.out.println("K-Means iteration: " + String.valueOf(iter) + " with " + String.valueOf(changedPerc) +
// " percent of cluster assignments updated");
}
// else
// System.out.println("K-Means iteration: " + String.valueOf(iter) + " K-means initialized");
for (t = 1; t <= observations; t++) {
for (k2 = 1; k2 <= kmeansParams.numClusters; k2++)
b_old[t - 1][k2 - 1] = b[t - 1][k2 - 1];
}
}
// Finally, calculate the cluster covariances
double[][] tmpCov = null;
double[] diag = null;
int d1, d2;
for (i = 0; i < kmeansParams.numClusters; i++) {
if (totalObservationsInClusters[i] > 0) {
int[] indices = new int[totalObservationsInClusters[i]];
int count = 0;
for (t = 0; t < observations; t++) {
if (clusterIndices[t] == i)
indices[count++] = t;
}
if (kmeansParams.isDiagonalOutputCovariance) {
tmpCov = MathUtils.covariance(x, clusters[i].meanVector, true, indices);
diag = MathUtils.diagonal(tmpCov);
for (d1 = 0; d1 < diag.length; d1++)
diag[d1] = Math.max(diag[d1], kmeansParams.minCovarianceAllowed);
System.arraycopy(diag, 0, clusters[i].covMatrix[0], 0, diag.length);
clusters[i].invCovMatrix[0] = MathUtils.inverse(clusters[i].covMatrix[0]);
} else {
clusters[i].covMatrix = MathUtils.covariance(x, clusters[i].meanVector, true, indices);
for (d1 = 0; d1 < clusters[i].covMatrix.length; d1++) {
for (d2 = 0; d2 < clusters[i].covMatrix[d1].length; d2++)
clusters[i].covMatrix[d1][d2] = Math.max(clusters[i].covMatrix[d1][d2],
kmeansParams.minCovarianceAllowed);
}
clusters[i].invCovMatrix = MathUtils.inverse(clusters[i].covMatrix);
}
}
}
// There can be no observations for some clusters, i.e. when the number of clusters is large as compared to the actual
// clusters in data
// In this case, assign largest cluster´s mean, covariance, and inverse covariance to these empty clusters
for (i = 0; i < kmeansParams.numClusters; i++)
tmps[i] = totalObservationsInClusters[i];
inds = MathUtils.quickSort(tmps, 0, kmeansParams.numClusters - 1);
int largestClusterInd = inds[kmeansParams.numClusters - 1];
for (i = 0; i < kmeansParams.numClusters; i++) {
if (totalObservationsInClusters[i] < kmeansParams.minSamplesInOneCluster) {
System.arraycopy(clusters[largestClusterInd].meanVector, 0, clusters[i].meanVector, 0, dimension);
if (kmeansParams.isDiagonalOutputCovariance) {
System.arraycopy(clusters[largestClusterInd].covMatrix[0], 0, clusters[i].covMatrix[0], 0, dimension);
System.arraycopy(clusters[largestClusterInd].invCovMatrix[0], 0, clusters[i].invCovMatrix[0], 0, dimension);
} else {
for (j = 0; j < dimension; j++) {
System.arraycopy(clusters[largestClusterInd].covMatrix[j], 0, clusters[i].covMatrix[j], 0, dimension);
System.arraycopy(clusters[largestClusterInd].invCovMatrix[j], 0, clusters[i].invCovMatrix[j], 0,
dimension);
}
}
}
}
//
if (kmeansParams.isDiagonalOutputCovariance) {
tmpCov = MathUtils.covariance(x, true);
covMatrixGlobal = new double[1][tmpCov.length];
covMatrixGlobal[0] = MathUtils.diagonal(tmpCov);
for (d1 = 0; d1 < covMatrixGlobal[0].length; d1++)
covMatrixGlobal[0][d1] = Math.max(covMatrixGlobal[0][d1], kmeansParams.minCovarianceAllowed);
invCovMatrixGlobal = new double[1][tmpCov.length];
invCovMatrixGlobal[0] = MathUtils.inverse(covMatrixGlobal[0]);
} else {
covMatrixGlobal = MathUtils.covariance(x);
for (d1 = 0; d1 < covMatrixGlobal[0].length; d1++) {
for (d2 = 0; d2 < covMatrixGlobal[d1].length; d2++)
covMatrixGlobal[d1][d2] = Math.max(covMatrixGlobal[d1][d2], kmeansParams.minCovarianceAllowed);
}
invCovMatrixGlobal = MathUtils.inverse(covMatrixGlobal);
}
// System.out.println("K-Means clustering completed...");
}
public int getFeatureDimension() {
if (clusters != null && clusters[0].meanVector != null)
return clusters[0].meanVector.length;
else
return 0;
}
public int getTotalClusters() {
if (clusters != null)
return clusters.length;
else
return 0;
}
public boolean isDiagonalCovariance() {
if (clusters != null)
return clusters[0].isDiagonalCovariance;
else
return false;
}
}
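
For readers following the clustering loop above, here is a minimal, self-contained sketch (illustration only, not the MARY TTS implementation) of the two ideas it combines: hard assignment of each observation to the nearest mean under a variance-normalized squared Euclidean distance, and the stop criterion based on the percentage of assignments that changed between iterations. All names are local to this sketch.

final class KMeansAssignmentSketch {
	// d(x, m) = sum_k (x[k]-m[k])^2 / var[k]; var[] plays the role of kmeansParams.globalVariances
	static double normalizedSquaredDistance(double[] x, double[] mean, double[] var) {
		double d = 0.0;
		for (int k = 0; k < x.length; k++) {
			double diff = x[k] - mean[k];
			d += diff * diff / var[k];
		}
		return d;
	}

	// Returns, for each observation, the index of the closest cluster mean (hard assignment).
	static int[] assign(double[][] x, double[][] means, double[] var) {
		int[] labels = new int[x.length];
		for (int t = 0; t < x.length; t++) {
			double best = Double.POSITIVE_INFINITY;
			for (int i = 0; i < means.length; i++) {
				double d = normalizedSquaredDistance(x[t], means[i], var);
				if (d < best) {
					best = d;
					labels[t] = i;
				}
			}
		}
		return labels;
	}

	// Percentage of observations whose cluster changed between two iterations;
	// the trainer stops once this drops below minClusterChangePercent.
	static double changedPercent(int[] oldLabels, int[] newLabels) {
		int changed = 0;
		for (int t = 0; t < oldLabels.length; t++) {
			if (oldLabels[t] != newLabels[t])
				changed++;
		}
		return 100.0 * changed / oldLabels.length;
	}
}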


@@ -25,69 +25,63 @@ package marytts.machinelearning;
* @author Oytun T&uuml;rk
*/
public class KMeansClusteringTrainerParams {
//A set of default values for K-Means training parameters
public static final int KMEANS_MAX_ITERATIONS_DEFAULT = 200;
public static final double KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT = 0.0001;
public static final boolean KMEANS_IS_DIAGONAL_COVARIANCE_DEFAULT = true;
public static final int KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT = 10;
private static final double KMEANS_MIN_COVARIANCE_ALLOWED_DEFAULT = 1e-5;
//
public int numClusters; //Number of clusters to be trained
public int maxIterations; //Maximum iterations to stop K-means training
public double minClusterChangePercent; //Minimum percent change in cluster assignments to stop K-Means iterations
public boolean isDiagonalOutputCovariance; //Estimate diagonal cluster covariances finally?
public int minSamplesInOneCluster; //Minimum number of observations allowed in one cluster
public double minCovarianceAllowed; //Minimum covariance value allowed for final cluster covariance matrices
public double[] globalVariances; //Global variance vector of whole data
//Default constructor
public KMeansClusteringTrainerParams()
{
numClusters = 0;
maxIterations = KMEANS_MAX_ITERATIONS_DEFAULT;
minClusterChangePercent = KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT;
isDiagonalOutputCovariance = KMEANS_IS_DIAGONAL_COVARIANCE_DEFAULT;
minSamplesInOneCluster = KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT;
minCovarianceAllowed = KMEANS_MIN_COVARIANCE_ALLOWED_DEFAULT;
globalVariances = null;
}
//Constructor using GMM training parameters
public KMeansClusteringTrainerParams(GMMTrainerParams gmmParams)
{
numClusters = gmmParams.totalComponents;
maxIterations = gmmParams.kmeansMaxIterations;
minClusterChangePercent = gmmParams.kmeansMinClusterChangePercent;
isDiagonalOutputCovariance = gmmParams.isDiagonalCovariance;
minSamplesInOneCluster = gmmParams.kmeansMinSamplesInOneCluster;
minCovarianceAllowed = gmmParams.minCovarianceAllowed;
globalVariances = null;
}
//Constructor using an existing parameter set
public KMeansClusteringTrainerParams(KMeansClusteringTrainerParams existing)
{
numClusters = existing.numClusters;
maxIterations = existing.maxIterations;
minClusterChangePercent = existing.minClusterChangePercent;
isDiagonalOutputCovariance = existing.isDiagonalOutputCovariance;
minSamplesInOneCluster = existing.minSamplesInOneCluster;
setGlobalVariances(existing.globalVariances);
}
//Set global variance values
public void setGlobalVariances(double[] globalVariancesIn)
{
if (globalVariancesIn!=null)
{
if (globalVariances==null || globalVariancesIn.length!=globalVariances.length)
globalVariances = new double[globalVariancesIn.length];
System.arraycopy(globalVariancesIn, 0, globalVariances, 0, globalVariancesIn.length);
}
}
// A set of default values for K-Means training parameters
public static final int KMEANS_MAX_ITERATIONS_DEFAULT = 200;
public static final double KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT = 0.0001;
public static final boolean KMEANS_IS_DIAGONAL_COVARIANCE_DEFAULT = true;
public static final int KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT = 10;
private static final double KMEANS_MIN_COVARIANCE_ALLOWED_DEFAULT = 1e-5;
//
public int numClusters; // Number of clusters to be trained
public int maxIterations; // Maximum iterations to stop K-means training
public double minClusterChangePercent; // Minimum percent change in cluster assignments to stop K-Means iterations
public boolean isDiagonalOutputCovariance; // Estimate diagonal cluster covariances finally?
public int minSamplesInOneCluster; // Minimum number of observations allowed in one cluster
public double minCovarianceAllowed; // Minimum covariance value allowed for final cluster covariance matrices
public double[] globalVariances; // Global variance vector of whole data
// Default constructor
public KMeansClusteringTrainerParams() {
numClusters = 0;
maxIterations = KMEANS_MAX_ITERATIONS_DEFAULT;
minClusterChangePercent = KMEANS_MIN_CLUSTER_CHANGE_PERCENT_DEFAULT;
isDiagonalOutputCovariance = KMEANS_IS_DIAGONAL_COVARIANCE_DEFAULT;
minSamplesInOneCluster = KMEANS_MIN_SAMPLES_IN_ONE_CLUSTER_DEFAULT;
minCovarianceAllowed = KMEANS_MIN_COVARIANCE_ALLOWED_DEFAULT;
globalVariances = null;
}
// Constructor using GMM training parameters
public KMeansClusteringTrainerParams(GMMTrainerParams gmmParams) {
numClusters = gmmParams.totalComponents;
maxIterations = gmmParams.kmeansMaxIterations;
minClusterChangePercent = gmmParams.kmeansMinClusterChangePercent;
isDiagonalOutputCovariance = gmmParams.isDiagonalCovariance;
minSamplesInOneCluster = gmmParams.kmeansMinSamplesInOneCluster;
minCovarianceAllowed = gmmParams.minCovarianceAllowed;
globalVariances = null;
}
// Constructor using an existing parameter set
public KMeansClusteringTrainerParams(KMeansClusteringTrainerParams existing) {
numClusters = existing.numClusters;
maxIterations = existing.maxIterations;
minClusterChangePercent = existing.minClusterChangePercent;
isDiagonalOutputCovariance = existing.isDiagonalOutputCovariance;
minSamplesInOneCluster = existing.minSamplesInOneCluster;
setGlobalVariances(existing.globalVariances);
}
// Set global variance values
public void setGlobalVariances(double[] globalVariancesIn) {
if (globalVariancesIn != null) {
if (globalVariances == null || globalVariancesIn.length != globalVariances.length)
globalVariances = new double[globalVariancesIn.length];
System.arraycopy(globalVariancesIn, 0, globalVariances, 0, globalVariancesIn.length);
}
}
}
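
A minimal usage sketch of the parameter class above (assuming marytts-signalproc on the classpath; the numbers are made up for illustration):

import marytts.machinelearning.KMeansClusteringTrainerParams;

public class KMeansParamsExample {
	public static void main(String[] args) {
		KMeansClusteringTrainerParams params = new KMeansClusteringTrainerParams();
		params.numClusters = 8; // required; the default constructor leaves it at 0
		params.maxIterations = 100; // overrides KMEANS_MAX_ITERATIONS_DEFAULT (200)
		params.minClusterChangePercent = 0.01; // stop when fewer than 0.01% of assignments change
		params.isDiagonalOutputCovariance = true;
		params.minSamplesInOneCluster = 20;
		double[] globalVariances = { 1.0, 0.5, 2.0 }; // hypothetical per-dimension variances of the data
		params.setGlobalVariances(globalVariances); // copies the array into params.globalVariances
	}
}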


@@ -23,28 +23,25 @@ import marytts.util.math.Polynomial;
/**
*
* Implements a cluster center that has a mean
*
* Implements a cluster center that has a mean
*
* @author Oytun T&uuml;rk, Marc Schröder
*/
public class PolynomialCluster {
private Polynomial meanPolynomial;
private Polynomial[] clusterMembers;
public PolynomialCluster(Polynomial meanPolynomial, Polynomial[] clusterMembers) {
this.meanPolynomial = meanPolynomial;
this.clusterMembers = clusterMembers;
}
public Polynomial getMeanPolynomial() {
return meanPolynomial;
}
public Polynomial[] getClusterMembers() {
return clusterMembers;
}
}
private Polynomial meanPolynomial;
private Polynomial[] clusterMembers;
public PolynomialCluster(Polynomial meanPolynomial, Polynomial[] clusterMembers) {
this.meanPolynomial = meanPolynomial;
this.clusterMembers = clusterMembers;
}
public Polynomial getMeanPolynomial() {
return meanPolynomial;
}
public Polynomial[] getClusterMembers() {
return clusterMembers;
}
}


@@ -28,31 +28,31 @@ import marytts.signalproc.display.FunctionGraph;
import marytts.util.math.MathUtils;
import marytts.util.math.Polynomial;
/**
* K-Means clustering training algorithm
*
* Reference:
* J. MacQueen, 1967, "Some methods for classification and analysis of multivariate observations",
* Proc. Fifth Berkeley Symp. on Math. Statist. and Prob., Vol. 1 (Univ. of Calif. Press, 1967), pp. 281-297.
*
*This version is adapted to work with a distance function between polynomials.
*
* Reference: J. MacQueen, 1967, "Some methods for classification and analysis of multivariate observations", Proc. Fifth Berkeley
* Symp. on Math. Statist. and Prob., Vol. 1 (Univ. of Calif. Press, 1967), pp. 281-297.
*
* This version is adapted to work with a distance function between polynomials.
*
* @author Oytun T&uuml;rk, Marc Schröder
*/
public class PolynomialKMeansClusteringTrainer {
/** This function clusters polynomials using K-Means clustering procedure, using a polynomial distance function.
* Training consists of four steps:
* (a) Initialization (random assignment of cluster means using data points that are far away from each other + slight random shifts)
* (b) Hard clustering of samples according to new cluster means
* (c) Update of cluster means using assigned samples
* (d) Re-iteration of (b) and (c) until convergence, i.e. when overall cluster occupancy does not change much
*
* @param polynomials the observations to cluster
* @param kmeansParams All training parameters are given by kmeansParams (See KMeansClusteringTrainerParams.java for details)
* @return the clusters trained
*/
public static PolynomialCluster[] train(Polynomial[] polynomials, KMeansClusteringTrainerParams kmeansParams) {
/**
* This function clusters polynomials using K-Means clustering procedure, using a polynomial distance function. Training
* consists of four steps: (a) Initialization (random assignment of cluster means using data points that are far away from
* each other + slight random shifts) (b) Hard clustering of samples according to new cluster means (c) Update of cluster
* means using assigned samples (d) Re-iteration of (b) and (c) until convergence, i.e. when overall cluster occupancy does
* not change much
*
* @param polynomials
* the observations to cluster
* @param kmeansParams
* All training parameters are given by kmeansParams (See KMeansClusteringTrainerParams.java for details)
* @return the clusters trained
*/
public static PolynomialCluster[] train(Polynomial[] polynomials, KMeansClusteringTrainerParams kmeansParams) {
int[] totalObservationsInClusters; //Total number of observations in each cluster
int[] clusterIndices; //Assigned cluster for each observation vector
@@ -240,55 +240,53 @@ public class PolynomialKMeansClusteringTrainer {
return clusters;
//System.out.println("K-Means clustering completed...");
}
public static void main(String[] args) {
// Test clustering with random polynomials, and visualise result
int order = 3;
int numPolynomials = 1000;
int numClusters = 50;
// Initialise with random data:
Polynomial[] ps = new Polynomial[numPolynomials];
for (int i=0; i<numPolynomials; i++) {
double[] coeffs = new double[order+1];
for (int c=0; c<coeffs.length; c++) {
coeffs[c] = Math.random();
}
ps[i] = new Polynomial(coeffs);
}
KMeansClusteringTrainerParams params = new KMeansClusteringTrainerParams();
params.numClusters = numClusters;
// Train:
PolynomialCluster[] clusters = PolynomialKMeansClusteringTrainer.train(ps, params);
// Visualise:
FunctionGraph clusterGraph = new FunctionGraph(0, 1, new double[1]);
clusterGraph.setYMinMax(0, 5);
clusterGraph.setPrimaryDataSeriesStyle(Color.BLUE, FunctionGraph.DRAW_DOTS, FunctionGraph.DOT_FULLCIRCLE);
JFrame jf = clusterGraph.showInJFrame("", false, true);
for (int i=0; i<clusters.length; i++) {
double[] meanValues = clusters[i].getMeanPolynomial().generatePolynomialValues(100, 0, 1);
clusterGraph.updateData(0, 1./meanValues.length, meanValues);
Polynomial[] members = clusters[i].getClusterMembers();
for (int m=0; m<members.length; m++) {
double[] pred = members[m].generatePolynomialValues(meanValues.length, 0, 1);
clusterGraph.addDataSeries(pred, Color.GRAY, FunctionGraph.DRAW_LINE, -1);
jf.repaint();
}
jf.setTitle("Cluster "+(i+1)+" of "+clusters.length+": "+members.length+" members");
jf.repaint();
try {
Thread.sleep(500);
} catch (InterruptedException ie) {}
}
System.exit(0);
}
public static void main(String[] args) {
// Test clustering with random polynomials, and visualise result
int order = 3;
int numPolynomials = 1000;
int numClusters = 50;
// Initialise with random data:
Polynomial[] ps = new Polynomial[numPolynomials];
for (int i = 0; i < numPolynomials; i++) {
double[] coeffs = new double[order + 1];
for (int c = 0; c < coeffs.length; c++) {
coeffs[c] = Math.random();
}
ps[i] = new Polynomial(coeffs);
}
KMeansClusteringTrainerParams params = new KMeansClusteringTrainerParams();
params.numClusters = numClusters;
// Train:
PolynomialCluster[] clusters = PolynomialKMeansClusteringTrainer.train(ps, params);
// Visualise:
FunctionGraph clusterGraph = new FunctionGraph(0, 1, new double[1]);
clusterGraph.setYMinMax(0, 5);
clusterGraph.setPrimaryDataSeriesStyle(Color.BLUE, FunctionGraph.DRAW_DOTS, FunctionGraph.DOT_FULLCIRCLE);
JFrame jf = clusterGraph.showInJFrame("", false, true);
for (int i = 0; i < clusters.length; i++) {
double[] meanValues = clusters[i].getMeanPolynomial().generatePolynomialValues(100, 0, 1);
clusterGraph.updateData(0, 1. / meanValues.length, meanValues);
Polynomial[] members = clusters[i].getClusterMembers();
for (int m = 0; m < members.length; m++) {
double[] pred = members[m].generatePolynomialValues(meanValues.length, 0, 1);
clusterGraph.addDataSeries(pred, Color.GRAY, FunctionGraph.DRAW_LINE, -1);
jf.repaint();
}
jf.setTitle("Cluster " + (i + 1) + " of " + clusters.length + ": " + members.length + " members");
jf.repaint();
try {
Thread.sleep(500);
} catch (InterruptedException ie) {
}
}
System.exit(0);
}
}
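
The distance function between polynomials that the trainer relies on is defined in code outside this hunk. Purely as an illustration (not necessarily the distance MARY uses), one plausible choice is the mean squared difference of the two polynomials' values sampled on [0, 1], using the same Polynomial.generatePolynomialValues call as the demo above:

import marytts.util.math.Polynomial;

public class PolynomialDistanceSketch {
	// Mean squared difference of two polynomials sampled at numSamples points on [0, 1].
	// Illustrative only; the trainer's actual distance function is not shown in this diff.
	public static double meanSquaredDistance(Polynomial p, Polynomial q, int numSamples) {
		double[] pv = p.generatePolynomialValues(numSamples, 0, 1);
		double[] qv = q.generatePolynomialValues(numSamples, 0, 1);
		double sum = 0.0;
		for (int i = 0; i < numSamples; i++) {
			double diff = pv[i] - qv[i];
			sum += diff * diff;
		}
		return sum / numSamples;
	}
}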


@@ -24,33 +24,28 @@ import marytts.signalproc.window.Window;
/**
*
* @author Marc Schr&ouml;der
*
* A set of static getters for System properties.
*
*
* A set of static getters for System properties.
*
*/
public class Defaults
{
public static int getWindowSize()
{
return Integer.getInteger("signalproc.default.windowsize", 512).intValue();
}
public class Defaults {
public static int getWindowSize() {
return Integer.getInteger("signalproc.default.windowsize", 512).intValue();
}
public static int getWindowType()
{
return Window.getTypeByName(System.getProperty("signalproc.default.window", "HAMMING"));
}
public static int getWindowType() {
return Window.getTypeByName(System.getProperty("signalproc.default.window", "HAMMING"));
}
public static int getFFTSize()
{
return Integer.getInteger("signalproc.default.fftsize", 1024).intValue();
}
public static int getFrameShift()
{
int shift = Integer.getInteger("signalproc.default.frameshift", -1).intValue();
if (shift == -1) shift = getWindowSize()/2;
return shift;
}
public static int getFFTSize() {
return Integer.getInteger("signalproc.default.fftsize", 1024).intValue();
}
public static int getFrameShift() {
int shift = Integer.getInteger("signalproc.default.frameshift", -1).intValue();
if (shift == -1)
shift = getWindowSize() / 2;
return shift;
}
}
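
Since the getters above read ordinary JVM system properties, the signal-processing defaults can be overridden per run, e.g. with -Dsignalproc.default.windowsize=1024 on the command line or programmatically as below. The package of the Defaults class is assumed here (its package line lies outside this hunk).

import marytts.signalproc.Defaults; // assumed package; not visible in this hunk

public class DefaultsExample {
	public static void main(String[] args) {
		System.setProperty("signalproc.default.windowsize", "1024");
		System.setProperty("signalproc.default.fftsize", "2048");
		int win = Defaults.getWindowSize(); // 1024
		int fft = Defaults.getFFTSize(); // 2048
		int shift = Defaults.getFrameShift(); // no frameshift property set, so windowsize/2 = 512
		System.out.println("window=" + win + " fft=" + fft + " shift=" + shift);
	}
}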


@@ -30,414 +30,373 @@ import marytts.util.math.MathUtils;
import marytts.util.signal.SignalProcUtils;
import marytts.util.string.StringUtils;
/**
* Generic utilities for voice conversion
*
*
* @author Oytun T&uuml;rk
*/
public class AdaptationUtils {
public static int ALL_AVAILABLE_TRAINING_FRAMES = -1;
public class AdaptationUtils {
//An optimal alignment is found by dynamic programming if the labels are not identical
public static IndexMap mapFramesFeatures(String sourceLabelFile, String targetLabelFile,
String sourceFeatureFile, String targetFeatureFile,
int vocalTractFeature,
String[] labelsToExcludeFromTraining)
throws IOException
{
IndexMap im = null;
public static int ALL_AVAILABLE_TRAINING_FRAMES = -1;
//Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
//Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
if (vocalTractFeature==BaselineFeatureExtractor.LSF_FEATURES)
{
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
}
else if (vocalTractFeature==BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES)
{
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
// An optimal alignment is found by dynamic programming if the labels are not identical
public static IndexMap mapFramesFeatures(String sourceLabelFile, String targetLabelFile, String sourceFeatureFile,
String targetFeatureFile, int vocalTractFeature, String[] labelsToExcludeFromTraining) throws IOException {
IndexMap im = null;
if (hdr1!=null && hdr2!=null && sourceLabels.items!=null && targetLabels.items!=null)
{
//Find the optimum alignment between the source and the target labels since the phone sequences may not be identical due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
// Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
if (labelMap!=null)
{
int j, srcLabInd, tgtLabInd, tgtFrmInd;
double time1, time2;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
// Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
srcLabInd = 0;
if (vocalTractFeature == BaselineFeatureExtractor.LSF_FEATURES) {
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
} else if (vocalTractFeature == BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES) {
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
//Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(hdr1.numfrm,2);
if (hdr1 != null && hdr2 != null && sourceLabels.items != null && targetLabels.items != null) {
// Find the optimum alignment between the source and the target labels since the phone sequences may not be identical
// due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
for (j=0; j<hdr1.numfrm; j++)
{
time1 = SignalProcUtils.frameIndex2Time(j, hdr1.winsize, hdr1.skipsize);
if (labelMap != null) {
int j, srcLabInd, tgtLabInd, tgtFrmInd;
double time1, time2;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
while (time1>sourceLabels.items[srcLabInd].time)
{
srcLabInd++;
if (srcLabInd>sourceLabels.items.length-1)
{
srcLabInd = sourceLabels.items.length-1;
break;
}
}
srcLabInd = 0;
tgtLabInd = StringUtils.findInMap(labelMap, srcLabInd);
// Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(hdr1.numfrm, 2);
if (tgtLabInd>=0 && sourceLabels.items[srcLabInd].phn.compareTo(targetLabels.items[tgtLabInd].phn)==0)
{
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining!=null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[srcLabInd].phn, labelsToExcludeFromTraining);
if (isLabelDesired)
{
if (srcLabInd>0)
srcStartTime = sourceLabels.items[srcLabInd-1].time;
else
srcStartTime = 0.0;
for (j = 0; j < hdr1.numfrm; j++) {
time1 = SignalProcUtils.frameIndex2Time(j, hdr1.winsize, hdr1.skipsize);
if (tgtLabInd>0)
tgtStartTime = targetLabels.items[tgtLabInd-1].time;
else
tgtStartTime = 0.0;
while (time1 > sourceLabels.items[srcLabInd].time) {
srcLabInd++;
if (srcLabInd > sourceLabels.items.length - 1) {
srcLabInd = sourceLabels.items.length - 1;
break;
}
}
srcEndTime = sourceLabels.items[srcLabInd].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
tgtLabInd = StringUtils.findInMap(labelMap, srcLabInd);
time2 = MathUtils.linearMap(time1, srcStartTime, srcEndTime, tgtStartTime, tgtEndTime);
if (tgtLabInd >= 0 && sourceLabels.items[srcLabInd].phn.compareTo(targetLabels.items[tgtLabInd].phn) == 0) {
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining != null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[srcLabInd].phn, labelsToExcludeFromTraining);
tgtFrmInd = SignalProcUtils.time2frameIndex(time2, hdr2.winsize, hdr2.skipsize);
tgtFrmInd = Math.max(0, tgtFrmInd);
tgtFrmInd = Math.min(tgtFrmInd, hdr2.numfrm-1);
if (isLabelDesired) {
if (srcLabInd > 0)
srcStartTime = sourceLabels.items[srcLabInd - 1].time;
else
srcStartTime = 0.0;
im.files[0].indicesMap[count][0] = j;
im.files[0].indicesMap[count][1] = tgtFrmInd;
count++;
if (tgtLabInd > 0)
tgtStartTime = targetLabels.items[tgtLabInd - 1].time;
else
tgtStartTime = 0.0;
if (count>hdr1.numfrm-1)
break;
}
}
}
}
}
srcEndTime = sourceLabels.items[srcLabInd].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
return im;
}
time2 = MathUtils.linearMap(time1, srcStartTime, srcEndTime, tgtStartTime, tgtEndTime);
//Each frame is mapped as a group of frames, i.e. with frames on the left and right context
public static IndexMap mapFrameGroupsFeatures(String sourceLabelFile, String targetLabelFile,
String sourceFeatureFile, String targetFeatureFile,
int numNeighbours, int vocalTractFeature,
String[] labelsToExcludeFromTraining)
throws IOException
{
IndexMap im = null;
tgtFrmInd = SignalProcUtils.time2frameIndex(time2, hdr2.winsize, hdr2.skipsize);
tgtFrmInd = Math.max(0, tgtFrmInd);
tgtFrmInd = Math.min(tgtFrmInd, hdr2.numfrm - 1);
//Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
//Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
if (vocalTractFeature==BaselineFeatureExtractor.LSF_FEATURES)
{
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
}
else if (vocalTractFeature==BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES)
{
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
im.files[0].indicesMap[count][0] = j;
im.files[0].indicesMap[count][1] = tgtFrmInd;
count++;
if (hdr1!=null && hdr2!=null && sourceLabels.items!=null && targetLabels.items!=null)
{
//Find the optimum alignment between the source and the target labels since the phone sequences may not be identical due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
if (count > hdr1.numfrm - 1)
break;
}
}
}
}
}
if (labelMap!=null)
{
int j, srcLabInd, tgtLabInd, tgtFrmInd;
double time1, time2;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
return im;
}
srcLabInd = 0;
// Each frame is mapped as a group of frames, i.e. with frames on the left and right context
public static IndexMap mapFrameGroupsFeatures(String sourceLabelFile, String targetLabelFile, String sourceFeatureFile,
String targetFeatureFile, int numNeighbours, int vocalTractFeature, String[] labelsToExcludeFromTraining)
throws IOException {
IndexMap im = null;
//Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(hdr1.numfrm,4);
// Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
for (j=0; j<hdr1.numfrm; j++)
{
time1 = SignalProcUtils.frameIndex2Time(j, hdr1.winsize, hdr1.skipsize);
// Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
while (time1>sourceLabels.items[srcLabInd].time)
{
srcLabInd++;
if (srcLabInd>sourceLabels.items.length-1)
{
srcLabInd = sourceLabels.items.length-1;
break;
}
}
if (vocalTractFeature == BaselineFeatureExtractor.LSF_FEATURES) {
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
} else if (vocalTractFeature == BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES) {
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
tgtLabInd = StringUtils.findInMap(labelMap, srcLabInd);
if (hdr1 != null && hdr2 != null && sourceLabels.items != null && targetLabels.items != null) {
// Find the optimum alignment between the source and the target labels since the phone sequences may not be identical
// due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
if (tgtLabInd>=0 && sourceLabels.items[srcLabInd].phn.compareTo(targetLabels.items[tgtLabInd].phn)==0)
{
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining!=null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[srcLabInd].phn, labelsToExcludeFromTraining);
if (isLabelDesired)
{
if (srcLabInd>0)
srcStartTime = sourceLabels.items[srcLabInd-1].time;
else
srcStartTime = 0.0;
if (labelMap != null) {
int j, srcLabInd, tgtLabInd, tgtFrmInd;
double time1, time2;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
if (tgtLabInd>0)
tgtStartTime = targetLabels.items[tgtLabInd-1].time;
else
tgtStartTime = 0.0;
srcLabInd = 0;
srcEndTime = sourceLabels.items[srcLabInd].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
// Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(hdr1.numfrm, 4);
time2 = MathUtils.linearMap(time1, srcStartTime, srcEndTime, tgtStartTime, tgtEndTime);
for (j = 0; j < hdr1.numfrm; j++) {
time1 = SignalProcUtils.frameIndex2Time(j, hdr1.winsize, hdr1.skipsize);
tgtFrmInd = SignalProcUtils.time2frameIndex(time2, hdr2.winsize, hdr2.skipsize);
while (time1 > sourceLabels.items[srcLabInd].time) {
srcLabInd++;
if (srcLabInd > sourceLabels.items.length - 1) {
srcLabInd = sourceLabels.items.length - 1;
break;
}
}
im.files[0].indicesMap[count][0] = Math.max(0, j-numNeighbours);
im.files[0].indicesMap[count][1] = Math.min(j+numNeighbours, hdr1.numfrm-1);
im.files[0].indicesMap[count][2] = Math.max(0, tgtFrmInd-numNeighbours);
im.files[0].indicesMap[count][3] = Math.min(tgtFrmInd+numNeighbours, hdr2.numfrm-1);
count++;
tgtLabInd = StringUtils.findInMap(labelMap, srcLabInd);
if (count>hdr1.numfrm-1)
break;
}
}
}
}
}
if (tgtLabInd >= 0 && sourceLabels.items[srcLabInd].phn.compareTo(targetLabels.items[tgtLabInd].phn) == 0) {
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining != null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[srcLabInd].phn, labelsToExcludeFromTraining);
return im;
}
if (isLabelDesired) {
if (srcLabInd > 0)
srcStartTime = sourceLabels.items[srcLabInd - 1].time;
else
srcStartTime = 0.0;
public static IndexMap mapLabelsFeatures(String sourceLabelFile, String targetLabelFile,
String sourceFeatureFile, String targetFeatureFile,
int vocalTractFeature,
String[] labelsToExcludeFromTraining)
throws IOException
{
IndexMap im = null;
if (tgtLabInd > 0)
tgtStartTime = targetLabels.items[tgtLabInd - 1].time;
else
tgtStartTime = 0.0;
//Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
//Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
if (vocalTractFeature==BaselineFeatureExtractor.LSF_FEATURES)
{
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
}
else if (vocalTractFeature==BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES)
{
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
srcEndTime = sourceLabels.items[srcLabInd].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
if (hdr1!=null && hdr2!=null && sourceLabels.items!=null && targetLabels.items!=null)
{
//Find the optimum alignment between the source and the target labels since the phone sequences may not be identical due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
time2 = MathUtils.linearMap(time1, srcStartTime, srcEndTime, tgtStartTime, tgtEndTime);
if (labelMap!=null)
{
int j, tgtLabInd;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
tgtFrmInd = SignalProcUtils.time2frameIndex(time2, hdr2.winsize, hdr2.skipsize);
//Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(sourceLabels.items.length,4);
im.files[0].indicesMap[count][0] = Math.max(0, j - numNeighbours);
im.files[0].indicesMap[count][1] = Math.min(j + numNeighbours, hdr1.numfrm - 1);
im.files[0].indicesMap[count][2] = Math.max(0, tgtFrmInd - numNeighbours);
im.files[0].indicesMap[count][3] = Math.min(tgtFrmInd + numNeighbours, hdr2.numfrm - 1);
count++;
for (j=0; j<sourceLabels.items.length; j++)
{
if (j>0)
srcStartTime = sourceLabels.items[j-1].time;
else
srcStartTime = 0.0;
if (count > hdr1.numfrm - 1)
break;
}
}
}
}
}
tgtLabInd = StringUtils.findInMap(labelMap, j);
return im;
}
if (tgtLabInd>=0 && sourceLabels.items[j].phn.compareTo(targetLabels.items[tgtLabInd].phn)==0)
{
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining!=null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[j].phn, labelsToExcludeFromTraining);
public static IndexMap mapLabelsFeatures(String sourceLabelFile, String targetLabelFile, String sourceFeatureFile,
String targetFeatureFile, int vocalTractFeature, String[] labelsToExcludeFromTraining) throws IOException {
IndexMap im = null;
if (isLabelDesired)
{
if (tgtLabInd>0)
tgtStartTime = targetLabels.items[tgtLabInd-1].time;
else
tgtStartTime = 0.0;
// Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
srcEndTime = sourceLabels.items[j].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
// Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
im.files[0].indicesMap[count][0] = SignalProcUtils.time2frameIndex(srcStartTime, hdr1.winsize, hdr1.skipsize);
im.files[0].indicesMap[count][1] = SignalProcUtils.time2frameIndex(srcEndTime, hdr1.winsize, hdr1.skipsize);
im.files[0].indicesMap[count][2] = SignalProcUtils.time2frameIndex(tgtStartTime, hdr2.winsize, hdr2.skipsize);
im.files[0].indicesMap[count][3] = SignalProcUtils.time2frameIndex(tgtEndTime, hdr2.winsize, hdr2.skipsize);
if (vocalTractFeature == BaselineFeatureExtractor.LSF_FEATURES) {
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
} else if (vocalTractFeature == BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES) {
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
count++;
if (hdr1 != null && hdr2 != null && sourceLabels.items != null && targetLabels.items != null) {
// Find the optimum alignment between the source and the target labels since the phone sequences may not be identical
// due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
if (count>sourceLabels.items.length-1)
break;
}
}
}
}
}
if (labelMap != null) {
int j, tgtLabInd;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
return im;
}
// Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(sourceLabels.items.length, 4);
public static IndexMap mapLabelGroupsFeatures(String sourceLabelFile, String targetLabelFile,
String sourceFeatureFile, String targetFeatureFile,
int numNeighbours, int vocalTractFeature,
String[] labelsToExcludeFromTraining)
throws IOException
{
IndexMap im = null;
for (j = 0; j < sourceLabels.items.length; j++) {
if (j > 0)
srcStartTime = sourceLabels.items[j - 1].time;
else
srcStartTime = 0.0;
//Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
//Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
if (vocalTractFeature==BaselineFeatureExtractor.LSF_FEATURES)
{
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
}
else if (vocalTractFeature==BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES)
{
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
tgtLabInd = StringUtils.findInMap(labelMap, j);
if (hdr1!=null && hdr2!=null && sourceLabels.items!=null && targetLabels.items!=null)
{
//Find the optimum alignment between the source and the target labels since the phone sequences may not be identical due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
if (tgtLabInd >= 0 && sourceLabels.items[j].phn.compareTo(targetLabels.items[tgtLabInd].phn) == 0) {
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining != null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[j].phn, labelsToExcludeFromTraining);
if (labelMap!=null)
{
int j, tgtLabInd;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
if (isLabelDesired) {
if (tgtLabInd > 0)
tgtStartTime = targetLabels.items[tgtLabInd - 1].time;
else
tgtStartTime = 0.0;
//Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(sourceLabels.items.length,4);
srcEndTime = sourceLabels.items[j].time;
tgtEndTime = targetLabels.items[tgtLabInd].time;
for (j=0; j<sourceLabels.items.length; j++)
{
if (j-numNeighbours-1>=0)
srcStartTime = sourceLabels.items[j-numNeighbours-1].time;
else
srcStartTime = 0.0;
im.files[0].indicesMap[count][0] = SignalProcUtils.time2frameIndex(srcStartTime, hdr1.winsize,
hdr1.skipsize);
im.files[0].indicesMap[count][1] = SignalProcUtils.time2frameIndex(srcEndTime, hdr1.winsize,
hdr1.skipsize);
im.files[0].indicesMap[count][2] = SignalProcUtils.time2frameIndex(tgtStartTime, hdr2.winsize,
hdr2.skipsize);
im.files[0].indicesMap[count][3] = SignalProcUtils.time2frameIndex(tgtEndTime, hdr2.winsize,
hdr2.skipsize);
tgtLabInd = StringUtils.findInMap(labelMap, j);
count++;
if (tgtLabInd>=0 && sourceLabels.items[j].phn.compareTo(targetLabels.items[tgtLabInd].phn)==0)
{
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining!=null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[j].phn, labelsToExcludeFromTraining);
if (count > sourceLabels.items.length - 1)
break;
}
}
}
}
}
if (isLabelDesired)
{
if (tgtLabInd-numNeighbours-1>=0)
tgtStartTime = targetLabels.items[tgtLabInd-numNeighbours-1].time;
else
tgtStartTime = 0.0;
return im;
}
srcEndTime = sourceLabels.items[Math.min(j+numNeighbours, sourceLabels.items.length-1)].time;
tgtEndTime = targetLabels.items[Math.min(tgtLabInd+numNeighbours, targetLabels.items.length-1)].time;
public static IndexMap mapLabelGroupsFeatures(String sourceLabelFile, String targetLabelFile, String sourceFeatureFile,
String targetFeatureFile, int numNeighbours, int vocalTractFeature, String[] labelsToExcludeFromTraining)
throws IOException {
IndexMap im = null;
im.files[0].indicesMap[count][0] = SignalProcUtils.time2frameIndex(srcStartTime, hdr1.winsize, hdr1.skipsize);
im.files[0].indicesMap[count][1] = SignalProcUtils.time2frameIndex(srcEndTime, hdr1.winsize, hdr1.skipsize);
im.files[0].indicesMap[count][2] = SignalProcUtils.time2frameIndex(tgtStartTime, hdr2.winsize, hdr2.skipsize);
im.files[0].indicesMap[count][3] = SignalProcUtils.time2frameIndex(tgtEndTime, hdr2.winsize, hdr2.skipsize);
// Read label files
Labels sourceLabels = new Labels(sourceLabelFile);
Labels targetLabels = new Labels(targetLabelFile);
//
count++;
// Read feature file headers
FeatureFileHeader hdr1 = null;
FeatureFileHeader hdr2 = null;
if (count>sourceLabels.items.length-1)
break;
}
}
}
}
}
if (vocalTractFeature == BaselineFeatureExtractor.LSF_FEATURES) {
hdr1 = new LsfFileHeader(sourceFeatureFile);
hdr2 = new LsfFileHeader(targetFeatureFile);
} else if (vocalTractFeature == BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES) {
hdr1 = new MfccFileHeader(sourceFeatureFile);
hdr2 = new MfccFileHeader(targetFeatureFile);
}
//
return im;
}
public static IndexMap mapSpeechFeatures()
{
IndexMap im = new IndexMap(1);
im.files[0] = new FileMap(1,1);
im.files[0].indicesMap[0][0] = ALL_AVAILABLE_TRAINING_FRAMES;
return im;
}
if (hdr1 != null && hdr2 != null && sourceLabels.items != null && targetLabels.items != null) {
// Find the optimum alignment between the source and the target labels since the phone sequences may not be identical
// due to silence periods etc.
int[][] labelMap = AlignLabelsUtils.alignLabels(sourceLabels.items, targetLabels.items);
//
if (labelMap != null) {
int j, tgtLabInd;
double srcStartTime, srcEndTime, tgtStartTime, tgtEndTime;
// Find the corresponding target frame index for each source frame index
int count = 0;
im = new IndexMap(1);
im.files[0] = new FileMap(sourceLabels.items.length, 4);
for (j = 0; j < sourceLabels.items.length; j++) {
if (j - numNeighbours - 1 >= 0)
srcStartTime = sourceLabels.items[j - numNeighbours - 1].time;
else
srcStartTime = 0.0;
tgtLabInd = StringUtils.findInMap(labelMap, j);
if (tgtLabInd >= 0 && sourceLabels.items[j].phn.compareTo(targetLabels.items[tgtLabInd].phn) == 0) {
boolean isLabelDesired = true;
if (labelsToExcludeFromTraining != null)
isLabelDesired = !StringUtils.isOneOf(sourceLabels.items[j].phn, labelsToExcludeFromTraining);
if (isLabelDesired) {
if (tgtLabInd - numNeighbours - 1 >= 0)
tgtStartTime = targetLabels.items[tgtLabInd - numNeighbours - 1].time;
else
tgtStartTime = 0.0;
srcEndTime = sourceLabels.items[Math.min(j + numNeighbours, sourceLabels.items.length - 1)].time;
tgtEndTime = targetLabels.items[Math.min(tgtLabInd + numNeighbours, targetLabels.items.length - 1)].time;
im.files[0].indicesMap[count][0] = SignalProcUtils.time2frameIndex(srcStartTime, hdr1.winsize,
hdr1.skipsize);
im.files[0].indicesMap[count][1] = SignalProcUtils.time2frameIndex(srcEndTime, hdr1.winsize,
hdr1.skipsize);
im.files[0].indicesMap[count][2] = SignalProcUtils.time2frameIndex(tgtStartTime, hdr2.winsize,
hdr2.skipsize);
im.files[0].indicesMap[count][3] = SignalProcUtils.time2frameIndex(tgtEndTime, hdr2.winsize,
hdr2.skipsize);
count++;
if (count > sourceLabels.items.length - 1)
break;
}
}
}
}
}
return im;
}
public static IndexMap mapSpeechFeatures() {
IndexMap im = new IndexMap(1);
im.files[0] = new FileMap(1, 1);
im.files[0].indicesMap[0][0] = ALL_AVAILABLE_TRAINING_FRAMES;
return im;
}
}
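
To make the frame mapping in mapFramesFeatures above easier to follow, here is a self-contained sketch of its core step: a source frame's time is located inside its label segment, mapped linearly onto the aligned target label segment, and converted back to a clamped target frame index. The frame/time convention below (frame centre = i*skip + win/2) is an assumption for this sketch, not necessarily what SignalProcUtils implements.

final class FrameMappingSketch {
	static double frameIndexToTime(int i, double winSize, double skipSize) {
		return i * skipSize + 0.5 * winSize;
	}

	static int timeToFrameIndex(double t, double winSize, double skipSize, int numFrames) {
		int i = (int) Math.round((t - 0.5 * winSize) / skipSize);
		return Math.min(Math.max(i, 0), numFrames - 1); // clamp like the code above
	}

	// Linear map of x from [x1, x2] onto [y1, y2], the role played by MathUtils.linearMap above.
	static double linearMap(double x, double x1, double x2, double y1, double y2) {
		return y1 + (x - x1) * (y2 - y1) / (x2 - x1);
	}

	// Map source frame j, lying in source label segment [srcStart, srcEnd], onto the aligned
	// target segment [tgtStart, tgtEnd] and return the corresponding target frame index.
	static int mapFrame(int j, double srcWin, double srcSkip, double srcStart, double srcEnd, double tgtStart, double tgtEnd,
			double tgtWin, double tgtSkip, int tgtNumFrames) {
		double srcTime = frameIndexToTime(j, srcWin, srcSkip);
		double tgtTime = linearMap(srcTime, srcStart, srcEnd, tgtStart, tgtEnd);
		return timeToFrameIndex(tgtTime, tgtWin, tgtSkip, tgtNumFrames);
	}
}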


@@ -22,115 +22,144 @@ package marytts.signalproc.adaptation;
import marytts.util.string.StringUtils;
/**
* This class keeps information on each specific training item
* For example, a training item for a sentence based voice conversion training database
* could be a wav file, the corresponding text transcription, label file, pitch contour file, etc.
* The training set is a collection of BaseTrainingItem objects
*
* This class keeps information on each specific training item For example, a training item for a sentence based voice conversion
* training database could be a wav file, the corresponding text transcription, label file, pitch contour file, etc. The training
* set is a collection of BaseTrainingItem objects
*
* @author Oytun T&uuml;rk
*/
public class BaselineAdaptationItem {
//A decomposition of the file into its sinus+noise+transients+residual components
// audioFile = sinesFile+noiseFile+transientsFile+residualFile
public String sinesFile; //Sinusoids
public String noiseFile; //Noise
public String transientsFile; //Transients
public String residualFile; //Residual (what remains after all model based decomposition)
//
public String labelFile; //Labels
public String pitchFile; //f0 contour in binary format (.ptc)
public String f0File; //f0 contour in ESPS format (.f0)
public String pitchMarkFile; //Pitch marks
public String energyFile; //Energy contour
public String textFile; //Text
public String rawMfccFile; //Raw mel frequency cepstral coefficients
public String mfccFile; //Mel frequency cepstral coefficients
public String lsfFile; //Line spectral frequencies
public String lpcFile; //Linear prediction coefficients
public String lpResidualFile; //Time-domain residual waveform after LP inverse filtering
public String cepsFile; //Cepstrum coefficients file
public String eggFile; //Electro-glottograph file
//Mary TTS outputs to specify target features for tests, transplantation, etc
public String targetFestivalUttFile; //FESTIVAL_UTT output which contains target timing and f0s (also the labels)
// This needs to be mapped with actual labels (i.e. labelFile) and f0s (pitchFile) to
// obtain required prosody modification factors
public String targetLabelFile; //Target labels for mapping
public String targetPitchFile; //Target pitch file, to be used in transplantations
public String targetF0File; //Target pitch file, to be used in transplantations
public String targetEnergyFile; //Target energy file, to be used in transplantations
public String targetWavFile; //Target waveform file
//
public String audioFile; //Original waveform file
public BaselineAdaptationItem()
{
}
public BaselineAdaptationItem(BaselineAdaptationItem existing)
{
sinesFile = existing.sinesFile;
noiseFile = existing.noiseFile;
transientsFile = existing.transientsFile;
residualFile = existing.residualFile;
labelFile = existing.labelFile;
pitchFile = existing.pitchFile;
f0File = existing.f0File;
pitchMarkFile = existing.pitchMarkFile;
energyFile = existing.energyFile;
textFile = existing.textFile;
rawMfccFile = existing.rawMfccFile;
mfccFile = existing.mfccFile;
lsfFile = existing.lsfFile;
lpcFile = existing.lpcFile;
lpResidualFile = existing.lpResidualFile;
cepsFile = existing.cepsFile;
eggFile = existing.eggFile;
targetFestivalUttFile = existing.targetFestivalUttFile;
targetLabelFile = existing.targetLabelFile;
targetPitchFile = existing.targetPitchFile;
targetF0File = existing.targetF0File;
targetEnergyFile = existing.targetEnergyFile;
targetWavFile = existing.targetWavFile;
audioFile = existing.audioFile;
}
public void setFromWavFilename(String referenceFilename)
{
audioFile = referenceFilename;
sinesFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.SINUSOID_EXTENSION_DEFAULT); //Sinusoids
noiseFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.NOISE_EXTENSION_DEFAULT); //Noise
transientsFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TRANSIENT_EXTENSION_DEFAULT); //Transients
residualFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.RESIDUAL_EXTENSION_DEFAULT); //Residual (what remains after all model based decomposition)
labelFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LABEL_EXTENSION_DEFAULT); //Labels
pitchFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.PITCH_EXTENSION_DEFAULT); //f0 contour
f0File = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.F0_EXTENSION_DEFAULT); //f0 contour
pitchMarkFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.PITCHMARK_EXTENSION_DEFAULT); //Pitch marks
energyFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.ENERGY_EXTENSION_DEFAULT); //Energy contour
textFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TEXT_EXTENSION_DEFAULT); //Text
mfccFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.MFCC_EXTENSION_DEFAULT); //Mel frequency cepstral coefficients
rawMfccFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.RAWMFCC_EXTENSION_DEFAULT);
lsfFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LSF_EXTENSION_DEFAULT); //Line spectral frequencies
lpcFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LPC_EXTENSION_DEFAULT); //Linear prediction coefficients
lpResidualFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LPRESIDUAL_EXTENSION_DEFAULT); //Time-domain residual waveform after LP inverse filtering
cepsFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.CEPSTRUM_EXTENSION_DEFAULT); //Cepstrum coefficients file
eggFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.EGG_EXTENSION_DEFAULT); //Electro-glottograph file
targetFestivalUttFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETFESTIVALUTT_EXTENSION_DEFAULT); //FESTIVAL_UTT file
targetLabelFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETLABEL_EXTENSION_DEFAULT); //Target labels for mapping
targetPitchFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETPITCH_EXTENSION_DEFAULT); //Target pitch for copy synthesis
targetF0File = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETF0_EXTENSION_DEFAULT); //Target pitch for copy synthesis
targetEnergyFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETENERGY_EXTENSION_DEFAULT); //Target energy file, to be used in transplantations
targetWavFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETWAV_EXTENSION_DEFAULT); //Target waveform file
}
}
// A decomposition of the file into its sinus+noise+transients+residual components
// audioFile = sinesFile+noiseFile+transientsFile+residualFile
public String sinesFile; // Sinusoids
public String noiseFile; // Noise
public String transientsFile; // Transients
public String residualFile; // Residual (what remains after all model based decomposition)
//
public String labelFile; // Labels
public String pitchFile; // f0 contour in binary format (.ptc)
public String f0File; // f0 contour in ESPS format (.f0)
public String pitchMarkFile; // Pitch marks
public String energyFile; // Energy contour
public String textFile; // Text
public String rawMfccFile; // Raw mel frequency cepstral coefficients
public String mfccFile; // Mel frequency cepstral coefficients
public String lsfFile; // Line spectral frequencies
public String lpcFile; // Linear prediction coefficients
public String lpResidualFile; // Time-domain residual waveform after LP inverse filtering
public String cepsFile; // Cepstrum coefficients file
public String eggFile; // Electro-glottograph file
// Mary TTS outputs to specify target features for tests, transplantation, etc
public String targetFestivalUttFile; // FESTIVAL_UTT output which contains target timing and f0s (also the labels)
// This needs to be mapped with actual labels (i.e. labelFile) and f0s (pitchFile) to
// obtain required prosody modification factors
public String targetLabelFile; // Target labels for mapping
public String targetPitchFile; // Target pitch file, to be used in transplantations
public String targetF0File; // Target pitch file, to be used in transplantations
public String targetEnergyFile; // Target energy file, to be used in transplantations
public String targetWavFile; // Target waveform file
//
public String audioFile; // Original waveform file
public BaselineAdaptationItem() {
}
public BaselineAdaptationItem(BaselineAdaptationItem existing) {
sinesFile = existing.sinesFile;
noiseFile = existing.noiseFile;
transientsFile = existing.transientsFile;
residualFile = existing.residualFile;
labelFile = existing.labelFile;
pitchFile = existing.pitchFile;
f0File = existing.f0File;
pitchMarkFile = existing.pitchMarkFile;
energyFile = existing.energyFile;
textFile = existing.textFile;
rawMfccFile = existing.rawMfccFile;
mfccFile = existing.mfccFile;
lsfFile = existing.lsfFile;
lpcFile = existing.lpcFile;
lpResidualFile = existing.lpResidualFile;
cepsFile = existing.cepsFile;
eggFile = existing.eggFile;
targetFestivalUttFile = existing.targetFestivalUttFile;
targetLabelFile = existing.targetLabelFile;
targetPitchFile = existing.targetPitchFile;
targetF0File = existing.targetF0File;
targetEnergyFile = existing.targetEnergyFile;
targetWavFile = existing.targetWavFile;
audioFile = existing.audioFile;
}
public void setFromWavFilename(String referenceFilename) {
audioFile = referenceFilename;
sinesFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.SINUSOID_EXTENSION_DEFAULT); // Sinusoids
noiseFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.NOISE_EXTENSION_DEFAULT); // Noise
transientsFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TRANSIENT_EXTENSION_DEFAULT); // Transients
residualFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.RESIDUAL_EXTENSION_DEFAULT); // Residual (what
// remains
// after all
// model based
// decomposition)
labelFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LABEL_EXTENSION_DEFAULT); // Labels
pitchFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.PITCH_EXTENSION_DEFAULT); // f0 contour
f0File = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.F0_EXTENSION_DEFAULT); // f0 contour
pitchMarkFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.PITCHMARK_EXTENSION_DEFAULT); // Pitch marks
energyFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.ENERGY_EXTENSION_DEFAULT); // Energy contour
textFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TEXT_EXTENSION_DEFAULT); // Text
mfccFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.MFCC_EXTENSION_DEFAULT); // Mel frequency cepstral
// coefficients
rawMfccFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.RAWMFCC_EXTENSION_DEFAULT);
lsfFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LSF_EXTENSION_DEFAULT); // Line spectral
// frequencies
lpcFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LPC_EXTENSION_DEFAULT); // Linear prediction
// coefficients
lpResidualFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.LPRESIDUAL_EXTENSION_DEFAULT); // Time-domain
// residual
// waveform
// after
// LP
// inverse
// filtering
cepsFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.CEPSTRUM_EXTENSION_DEFAULT); // Cepstrum
// coefficients
// file
eggFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.EGG_EXTENSION_DEFAULT); // Electro-glottograph file
targetFestivalUttFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETFESTIVALUTT_EXTENSION_DEFAULT); // FESTIVAL_UTT
// file
targetLabelFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETLABEL_EXTENSION_DEFAULT); // Target
// labels
// for
// mapping
targetPitchFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETPITCH_EXTENSION_DEFAULT); // Target
// pitch
// for
// copy
// synthesis
targetF0File = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETF0_EXTENSION_DEFAULT); // Target pitch
// for copy
// synthesis
targetEnergyFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETENERGY_EXTENSION_DEFAULT); // Target
// energy
// file,
// to
// be
// used
// in
// transplantations
targetWavFile = StringUtils.modifyExtension(audioFile, BaselineAdaptationSet.TARGETWAV_EXTENSION_DEFAULT); // Target
// waveform
// file
}
}
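
A minimal usage sketch of the class above (the wav path is hypothetical): setFromWavFilename derives every sibling file name from one reference .wav by swapping the extension against the *_EXTENSION_DEFAULT constants of BaselineAdaptationSet.

import marytts.signalproc.adaptation.BaselineAdaptationItem;

public class AdaptationItemExample {
	public static void main(String[] args) {
		BaselineAdaptationItem item = new BaselineAdaptationItem();
		item.setFromWavFilename("/data/voices/source/arctic_a0001.wav"); // hypothetical path
		// Derived names keep the basename and swap the extension, e.g.:
		System.out.println(item.labelFile); // .../arctic_a0001.lab (LABEL_EXTENSION_DEFAULT)
		System.out.println(item.pitchFile); // .../arctic_a0001.ptc (PITCH_EXTENSION_DEFAULT)
		System.out.println(item.lsfFile); // .../arctic_a0001.lsf (LSF_EXTENSION_DEFAULT)
	}
}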


@@ -27,288 +27,246 @@ import marytts.util.io.BasenameList;
* @author Oytun T&uuml;rk
*/
public class BaselineAdaptationSet {
public BaselineAdaptationItem[] items;
public static final String WAV_EXTENSION_DEFAULT = ".wav";
public static final String SINUSOID_EXTENSION_DEFAULT = ".sin";
public static final String NOISE_EXTENSION_DEFAULT = ".noi";
public static final String TRANSIENT_EXTENSION_DEFAULT = ".tra";
public static final String RESIDUAL_EXTENSION_DEFAULT = ".res";
public static final String LABEL_EXTENSION_DEFAULT = ".lab";
public static final String PITCH_EXTENSION_DEFAULT = ".ptc";
public static final String F0_EXTENSION_DEFAULT = ".f0";
public static final String PITCHMARK_EXTENSION_DEFAULT = ".pm";
public static final String ENERGY_EXTENSION_DEFAULT = ".ene";
public static final String TEXT_EXTENSION_DEFAULT= ".txt";
public static final String RAWMFCC_EXTENSION_DEFAULT = ".mgc";
public static final String MFCC_EXTENSION_DEFAULT = ".mfc";
public static final String LSF_EXTENSION_DEFAULT = ".lsf";
public static final String LPC_EXTENSION_DEFAULT = ".lpc";
public static final String LPRESIDUAL_EXTENSION_DEFAULT = ".lpr";
public static final String CEPSTRUM_EXTENSION_DEFAULT = ".cep";
public static final String EGG_EXTENSION_DEFAULT = ".egg";
public static final String TARGETFESTIVALUTT_EXTENSION_DEFAULT = ".tutt";
public static final String TARGETLABEL_EXTENSION_DEFAULT = ".tlab";
public static final String TARGETPITCH_EXTENSION_DEFAULT = ".tptc";
public static final String TARGETF0_EXTENSION_DEFAULT = ".tf0";
public static final String TARGETENERGY_EXTENSION_DEFAULT = ".tene";
public static final String TARGETWAV_EXTENSION_DEFAULT = ".twav";
public BaselineAdaptationItem[] items;
public static final String WAV_EXTENSION_DEFAULT = ".wav";
public static final String SINUSOID_EXTENSION_DEFAULT = ".sin";
public static final String NOISE_EXTENSION_DEFAULT = ".noi";
public static final String TRANSIENT_EXTENSION_DEFAULT = ".tra";
public static final String RESIDUAL_EXTENSION_DEFAULT = ".res";
public static final String LABEL_EXTENSION_DEFAULT = ".lab";
public static final String PITCH_EXTENSION_DEFAULT = ".ptc";
public static final String F0_EXTENSION_DEFAULT = ".f0";
public static final String PITCHMARK_EXTENSION_DEFAULT = ".pm";
public static final String ENERGY_EXTENSION_DEFAULT = ".ene";
public static final String TEXT_EXTENSION_DEFAULT = ".txt";
public static final String RAWMFCC_EXTENSION_DEFAULT = ".mgc";
public static final String MFCC_EXTENSION_DEFAULT = ".mfc";
public static final String LSF_EXTENSION_DEFAULT = ".lsf";
public static final String LPC_EXTENSION_DEFAULT = ".lpc";
public static final String LPRESIDUAL_EXTENSION_DEFAULT = ".lpr";
public static final String CEPSTRUM_EXTENSION_DEFAULT = ".cep";
public static final String EGG_EXTENSION_DEFAULT = ".egg";
public static final String TARGETFESTIVALUTT_EXTENSION_DEFAULT = ".tutt";
public static final String TARGETLABEL_EXTENSION_DEFAULT = ".tlab";
public static final String TARGETPITCH_EXTENSION_DEFAULT = ".tptc";
public static final String TARGETF0_EXTENSION_DEFAULT = ".tf0";
public static final String TARGETENERGY_EXTENSION_DEFAULT = ".tene";
public static final String TARGETWAV_EXTENSION_DEFAULT = ".twav";
public BaselineAdaptationSet()
{
items = null;
}
public BaselineAdaptationSet() {
items = null;
}
public BaselineAdaptationSet(int numItems)
{
allocate(numItems);
}
public BaselineAdaptationSet(String folder)
{
this(folder, WAV_EXTENSION_DEFAULT);
}
public BaselineAdaptationSet(String folder, String referenceFileExt)
{
BasenameList b = new BasenameList(folder, referenceFileExt);
allocate(b.getListAsVector().size());
for (int i=0; i<items.length; i++)
items[i].setFromWavFilename(folder + b.getName(i) + referenceFileExt);
}
public void allocate(int numItems)
{
if (numItems>0)
{
items = new BaselineAdaptationItem[numItems];
for (int i=0; i<numItems; i++)
items[i] = new BaselineAdaptationItem();
}
else
items = null;
}
public String[] getLabelFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].labelFile;
}
return fileList;
}
public String [] getLsfFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].lsfFile;
}
return fileList;
}
public BaselineAdaptationSet(int numItems) {
allocate(numItems);
}
public String[] getAudioFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].audioFile;
}
return fileList;
}
public String [] getCepsFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].cepsFile;
}
return fileList;
}
public String[] getEggFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].eggFile;
}
return fileList;
}
public String [] getPitchFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].pitchFile;
}
return fileList;
}
public String[] getLpcFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].lpcFile;
}
return fileList;
}
public String [] getLpResidualFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].lpResidualFile;
}
return fileList;
}
public String[] getRawMfccFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].rawMfccFile;
}
return fileList;
}
public String[] getMfccFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].mfccFile;
}
return fileList;
}
public String [] getNoiseFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].noiseFile;
}
return fileList;
}
public String[] getPitchMarkFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].pitchMarkFile;
}
return fileList;
}
public String [] getResidualFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].residualFile;
}
return fileList;
}
public String[] getSinesFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].sinesFile;
}
return fileList;
}
public String [] getTextFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].textFile;
}
return fileList;
}
public String [] getTransientsFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].transientsFile;
}
return fileList;
}
public String [] getEnergyFiles()
{
String [] fileList = null;
if (items!=null && items.length>0)
{
fileList = new String[items.length];
for (int i=0; i<items.length; i++)
fileList[i] = items[i].energyFile;
}
return fileList;
}
public BaselineAdaptationSet(String folder) {
this(folder, WAV_EXTENSION_DEFAULT);
}
public BaselineAdaptationSet(String folder, String referenceFileExt) {
BasenameList b = new BasenameList(folder, referenceFileExt);
allocate(b.getListAsVector().size());
for (int i = 0; i < items.length; i++)
items[i].setFromWavFilename(folder + b.getName(i) + referenceFileExt);
}
public void allocate(int numItems) {
if (numItems > 0) {
items = new BaselineAdaptationItem[numItems];
for (int i = 0; i < numItems; i++)
items[i] = new BaselineAdaptationItem();
} else
items = null;
}
public String[] getLabelFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].labelFile;
}
return fileList;
}
public String[] getLsfFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].lsfFile;
}
return fileList;
}
public String[] getAudioFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].audioFile;
}
return fileList;
}
public String[] getCepsFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].cepsFile;
}
return fileList;
}
public String[] getEggFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].eggFile;
}
return fileList;
}
public String[] getPitchFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].pitchFile;
}
return fileList;
}
public String[] getLpcFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].lpcFile;
}
return fileList;
}
public String[] getLpResidualFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].lpResidualFile;
}
return fileList;
}
public String[] getRawMfccFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].rawMfccFile;
}
return fileList;
}
public String[] getMfccFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].mfccFile;
}
return fileList;
}
public String[] getNoiseFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].noiseFile;
}
return fileList;
}
public String[] getPitchMarkFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].pitchMarkFile;
}
return fileList;
}
public String[] getResidualFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].residualFile;
}
return fileList;
}
public String[] getSinesFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].sinesFile;
}
return fileList;
}
public String[] getTextFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].textFile;
}
return fileList;
}
public String[] getTransientsFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].transientsFile;
}
return fileList;
}
public String[] getEnergyFiles() {
String[] fileList = null;
if (items != null && items.length > 0) {
fileList = new String[items.length];
for (int i = 0; i < items.length; i++)
fileList[i] = items[i].energyFile;
}
return fileList;
}
}
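As a quick orientation (an illustrative sketch, not part of this commit), this is how the reformatted BaselineAdaptationSet is typically populated from a folder of recordings and queried for the per-item file names; the folder path and class name below are hypothetical placeholders.

import marytts.signalproc.adaptation.BaselineAdaptationSet;

public class AdaptationSetSketch {
	public static void main(String[] args) {
		// Hypothetical folder containing the source speaker's .wav recordings
		String sourceFolder = "/path/to/source/wav/";
		// The set scans the folder for files with the reference extension and derives the other file names per item
		BaselineAdaptationSet sourceSet = new BaselineAdaptationSet(sourceFolder, BaselineAdaptationSet.WAV_EXTENSION_DEFAULT);
		String[] audioFiles = sourceSet.getAudioFiles();
		String[] labelFiles = sourceSet.getLabelFiles();
		if (audioFiles != null) {
			for (int i = 0; i < audioFiles.length; i++)
				System.out.println(audioFiles[i] + " -> " + labelFiles[i]);
		}
	}
}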
@@ -21,11 +21,10 @@ package marytts.signalproc.adaptation;
/**
*
* Baseline class for indexed binary files of acoustic feature sets
*
* Baseline class for indexed binary files of acoustic feature sets
*
* @author Oytun T&uumlrk
*/
public class BaselineFeatureCollection {
}
@@ -36,7 +36,6 @@ import marytts.signalproc.analysis.PitchFileHeader;
import marytts.util.io.FileUtils;
import marytts.util.string.StringUtils;
/**
*
* Baseline class for acoustic feature analysis for voice conversion
@@ -44,189 +43,174 @@ import marytts.util.string.StringUtils;
* @author Oytun T&uumlrk
*/
public class BaselineFeatureExtractor {
//Add more as necessary & make sure you can discriminate each using AND(&) operator
// from a single integer that represents desired analyses (See the function run())
public static final int NOT_DEFINED = Integer.parseInt("00000000", 2);
public static final int LSF_FEATURES = Integer.parseInt("00000001", 2);
public static final int F0_FEATURES = Integer.parseInt("00000010", 2);
public static final int ENERGY_FEATURES = Integer.parseInt("00000100", 2);
public static final int DURATION_FEATURES = Integer.parseInt("00001000", 2);
public static final int MFCC_FEATURES_FROM_FILES = Integer.parseInt("00010000", 2);
public BaselineFeatureExtractor()
{
this(null);
}
public BaselineFeatureExtractor(BaselineFeatureExtractor existing)
{
if (existing!=null)
{
//Copy class members if you add any
}
else
{
//Set default class member values
}
}
public void run(BaselineAdaptationSet fileSet, BaselineParams params, int desiredFeatures) throws IOException, UnsupportedAudioFileException
{
LsfFileHeader lsfParams = null;
if (params instanceof WeightedCodebookTrainerParams)
lsfParams = new LsfFileHeader(((WeightedCodebookTrainerParams)params).codebookHeader.lsfParams);
else if (params instanceof WeightedCodebookTransformerParams)
lsfParams = new LsfFileHeader(((WeightedCodebookTransformerParams)params).lsfParams);
else if (params instanceof JointGMMTransformerParams)
lsfParams = new LsfFileHeader(((JointGMMTransformerParams)params).lsfParams);
PitchFileHeader ptcParams = null;
if (params instanceof WeightedCodebookTrainerParams)
ptcParams = new PitchFileHeader(((WeightedCodebookTrainerParams)params).codebookHeader.ptcParams);
else if (params instanceof WeightedCodebookTransformerParams)
ptcParams = new PitchFileHeader(((WeightedCodebookTransformerParams)params).ptcParams);
else if (params instanceof JointGMMTransformerParams)
ptcParams = new PitchFileHeader(((JointGMMTransformerParams)params).ptcParams);
EnergyFileHeader energyParams = null;
if (params instanceof WeightedCodebookTrainerParams)
energyParams = new EnergyFileHeader(((WeightedCodebookTrainerParams)params).codebookHeader.energyParams);
else if (params instanceof WeightedCodebookTransformerParams)
energyParams = new EnergyFileHeader(((WeightedCodebookTransformerParams)params).energyParams);
else if (params instanceof JointGMMTransformerParams)
energyParams = new EnergyFileHeader(((JointGMMTransformerParams)params).energyParams);
MfccFileHeader mfccParams = null;
if (params instanceof WeightedCodebookTrainerParams)
mfccParams = new MfccFileHeader(((WeightedCodebookTrainerParams)params).codebookHeader.mfccParams);
else if (params instanceof WeightedCodebookTransformerParams)
mfccParams = new MfccFileHeader(((WeightedCodebookTransformerParams)params).mfccParams);
else if (params instanceof JointGMMTransformerParams)
mfccParams = new MfccFileHeader(((JointGMMTransformerParams)params).mfccParams);
boolean isForcedAnalysis = false;
if (params instanceof WeightedCodebookTrainerParams)
isForcedAnalysis = ((WeightedCodebookTrainerParams)params).isForcedAnalysis;
else if (params instanceof WeightedCodebookTransformerParams)
isForcedAnalysis = ((WeightedCodebookTransformerParams)params).isForcedAnalysis;
else if (params instanceof JointGMMTransformerParams)
isForcedAnalysis = ((JointGMMTransformerParams)params).isForcedAnalysis;
//ADD more analyses as necessary
if (StringUtils.isDesired(LSF_FEATURES, desiredFeatures))
lsfAnalysis(fileSet, lsfParams, isForcedAnalysis);
// Add more as necessary & make sure you can discriminate each using AND(&) operator
// from a single integer that represents desired analyses (See the function run())
public static final int NOT_DEFINED = Integer.parseInt("00000000", 2);
public static final int LSF_FEATURES = Integer.parseInt("00000001", 2);
public static final int F0_FEATURES = Integer.parseInt("00000010", 2);
public static final int ENERGY_FEATURES = Integer.parseInt("00000100", 2);
public static final int DURATION_FEATURES = Integer.parseInt("00001000", 2);
public static final int MFCC_FEATURES_FROM_FILES = Integer.parseInt("00010000", 2);
if (StringUtils.isDesired(F0_FEATURES, desiredFeatures))
f0Analysis(fileSet, ptcParams, isForcedAnalysis);
if (StringUtils.isDesired(ENERGY_FEATURES, desiredFeatures))
energyAnalysis(fileSet, energyParams, isForcedAnalysis);
if (StringUtils.isDesired(MFCC_FEATURES_FROM_FILES, desiredFeatures))
checkMfccFiles(fileSet, mfccParams, isForcedAnalysis);
//
}
public static void lsfAnalysis(BaselineAdaptationItem item, LsfFileHeader lsfParams, boolean isForcedAnalysis) throws IOException
{
BaselineAdaptationSet fileSet = new BaselineAdaptationSet(1);
fileSet.items[0] = new BaselineAdaptationItem(item);
lsfAnalysis(fileSet, lsfParams, isForcedAnalysis);
}
public static void lsfAnalysis(BaselineAdaptationSet fileSet, LsfFileHeader lsfParams, boolean isForcedAnalysis) throws IOException
{
System.err.println("Starting LSF analysis...");
boolean bAnalyze;
for (int i=0; i<fileSet.items.length; i++)
{
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].lsfFile))
{
LsfFileHeader tmpParams = new LsfFileHeader(fileSet.items[i].lsfFile);
if (tmpParams.isIdenticalAnalysisParams(lsfParams))
bAnalyze = false;
}
if (bAnalyze)
{
LsfAnalyser.lsfAnalyzeWavFile(fileSet.items[i].audioFile, fileSet.items[i].lsfFile, lsfParams);
System.err.println("Extracted LSFs: " + fileSet.items[i].lsfFile);
}
else
System.err.println("LSF file found with identical analysis parameters: " + fileSet.items[i].lsfFile);
}
System.err.println("LSF analysis completed...");
}
public static void f0Analysis(BaselineAdaptationSet fileSet, PitchFileHeader ptcParams, boolean isForcedAnalysis) throws UnsupportedAudioFileException, IOException
{
System.err.println("Starting f0 analysis...");
boolean bAnalyze;
F0TrackerAutocorrelationHeuristic p = new F0TrackerAutocorrelationHeuristic(ptcParams);
for (int i=0; i<fileSet.items.length; i++)
{
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].pitchFile)) //No f0 detection if ptc file already exists
bAnalyze = false;
if (bAnalyze)
{
p.pitchAnalyzeWavFile(fileSet.items[i].audioFile, fileSet.items[i].pitchFile);
System.err.println("Extracted f0 contour: " + fileSet.items[i].pitchFile);
}
else
System.err.println("F0 file found with identical analysis parameters: " + fileSet.items[i].pitchFile);
}
System.err.println("f0 analysis completed...");
}
public BaselineFeatureExtractor() {
this(null);
}
public static void energyAnalysis(BaselineAdaptationSet fileSet, EnergyFileHeader energyParams, boolean isForcedAnalysis) throws UnsupportedAudioFileException, IOException
{
System.err.println("Starting energy analysis...");
boolean bAnalyze;
EnergyContourRms e = null;
for (int i=0; i<fileSet.items.length; i++)
{
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].energyFile)) //No f0 detection if ptc file already exists
bAnalyze = false;
if (bAnalyze)
{
e = new EnergyContourRms(fileSet.items[i].audioFile, fileSet.items[i].energyFile, energyParams.windowSizeInSeconds, energyParams.skipSizeInSeconds);
System.err.println("Extracted energy contour: " + fileSet.items[i].energyFile);
}
else
System.err.println("Energy file found with identical analysis parameters: " + fileSet.items[i].energyFile);
}
System.err.println("Energy analysis completed...");
}
public BaselineFeatureExtractor(BaselineFeatureExtractor existing) {
if (existing != null) {
// Copy class members if you add any
} else {
// Set default class member values
}
public static void checkMfccFiles(BaselineAdaptationSet fileSet, MfccFileHeader mfccParams, boolean isForcedAnalysis) throws IOException
{
System.err.println("Attempting to read MFCC parameters from files...");
for (int i=0; i<fileSet.items.length; i++)
{
if (!FileUtils.exists(fileSet.items[i].mfccFile))
System.err.println("MFCC files not found!Please use SPTK generated raw MFCC file named as " + fileSet.items[i].mfccFile);
}
System.err.println("MFCC files verified...");
}
}
public void run(BaselineAdaptationSet fileSet, BaselineParams params, int desiredFeatures) throws IOException,
UnsupportedAudioFileException {
LsfFileHeader lsfParams = null;
if (params instanceof WeightedCodebookTrainerParams)
lsfParams = new LsfFileHeader(((WeightedCodebookTrainerParams) params).codebookHeader.lsfParams);
else if (params instanceof WeightedCodebookTransformerParams)
lsfParams = new LsfFileHeader(((WeightedCodebookTransformerParams) params).lsfParams);
else if (params instanceof JointGMMTransformerParams)
lsfParams = new LsfFileHeader(((JointGMMTransformerParams) params).lsfParams);
PitchFileHeader ptcParams = null;
if (params instanceof WeightedCodebookTrainerParams)
ptcParams = new PitchFileHeader(((WeightedCodebookTrainerParams) params).codebookHeader.ptcParams);
else if (params instanceof WeightedCodebookTransformerParams)
ptcParams = new PitchFileHeader(((WeightedCodebookTransformerParams) params).ptcParams);
else if (params instanceof JointGMMTransformerParams)
ptcParams = new PitchFileHeader(((JointGMMTransformerParams) params).ptcParams);
EnergyFileHeader energyParams = null;
if (params instanceof WeightedCodebookTrainerParams)
energyParams = new EnergyFileHeader(((WeightedCodebookTrainerParams) params).codebookHeader.energyParams);
else if (params instanceof WeightedCodebookTransformerParams)
energyParams = new EnergyFileHeader(((WeightedCodebookTransformerParams) params).energyParams);
else if (params instanceof JointGMMTransformerParams)
energyParams = new EnergyFileHeader(((JointGMMTransformerParams) params).energyParams);
MfccFileHeader mfccParams = null;
if (params instanceof WeightedCodebookTrainerParams)
mfccParams = new MfccFileHeader(((WeightedCodebookTrainerParams) params).codebookHeader.mfccParams);
else if (params instanceof WeightedCodebookTransformerParams)
mfccParams = new MfccFileHeader(((WeightedCodebookTransformerParams) params).mfccParams);
else if (params instanceof JointGMMTransformerParams)
mfccParams = new MfccFileHeader(((JointGMMTransformerParams) params).mfccParams);
boolean isForcedAnalysis = false;
if (params instanceof WeightedCodebookTrainerParams)
isForcedAnalysis = ((WeightedCodebookTrainerParams) params).isForcedAnalysis;
else if (params instanceof WeightedCodebookTransformerParams)
isForcedAnalysis = ((WeightedCodebookTransformerParams) params).isForcedAnalysis;
else if (params instanceof JointGMMTransformerParams)
isForcedAnalysis = ((JointGMMTransformerParams) params).isForcedAnalysis;
// ADD more analyses as necessary
if (StringUtils.isDesired(LSF_FEATURES, desiredFeatures))
lsfAnalysis(fileSet, lsfParams, isForcedAnalysis);
if (StringUtils.isDesired(F0_FEATURES, desiredFeatures))
f0Analysis(fileSet, ptcParams, isForcedAnalysis);
if (StringUtils.isDesired(ENERGY_FEATURES, desiredFeatures))
energyAnalysis(fileSet, energyParams, isForcedAnalysis);
if (StringUtils.isDesired(MFCC_FEATURES_FROM_FILES, desiredFeatures))
checkMfccFiles(fileSet, mfccParams, isForcedAnalysis);
//
}
public static void lsfAnalysis(BaselineAdaptationItem item, LsfFileHeader lsfParams, boolean isForcedAnalysis)
throws IOException {
BaselineAdaptationSet fileSet = new BaselineAdaptationSet(1);
fileSet.items[0] = new BaselineAdaptationItem(item);
lsfAnalysis(fileSet, lsfParams, isForcedAnalysis);
}
public static void lsfAnalysis(BaselineAdaptationSet fileSet, LsfFileHeader lsfParams, boolean isForcedAnalysis)
throws IOException {
System.err.println("Starting LSF analysis...");
boolean bAnalyze;
for (int i = 0; i < fileSet.items.length; i++) {
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].lsfFile)) {
LsfFileHeader tmpParams = new LsfFileHeader(fileSet.items[i].lsfFile);
if (tmpParams.isIdenticalAnalysisParams(lsfParams))
bAnalyze = false;
}
if (bAnalyze) {
LsfAnalyser.lsfAnalyzeWavFile(fileSet.items[i].audioFile, fileSet.items[i].lsfFile, lsfParams);
System.err.println("Extracted LSFs: " + fileSet.items[i].lsfFile);
} else
System.err.println("LSF file found with identical analysis parameters: " + fileSet.items[i].lsfFile);
}
System.err.println("LSF analysis completed...");
}
public static void f0Analysis(BaselineAdaptationSet fileSet, PitchFileHeader ptcParams, boolean isForcedAnalysis)
throws UnsupportedAudioFileException, IOException {
System.err.println("Starting f0 analysis...");
boolean bAnalyze;
F0TrackerAutocorrelationHeuristic p = new F0TrackerAutocorrelationHeuristic(ptcParams);
for (int i = 0; i < fileSet.items.length; i++) {
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].pitchFile)) // No f0 detection if ptc file already exists
bAnalyze = false;
if (bAnalyze) {
p.pitchAnalyzeWavFile(fileSet.items[i].audioFile, fileSet.items[i].pitchFile);
System.err.println("Extracted f0 contour: " + fileSet.items[i].pitchFile);
} else
System.err.println("F0 file found with identical analysis parameters: " + fileSet.items[i].pitchFile);
}
System.err.println("f0 analysis completed...");
}
public static void energyAnalysis(BaselineAdaptationSet fileSet, EnergyFileHeader energyParams, boolean isForcedAnalysis)
throws UnsupportedAudioFileException, IOException {
System.err.println("Starting energy analysis...");
boolean bAnalyze;
EnergyContourRms e = null;
for (int i = 0; i < fileSet.items.length; i++) {
bAnalyze = true;
if (!isForcedAnalysis && FileUtils.exists(fileSet.items[i].energyFile)) // No energy analysis if energy file already exists
bAnalyze = false;
if (bAnalyze) {
e = new EnergyContourRms(fileSet.items[i].audioFile, fileSet.items[i].energyFile,
energyParams.windowSizeInSeconds, energyParams.skipSizeInSeconds);
System.err.println("Extracted energy contour: " + fileSet.items[i].energyFile);
} else
System.err.println("Energy file found with identical analysis parameters: " + fileSet.items[i].energyFile);
}
System.err.println("Energy analysis completed...");
}
public static void checkMfccFiles(BaselineAdaptationSet fileSet, MfccFileHeader mfccParams, boolean isForcedAnalysis)
throws IOException {
System.err.println("Attempting to read MFCC parameters from files...");
for (int i = 0; i < fileSet.items.length; i++) {
if (!FileUtils.exists(fileSet.items[i].mfccFile))
System.err.println("MFCC files not found! Please use an SPTK-generated raw MFCC file named "
+ fileSet.items[i].mfccFile);
}
System.err.println("MFCC files verified...");
}
}
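Since the feature flags above are bit masks, several analyses can be requested in a single integer and tested individually with the AND (&) operator, which is presumably what StringUtils.isDesired() checks inside run(). A minimal sketch (not from this commit; the class name is hypothetical):

import marytts.signalproc.adaptation.BaselineFeatureExtractor;

public class DesiredFeaturesSketch {
	public static void main(String[] args) {
		// Request LSF, f0 and energy analyses in one integer (1 | 2 | 4 = 7)
		int desiredFeatures = BaselineFeatureExtractor.LSF_FEATURES | BaselineFeatureExtractor.F0_FEATURES
				| BaselineFeatureExtractor.ENERGY_FEATURES;
		boolean lsfWanted = (desiredFeatures & BaselineFeatureExtractor.LSF_FEATURES) != 0; // true
		boolean mfccWanted = (desiredFeatures & BaselineFeatureExtractor.MFCC_FEATURES_FROM_FILES) != 0; // false
		System.out.println("LSF: " + lsfWanted + ", MFCC from files: " + mfccWanted);
		// This integer would then be passed as the desiredFeatures argument of run(fileSet, params, desiredFeatures)
	}
}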
@@ -25,35 +25,32 @@ import marytts.signalproc.analysis.PitchFileHeader;
/**
* Baseline class for voice conversion function binary file header
*
*
* @author Oytun T&uumlrk
*/
public class BaselineMappingFileHeader {
public String sourceTag; //Source name tag (i.e. style or speaker identity)
public String targetTag; //Target name tag (i.e. style or speaker identity)
public LsfFileHeader lsfParams;
public PitchFileHeader ptcParams;
public EnergyFileHeader energyParams;
public BaselineMappingFileHeader()
{
sourceTag = "source"; //Source name tag (i.e. style or speaker identity)
targetTag = "target"; //Target name tag (i.e. style or speaker identity)
public String sourceTag; // Source name tag (i.e. style or speaker identity)
public String targetTag; // Target name tag (i.e. style or speaker identity)
lsfParams = new LsfFileHeader();
ptcParams = new PitchFileHeader();
energyParams = new EnergyFileHeader();
}
public LsfFileHeader lsfParams;
public PitchFileHeader ptcParams;
public EnergyFileHeader energyParams;
public BaselineMappingFileHeader(BaselineMappingFileHeader existing)
{
sourceTag = existing.sourceTag;
targetTag = existing.targetTag;
public BaselineMappingFileHeader() {
sourceTag = "source"; // Source name tag (i.e. style or speaker identity)
targetTag = "target"; // Target name tag (i.e. style or speaker identity)
lsfParams = new LsfFileHeader(existing.lsfParams);
ptcParams = new PitchFileHeader(existing.ptcParams);
energyParams = new EnergyFileHeader(existing.energyParams);
}
lsfParams = new LsfFileHeader();
ptcParams = new PitchFileHeader();
energyParams = new EnergyFileHeader();
}
public BaselineMappingFileHeader(BaselineMappingFileHeader existing) {
sourceTag = existing.sourceTag;
targetTag = existing.targetTag;
lsfParams = new LsfFileHeader(existing.lsfParams);
ptcParams = new PitchFileHeader(existing.ptcParams);
energyParams = new EnergyFileHeader(existing.energyParams);
}
}
@@ -20,21 +20,17 @@
package marytts.signalproc.adaptation;
/**
* Baseline voice conversion parameters.
* Both training and transformation parameters should be derived from this class.
* Baseline voice conversion parameters. Both training and transformation parameters should be derived from this class.
*
* @author Oytun T&uumlrk
*/
public class BaselineParams {
public BaselineParams()
{
}
public BaselineParams(BaselineParams existing)
{
}
public BaselineParams() {
}
public BaselineParams(BaselineParams existing) {
}
}
@@ -25,27 +25,20 @@ package marytts.signalproc.adaptation;
* @author Oytun T&uumlrk
*/
public class BaselinePostprocessor {
public BaselinePostprocessor()
{
this(null);
}
public BaselinePostprocessor(BaselinePostprocessor existing)
{
if (existing!=null)
{
//Copy class members if you add any
}
else
{
//Set default class member values
}
}
public void run(BaselineAdaptationSet trainingSet)
{
}
public BaselinePostprocessor() {
this(null);
}
public BaselinePostprocessor(BaselinePostprocessor existing) {
if (existing != null) {
// Copy class members if you add any
} else {
// Set default class member values
}
}
public void run(BaselineAdaptationSet trainingSet) {
}
}
@@ -21,31 +21,24 @@ package marytts.signalproc.adaptation;
/**
* Baseline class for pre-processing operations prior to voice conversion
*
*
* @author Oytun T&uumlrk
*/
public class BaselinePreprocessor {
public BaselinePreprocessor()
{
this(null);
}
public BaselinePreprocessor(BaselinePreprocessor existing)
{
if (existing!=null)
{
//Copy class members if you add any
}
else
{
//Set default class member values
}
}
public void run(BaselineAdaptationSet trainingSet)
{
}
}
public BaselinePreprocessor() {
this(null);
}
public BaselinePreprocessor(BaselinePreprocessor existing) {
if (existing != null) {
// Copy class members if you add any
} else {
// Set default class member values
}
}
public void run(BaselineAdaptationSet trainingSet) {
}
}
@@ -19,47 +19,39 @@
*/
package marytts.signalproc.adaptation;
/**
* Baseline class for voice conversion training
*
*
* @author Oytun T&uumlrk
*/
public class BaselineTrainer {
public BaselinePreprocessor preprocessor;
public BaselineFeatureExtractor featureExtractor;
public BaselineTrainer(BaselinePreprocessor pp,
BaselineFeatureExtractor fe)
{
preprocessor = new BaselinePreprocessor(pp);
featureExtractor = new BaselineFeatureExtractor(fe);
}
//This baseline version does nothing. Please implement functionality in derived classes.
public boolean checkParams()
{
return true;
}
//This baseline version just returns identical target indices for each source entry
//Note that the returned map contains smallest number of items in source and target training sets
public int[] getIndexedMapping(BaselineAdaptationSet sourceTrainingSet, BaselineAdaptationSet targetTrainingSet)
{
int[] map = null;
int numItems = Math.min(sourceTrainingSet.items.length, targetTrainingSet.items.length);
if (numItems>0)
{
map = new int[numItems];
int i;
for (i=0; i<numItems; i++)
map[i] = i;
}
return map;
}
}
public BaselinePreprocessor preprocessor;
public BaselineFeatureExtractor featureExtractor;
public BaselineTrainer(BaselinePreprocessor pp, BaselineFeatureExtractor fe) {
preprocessor = new BaselinePreprocessor(pp);
featureExtractor = new BaselineFeatureExtractor(fe);
}
// This baseline version does nothing. Please implement functionality in derived classes.
public boolean checkParams() {
return true;
}
// This baseline version just returns identical target indices for each source entry
// Note that the returned map contains smallest number of items in source and target training sets
public int[] getIndexedMapping(BaselineAdaptationSet sourceTrainingSet, BaselineAdaptationSet targetTrainingSet) {
int[] map = null;
int numItems = Math.min(sourceTrainingSet.items.length, targetTrainingSet.items.length);
if (numItems > 0) {
map = new int[numItems];
int i;
for (i = 0; i < numItems; i++)
map[i] = i;
}
return map;
}
}
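To make the identity mapping concrete: with three source items and five target items, the baseline getIndexedMapping() returns {0, 1, 2}, i.e. one entry per item of the smaller set. A small sketch (illustrative only, class name hypothetical):

import marytts.signalproc.adaptation.BaselineAdaptationSet;
import marytts.signalproc.adaptation.BaselineFeatureExtractor;
import marytts.signalproc.adaptation.BaselinePreprocessor;
import marytts.signalproc.adaptation.BaselineTrainer;

public class IndexedMappingSketch {
	public static void main(String[] args) {
		BaselineTrainer trainer = new BaselineTrainer(new BaselinePreprocessor(), new BaselineFeatureExtractor());
		BaselineAdaptationSet source = new BaselineAdaptationSet(3);
		BaselineAdaptationSet target = new BaselineAdaptationSet(5);
		int[] map = trainer.getIndexedMapping(source, target);
		for (int i = 0; i < map.length; i++)
			System.out.println("source item " + i + " -> target item " + map[i]); // 0->0, 1->1, 2->2
	}
}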
@@ -21,9 +21,8 @@ package marytts.signalproc.adaptation;
/**
* @author Oytun T&uumlrk
*
*
*/
public class BaselineTrainerParams extends BaselineParams {
}
@@ -25,80 +25,63 @@ import javax.sound.sampled.UnsupportedAudioFileException;
import marytts.signalproc.adaptation.prosody.PitchTransformationData;
/**
* Baseline class for voice conversion transformation
*
* @author Oytun T&uumlrk
*/
public class BaselineTransformer {
public BaselinePreprocessor preprocessor;
public BaselineFeatureExtractor featureExtractor;
public BaselinePostprocessor postprocessor;
BaselineTransformerParams params;
public BaselineTransformer(BaselinePreprocessor pp,
BaselineFeatureExtractor fe,
BaselinePostprocessor po,
BaselineTransformerParams pa)
{
preprocessor = new BaselinePreprocessor(pp);
featureExtractor = new BaselineFeatureExtractor(fe);
postprocessor = new BaselinePostprocessor(po);
params = new BaselineTransformerParams(pa);
}
//Baseline version does nothing, override in derived classes
public boolean checkParams() throws IOException
{
return true;
}
//Baseline version does nothing, override in derived classes
public void transform(BaselineAdaptationSet inputSet, BaselineAdaptationSet outputSet) throws UnsupportedAudioFileException
{
}
public static void transformOneItem(BaselineAdaptationItem inputItem,
BaselineAdaptationItem outputItem,
BaselineTransformerParams tfmParams,
VocalTractTransformationFunction vttFunction,
VocalTractTransformationData vtData,
PitchTransformationData pMap
) throws UnsupportedAudioFileException, IOException
{
}
public static boolean isScalingsRequired(double[] pscales, double[] tscales, double[] escales, double[] vscales)
{
int i;
for (i=0; i<pscales.length; i++)
{
if (pscales[i]!=1.0)
return true;
}
for (i=0; i<tscales.length; i++)
{
if (tscales[i]!=1.0)
return true;
}
for (i=0; i<escales.length; i++)
{
if (escales[i]!=1.0)
return true;
}
for (i=0; i<vscales.length; i++)
{
if (vscales[i]!=1.0)
return true;
}
return false;
}
}
public BaselinePreprocessor preprocessor;
public BaselineFeatureExtractor featureExtractor;
public BaselinePostprocessor postprocessor;
BaselineTransformerParams params;
public BaselineTransformer(BaselinePreprocessor pp, BaselineFeatureExtractor fe, BaselinePostprocessor po,
BaselineTransformerParams pa) {
preprocessor = new BaselinePreprocessor(pp);
featureExtractor = new BaselineFeatureExtractor(fe);
postprocessor = new BaselinePostprocessor(po);
params = new BaselineTransformerParams(pa);
}
// Baseline version does nothing, override in derived classes
public boolean checkParams() throws IOException {
return true;
}
// Baseline version does nothing, override in derived classes
public void transform(BaselineAdaptationSet inputSet, BaselineAdaptationSet outputSet) throws UnsupportedAudioFileException {
}
public static void transformOneItem(BaselineAdaptationItem inputItem, BaselineAdaptationItem outputItem,
BaselineTransformerParams tfmParams, VocalTractTransformationFunction vttFunction,
VocalTractTransformationData vtData, PitchTransformationData pMap) throws UnsupportedAudioFileException, IOException {
}
public static boolean isScalingsRequired(double[] pscales, double[] tscales, double[] escales, double[] vscales) {
int i;
for (i = 0; i < pscales.length; i++) {
if (pscales[i] != 1.0)
return true;
}
for (i = 0; i < tscales.length; i++) {
if (tscales[i] != 1.0)
return true;
}
for (i = 0; i < escales.length; i++) {
if (escales[i] != 1.0)
return true;
}
for (i = 0; i < vscales.length; i++) {
if (vscales[i] != 1.0)
return true;
}
return false;
}
}
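The isScalingsRequired() helper simply reports whether any prosody scaling factor deviates from 1.0. A tiny sketch (not part of the commit, class name hypothetical):

import marytts.signalproc.adaptation.BaselineTransformer;

public class ScalingCheckSketch {
	public static void main(String[] args) {
		double[] pscales = { 1.0, 1.0 }; // pitch scale factors
		double[] tscales = { 1.0, 1.2 }; // one time scale factor differs from 1.0
		double[] escales = { 1.0, 1.0 }; // energy scale factors
		double[] vscales = { 1.0, 1.0 }; // vocal tract scale factors
		// Prints true, because at least one factor is not 1.0
		System.out.println(BaselineTransformer.isScalingsRequired(pscales, tscales, escales, vscales));
	}
}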
@@ -27,154 +27,151 @@ import marytts.signalproc.analysis.MfccFileHeader;
import marytts.signalproc.analysis.PitchFileHeader;
/**
* Baseline class for voice conversion transformation parameters
* All specific implementations of transformation stage of a given voice conversion algorithm should use
* a parameter set that is derived from this class
*
* Baseline class for voice conversion transformation parameters. All specific implementations of the transformation stage of a
* given voice conversion algorithm should use a parameter set that is derived from this class
*
* @author Oytun T&uumlrk
*/
public class BaselineTransformerParams extends BaselineParams {
public String inputFolder; //Folder of input files to be transformed
public String outputBaseFolder; //Base folder of output files
public String outputFolder; //Individual folder of output files (Note that this is automatically generated using parameter values)
public String outputFolderInfoString; //An information string to be appended as a prefix to the output folder
public boolean isSourceToTarget; //if true source is transformed to target, else target is transformed to source
public boolean isDisplayProcessingFrameCount; //Display processed frame indices while transforming?
public ProsodyTransformerParams prosodyParams;
public LsfFileHeader lsfParams;
public PitchFileHeader ptcParams;
public EnergyFileHeader energyParams;
public MfccFileHeader mfccParams;
public boolean isForcedAnalysis;
public boolean isVocalTractTransformation;
public String inputFolder; // Folder of input files to be transformed
public String outputBaseFolder; // Base folder of output files
public String outputFolder; // Individual folder of output files (Note that this is automatically generated using parameter
// values)
public String outputFolderInfoString; // An information string to be appended as a prefix to the output folder
public boolean isSourceToTarget; // if true source is transformed to target, else target is transformed to source
public boolean isDisplayProcessingFrameCount; // Display processed frame indices while transforming?
public boolean isSeparateProsody;
public boolean isSaveVocalTractOnlyVersion;
public boolean isFixedRateVocalTractConversion;
public boolean isTemporalSmoothing;
public int smoothingMethod;
public int smoothingNumNeighbours;
public int smoothingState;
public String smoothedVocalTractFile;
public boolean isSourceVocalTractSpectrumFromModel;
public boolean isResynthesizeVocalTractFromSourceModel;
public boolean isVocalTractMatchUsingTargetModel;
public boolean isLsfsFromTargetFile;
public String pitchMappingFile;
//For copy-paste prosody
public boolean isPitchFromTargetFile;
public int pitchFromTargetMethod;
public boolean isDurationFromTargetFile;
public int durationFromTargetMethod;
public boolean isEnergyFromTargetFile;
public int targetAlignmentFileType;
public static final int LABELS = 1;
public static final int FESTIVAL_UTT = 2;
//
public static final double MINIMUM_ALLOWED_PITCH_SCALE = 0.6;
public static final double MAXIMUM_ALLOWED_PITCH_SCALE = 2.5;
public static final double MINIMUM_ALLOWED_TIME_SCALE = 0.6;
public static final double MAXIMUM_ALLOWED_TIME_SCALE = 2.5;
public ProsodyTransformerParams prosodyParams;
public BaselineTransformerParams()
{
inputFolder = "";
outputBaseFolder = "";
outputFolder = "";
outputFolderInfoString = "";
isSourceToTarget = true;
isDisplayProcessingFrameCount = false;
prosodyParams = new ProsodyTransformerParams();
lsfParams = new LsfFileHeader();
ptcParams = new PitchFileHeader();
energyParams = new EnergyFileHeader();
mfccParams = new MfccFileHeader();
isForcedAnalysis = false;
isSourceVocalTractSpectrumFromModel = true;
isVocalTractTransformation = true;
isResynthesizeVocalTractFromSourceModel = false;
isVocalTractMatchUsingTargetModel = false;
isSeparateProsody = true;
isSaveVocalTractOnlyVersion = true;
isFixedRateVocalTractConversion = true;
isTemporalSmoothing = false;
smoothingMethod = SmoothingDefinitions.NO_SMOOTHING;
smoothingNumNeighbours = SmoothingDefinitions.DEFAULT_NUM_NEIGHBOURS;
smoothingState = SmoothingDefinitions.NONE;
smoothedVocalTractFile = "";
isSourceVocalTractSpectrumFromModel = false;
isResynthesizeVocalTractFromSourceModel = false;
isVocalTractMatchUsingTargetModel = false;
pitchMappingFile = "";
isPitchFromTargetFile = false;
pitchFromTargetMethod = ProsodyTransformerParams.SENTENCE_MEAN_STDDEV;
isDurationFromTargetFile = false;
durationFromTargetMethod = ProsodyTransformerParams.SENTENCE_DURATION;
isEnergyFromTargetFile = false;
isLsfsFromTargetFile = false;
targetAlignmentFileType = BaselineTransformerParams.LABELS;
}
public BaselineTransformerParams(BaselineTransformerParams existing)
{
inputFolder = existing.inputFolder;
outputBaseFolder = existing.outputBaseFolder;
outputFolder = existing.outputFolder;
outputFolderInfoString = existing.outputFolderInfoString;
isSourceToTarget = existing.isSourceToTarget;
isDisplayProcessingFrameCount = existing.isDisplayProcessingFrameCount;
prosodyParams = new ProsodyTransformerParams(existing.prosodyParams);
lsfParams = new LsfFileHeader(existing.lsfParams);
ptcParams = new PitchFileHeader(existing.ptcParams);
energyParams = new EnergyFileHeader(existing.energyParams);
mfccParams = new MfccFileHeader(existing.mfccParams);
isForcedAnalysis = existing.isForcedAnalysis;
isVocalTractTransformation = existing.isVocalTractTransformation;
isSeparateProsody = existing.isSeparateProsody;
isSaveVocalTractOnlyVersion = existing.isSaveVocalTractOnlyVersion;
isFixedRateVocalTractConversion = existing.isFixedRateVocalTractConversion;
isTemporalSmoothing = existing.isTemporalSmoothing;
smoothingMethod = existing.smoothingMethod;
smoothingNumNeighbours = existing.smoothingNumNeighbours;
smoothingState = existing.smoothingState;
smoothedVocalTractFile = existing.smoothedVocalTractFile;
isSourceVocalTractSpectrumFromModel = existing.isSourceVocalTractSpectrumFromModel;
isResynthesizeVocalTractFromSourceModel = existing.isResynthesizeVocalTractFromSourceModel;
isVocalTractMatchUsingTargetModel = existing.isVocalTractMatchUsingTargetModel;
pitchMappingFile = existing.pitchMappingFile;
isPitchFromTargetFile = existing.isPitchFromTargetFile;
pitchFromTargetMethod = existing.pitchFromTargetMethod;
isDurationFromTargetFile = existing.isDurationFromTargetFile;
durationFromTargetMethod = existing.durationFromTargetMethod;
isEnergyFromTargetFile = existing.isEnergyFromTargetFile;
isLsfsFromTargetFile = existing.isLsfsFromTargetFile;
targetAlignmentFileType = existing.targetAlignmentFileType;
}
public LsfFileHeader lsfParams;
public PitchFileHeader ptcParams;
public EnergyFileHeader energyParams;
public MfccFileHeader mfccParams;
public boolean isForcedAnalysis;
public boolean isVocalTractTransformation;
public boolean isSeparateProsody;
public boolean isSaveVocalTractOnlyVersion;
public boolean isFixedRateVocalTractConversion;
public boolean isTemporalSmoothing;
public int smoothingMethod;
public int smoothingNumNeighbours;
public int smoothingState;
public String smoothedVocalTractFile;
public boolean isSourceVocalTractSpectrumFromModel;
public boolean isResynthesizeVocalTractFromSourceModel;
public boolean isVocalTractMatchUsingTargetModel;
public boolean isLsfsFromTargetFile;
public String pitchMappingFile;
// For copy-paste prosody
public boolean isPitchFromTargetFile;
public int pitchFromTargetMethod;
public boolean isDurationFromTargetFile;
public int durationFromTargetMethod;
public boolean isEnergyFromTargetFile;
public int targetAlignmentFileType;
public static final int LABELS = 1;
public static final int FESTIVAL_UTT = 2;
//
public static final double MINIMUM_ALLOWED_PITCH_SCALE = 0.6;
public static final double MAXIMUM_ALLOWED_PITCH_SCALE = 2.5;
public static final double MINIMUM_ALLOWED_TIME_SCALE = 0.6;
public static final double MAXIMUM_ALLOWED_TIME_SCALE = 2.5;
public BaselineTransformerParams() {
inputFolder = "";
outputBaseFolder = "";
outputFolder = "";
outputFolderInfoString = "";
isSourceToTarget = true;
isDisplayProcessingFrameCount = false;
prosodyParams = new ProsodyTransformerParams();
lsfParams = new LsfFileHeader();
ptcParams = new PitchFileHeader();
energyParams = new EnergyFileHeader();
mfccParams = new MfccFileHeader();
isForcedAnalysis = false;
isSourceVocalTractSpectrumFromModel = true;
isVocalTractTransformation = true;
isResynthesizeVocalTractFromSourceModel = false;
isVocalTractMatchUsingTargetModel = false;
isSeparateProsody = true;
isSaveVocalTractOnlyVersion = true;
isFixedRateVocalTractConversion = true;
isTemporalSmoothing = false;
smoothingMethod = SmoothingDefinitions.NO_SMOOTHING;
smoothingNumNeighbours = SmoothingDefinitions.DEFAULT_NUM_NEIGHBOURS;
smoothingState = SmoothingDefinitions.NONE;
smoothedVocalTractFile = "";
isSourceVocalTractSpectrumFromModel = false;
isResynthesizeVocalTractFromSourceModel = false;
isVocalTractMatchUsingTargetModel = false;
pitchMappingFile = "";
isPitchFromTargetFile = false;
pitchFromTargetMethod = ProsodyTransformerParams.SENTENCE_MEAN_STDDEV;
isDurationFromTargetFile = false;
durationFromTargetMethod = ProsodyTransformerParams.SENTENCE_DURATION;
isEnergyFromTargetFile = false;
isLsfsFromTargetFile = false;
targetAlignmentFileType = BaselineTransformerParams.LABELS;
}
public BaselineTransformerParams(BaselineTransformerParams existing) {
inputFolder = existing.inputFolder;
outputBaseFolder = existing.outputBaseFolder;
outputFolder = existing.outputFolder;
outputFolderInfoString = existing.outputFolderInfoString;
isSourceToTarget = existing.isSourceToTarget;
isDisplayProcessingFrameCount = existing.isDisplayProcessingFrameCount;
prosodyParams = new ProsodyTransformerParams(existing.prosodyParams);
lsfParams = new LsfFileHeader(existing.lsfParams);
ptcParams = new PitchFileHeader(existing.ptcParams);
energyParams = new EnergyFileHeader(existing.energyParams);
mfccParams = new MfccFileHeader(existing.mfccParams);
isForcedAnalysis = existing.isForcedAnalysis;
isVocalTractTransformation = existing.isVocalTractTransformation;
isSeparateProsody = existing.isSeparateProsody;
isSaveVocalTractOnlyVersion = existing.isSaveVocalTractOnlyVersion;
isFixedRateVocalTractConversion = existing.isFixedRateVocalTractConversion;
isTemporalSmoothing = existing.isTemporalSmoothing;
smoothingMethod = existing.smoothingMethod;
smoothingNumNeighbours = existing.smoothingNumNeighbours;
smoothingState = existing.smoothingState;
smoothedVocalTractFile = existing.smoothedVocalTractFile;
isSourceVocalTractSpectrumFromModel = existing.isSourceVocalTractSpectrumFromModel;
isResynthesizeVocalTractFromSourceModel = existing.isResynthesizeVocalTractFromSourceModel;
isVocalTractMatchUsingTargetModel = existing.isVocalTractMatchUsingTargetModel;
pitchMappingFile = existing.pitchMappingFile;
isPitchFromTargetFile = existing.isPitchFromTargetFile;
pitchFromTargetMethod = existing.pitchFromTargetMethod;
isDurationFromTargetFile = existing.isDurationFromTargetFile;
durationFromTargetMethod = existing.durationFromTargetMethod;
isEnergyFromTargetFile = existing.isEnergyFromTargetFile;
isLsfsFromTargetFile = existing.isLsfsFromTargetFile;
targetAlignmentFileType = existing.targetAlignmentFileType;
}
}
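Because these parameters are public fields initialized to defaults by the no-argument constructor, a transformation configuration is typically built by overriding a handful of them. A sketch (illustrative only; folder paths and class name are hypothetical placeholders):

import marytts.signalproc.adaptation.BaselineTransformerParams;

public class TransformerParamsSketch {
	public static void main(String[] args) {
		BaselineTransformerParams params = new BaselineTransformerParams();
		params.inputFolder = "/path/to/test/wav/"; // hypothetical folder of files to transform
		params.outputBaseFolder = "/path/to/output/"; // hypothetical base folder for results
		params.outputFolderInfoString = "demo";
		params.isSourceToTarget = true; // convert the source speaker towards the target speaker
		params.isTemporalSmoothing = false; // keep the default: no smoothing across frames
		// The copy constructor snapshots another parameter set, as the transformer constructor above does
		BaselineTransformerParams copy = new BaselineTransformerParams(params);
		System.out.println(copy.inputFolder);
	}
}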
@@ -23,227 +23,199 @@ import marytts.signalproc.analysis.Label;
import marytts.signalproc.analysis.Labels;
import marytts.util.string.StringUtils;
/**
* A wrapper class for representing phonetic context
*
* @author Oytun T&uumlrk
*/
public class Context {
public int numLeftNeighbours;
public int numRightNeighbours;
public String allContext;
public String[] leftContexts;
public String currentContext;
public String[] rightContexts;
private double[] scores;
public static final char leftContextSeparator = '.';
public static final char rightContextSeparator = ',';
public static final double LOWEST_CONTEXT_SCORE = 1.0;
public Context(Context existing)
{
numLeftNeighbours = existing.numLeftNeighbours;
numRightNeighbours = existing.numRightNeighbours;
allContext = existing.allContext;
setLeftContext(existing.leftContexts);
currentContext = existing.currentContext;
setRightContext(existing.rightContexts);
setScores();
setAllContext();
}
//Create a full context entry using a concatenated allContext entry
public Context(String allContextIn)
{
allContext = allContextIn;
parseAllContext();
}
public Context(Labels labels, int currentLabelIndex, int totalNeighbours)
{
this(labels, currentLabelIndex, totalNeighbours, totalNeighbours);
}
// leftContexts[0] = labels.items[currentLabelIndex-totalLeftNeighbours].phn
// leftContexts[1] = labels.items[currentLabelIndex-totalLeftNeighbours+1].phn
// ...
// leftContexts[totalLeftNeighbours-2] = labels.items[currentLabelIndex-2].phn
// leftContexts[totalLeftNeighbours-1] = labels.items[currentLabelIndex-1].phn
// currentContext = labels.items[currentLabelIndex].phn
// rightContexts[0] = labels.items[currentLabelIndex+1].phn
// rightContexts[1] = labels.items[currentLabelIndex+2].phn
// ...
// rightContexts[totalRightNeighbours-2] = labels.items[currentLabelIndex+totalRightNeighbours-1].phn
// rightContexts[totalRightNeighbours-1] = labels.items[currentLabelIndex+totalRightNeighbours].phn
//
// Note that non-existing context entries are represented by ""
public Context(Labels labels, int currentLabelIndex, int totalLeftNeighbours, int totalRightNeighbours)
{
leftContexts = null;
rightContexts = null;
currentContext = "";
int i;
if (totalLeftNeighbours>0)
{
leftContexts = new String[totalLeftNeighbours];
for (i=totalLeftNeighbours; i>0; i--)
{
if (labels!=null && currentLabelIndex-i>=0)
leftContexts[totalLeftNeighbours-i] = labels.items[currentLabelIndex-i].phn;
else
leftContexts[totalLeftNeighbours-i] = "";
}
}
currentContext = labels.items[currentLabelIndex].phn;
if (totalRightNeighbours>0)
{
rightContexts = new String[totalRightNeighbours];
for (i=0; i<totalRightNeighbours; i++)
{
if (labels!=null && currentLabelIndex+i+1<labels.items.length)
rightContexts[i] = labels.items[currentLabelIndex+i+1].phn;
else
rightContexts[i] = "";
}
}
setScores();
setAllContext();
}
public void setLeftContext(String[] leftContextIn)
{
leftContexts = null;
if (leftContextIn!=null)
{
leftContexts = new String[leftContextIn.length];
System.arraycopy(leftContextIn, 0, leftContexts, 0, leftContexts.length);
}
}
public void setRightContext(String[] rightContextIn)
{
rightContexts = null;
if (rightContextIn!=null)
{
rightContexts = new String[rightContextIn.length];
System.arraycopy(rightContextIn, 0, rightContexts, 0, rightContexts.length);
}
}
public void setScores()
{
int maxContext = 0;
if (leftContexts!=null)
maxContext = leftContexts.length;
if (rightContexts!=null)
maxContext = Math.max(maxContext, rightContexts.length);
scores = new double[maxContext+1];
double tmpSum = LOWEST_CONTEXT_SCORE;
for (int i=0; i<maxContext+1; i++)
{
scores[i] = tmpSum;
tmpSum = 2*tmpSum+1;
}
}
public double[] getPossibleScores()
{
double[] possibleScores = null;
if (scores!=null)
{
possibleScores = new double[2*(scores.length-1)+1];
double tmpSum = 0.0;
for (int i=0; i<scores.length-2; i++)
{
possibleScores[2*i] = tmpSum + scores[i];
possibleScores[2*i+1] = tmpSum + 2*scores[i];
tmpSum += 2*scores[i];
}
possibleScores[2*(scores.length-1)] = tmpSum + scores[scores.length-1];
}
return possibleScores;
}
//allContext = L[0].L[1]...L[N-2].L[N-1].C,R[0],R[1],R[2],...,R[N-1]
// where L:leftContexts, C:currentContext, R:rightContexts, and "." "," are the left and rightContextSeparators respectively
public void setAllContext()
{
allContext = "";
int i;
for (i=0; i<leftContexts.length; i++)
allContext += leftContexts[i] + leftContextSeparator;
allContext += currentContext + rightContextSeparator;
for (i=0; i<rightContexts.length-1; i++)
allContext += rightContexts[i] + rightContextSeparator;
allContext += rightContexts[rightContexts.length-1];
}
public void parseAllContext()
{
int[] leftInds = StringUtils.find(allContext, leftContextSeparator);
int i, start;
if (leftInds!=null)
{
leftContexts = new String[leftInds.length];
start = 0;
for (i=0; i<leftInds.length; i++)
{
leftContexts[i] = allContext.substring(start, leftInds[i]);
start = leftInds[i]+1;
}
}
else
leftContexts = null;
int[] rightInds = StringUtils.find(allContext, rightContextSeparator);
if (rightInds!=null)
{
rightContexts = new String[rightInds.length];
for (i=0; i<rightInds.length-1; i++)
rightContexts[i] = allContext.substring(rightInds[i]+1, rightInds[i+1]);
rightContexts[rightInds.length-1] = allContext.substring(rightInds[rightInds.length-1]+1, allContext.length());
}
else
rightContexts = null;
if (leftInds!=null)
{
if (rightInds!=null)
currentContext = allContext.substring(leftInds[leftInds.length-1]+1, rightInds[0]);
else
currentContext = allContext.substring(leftInds[leftInds.length-1]+1, allContext.length());
}
else
{
if (rightInds!=null)
currentContext = allContext.substring(0, rightInds[0]);
else
currentContext = allContext;
}
setScores();
}
public double matchScore(Context context)
public int numLeftNeighbours;
public int numRightNeighbours;
public String allContext;
public String[] leftContexts;
public String currentContext;
public String[] rightContexts;
private double[] scores;
public static final char leftContextSeparator = '.';
public static final char rightContextSeparator = ',';
public static final double LOWEST_CONTEXT_SCORE = 1.0;
public Context(Context existing) {
numLeftNeighbours = existing.numLeftNeighbours;
numRightNeighbours = existing.numRightNeighbours;
allContext = existing.allContext;
setLeftContext(existing.leftContexts);
currentContext = existing.currentContext;
setRightContext(existing.rightContexts);
setScores();
setAllContext();
}
// Create a full context entry using a concatenated allContext entry
public Context(String allContextIn) {
allContext = allContextIn;
parseAllContext();
}
public Context(Labels labels, int currentLabelIndex, int totalNeighbours) {
this(labels, currentLabelIndex, totalNeighbours, totalNeighbours);
}
// leftContexts[0] = labels.items[currentLabelIndex-totalLeftNeighbours].phn
// leftContexts[1] = labels.items[currentLabelIndex-totalLeftNeighbours+1].phn
// ...
// leftContexts[totalLeftNeighbours-2] = labels.items[currentLabelIndex-2].phn
// leftContexts[totalLeftNeighbours-1] = labels.items[currentLabelIndex-1].phn
// currentContext = labels.items[currentLabelIndex].phn
// rightContexts[0] = labels.items[currentLabelIndex+1].phn
// rightContexts[1] = labels.items[currentLabelIndex+2].phn
// ...
// rightContexts[totalRightNeighbours-2] = labels.items[currentLabelIndex+totalRightNeighbours-1].phn
// rightContexts[totalRightNeighbours-1] = labels.items[currentLabelIndex+totalRightNeighbours].phn
//
// Note that non-existing context entries are represented by ""
public Context(Labels labels, int currentLabelIndex, int totalLeftNeighbours, int totalRightNeighbours) {
leftContexts = null;
rightContexts = null;
currentContext = "";
int i;
if (totalLeftNeighbours > 0) {
leftContexts = new String[totalLeftNeighbours];
for (i = totalLeftNeighbours; i > 0; i--) {
if (labels != null && currentLabelIndex - i >= 0)
leftContexts[totalLeftNeighbours - i] = labels.items[currentLabelIndex - i].phn;
else
leftContexts[totalLeftNeighbours - i] = "";
}
}
currentContext = labels.items[currentLabelIndex].phn;
if (totalRightNeighbours > 0) {
rightContexts = new String[totalRightNeighbours];
for (i = 0; i < totalRightNeighbours; i++) {
if (labels != null && currentLabelIndex + i + 1 < labels.items.length)
rightContexts[i] = labels.items[currentLabelIndex + i + 1].phn;
else
rightContexts[i] = "";
}
}
setScores();
setAllContext();
}
public void setLeftContext(String[] leftContextIn) {
leftContexts = null;
if (leftContextIn != null) {
leftContexts = new String[leftContextIn.length];
System.arraycopy(leftContextIn, 0, leftContexts, 0, leftContexts.length);
}
}
public void setRightContext(String[] rightContextIn) {
rightContexts = null;
if (rightContextIn != null) {
rightContexts = new String[rightContextIn.length];
System.arraycopy(rightContextIn, 0, rightContexts, 0, rightContexts.length);
}
}
public void setScores() {
int maxContext = 0;
if (leftContexts != null)
maxContext = leftContexts.length;
if (rightContexts != null)
maxContext = Math.max(maxContext, rightContexts.length);
scores = new double[maxContext + 1];
double tmpSum = LOWEST_CONTEXT_SCORE;
for (int i = 0; i < maxContext + 1; i++) {
scores[i] = tmpSum;
tmpSum = 2 * tmpSum + 1;
}
}
public double[] getPossibleScores() {
double[] possibleScores = null;
if (scores != null) {
possibleScores = new double[2 * (scores.length - 1) + 1];
double tmpSum = 0.0;
for (int i = 0; i < scores.length - 2; i++) {
possibleScores[2 * i] = tmpSum + scores[i];
possibleScores[2 * i + 1] = tmpSum + 2 * scores[i];
tmpSum += 2 * scores[i];
}
possibleScores[2 * (scores.length - 1)] = tmpSum + scores[scores.length - 1];
}
return possibleScores;
}
// allContext = L[0].L[1]...L[N-2].L[N-1].C,R[0],R[1],R[2],...,R[N-1]
// where L:leftContexts, C:currentContext, R:rightContexts, and "." "," are the left and rightContextSeparators respectively
public void setAllContext() {
allContext = "";
int i;
for (i = 0; i < leftContexts.length; i++)
allContext += leftContexts[i] + leftContextSeparator;
allContext += currentContext + rightContextSeparator;
for (i = 0; i < rightContexts.length - 1; i++)
allContext += rightContexts[i] + rightContextSeparator;
allContext += rightContexts[rightContexts.length - 1];
}
public void parseAllContext() {
int[] leftInds = StringUtils.find(allContext, leftContextSeparator);
int i, start;
if (leftInds != null) {
leftContexts = new String[leftInds.length];
start = 0;
for (i = 0; i < leftInds.length; i++) {
leftContexts[i] = allContext.substring(start, leftInds[i]);
start = leftInds[i] + 1;
}
} else
leftContexts = null;
int[] rightInds = StringUtils.find(allContext, rightContextSeparator);
if (rightInds != null) {
rightContexts = new String[rightInds.length];
for (i = 0; i < rightInds.length - 1; i++)
rightContexts[i] = allContext.substring(rightInds[i] + 1, rightInds[i + 1]);
rightContexts[rightInds.length - 1] = allContext.substring(rightInds[rightInds.length - 1] + 1, allContext.length());
} else
rightContexts = null;
if (leftInds != null) {
if (rightInds != null)
currentContext = allContext.substring(leftInds[leftInds.length - 1] + 1, rightInds[0]);
else
currentContext = allContext.substring(leftInds[leftInds.length - 1] + 1, allContext.length());
} else {
if (rightInds != null)
currentContext = allContext.substring(0, rightInds[0]);
else
currentContext = allContext;
}
setScores();
}
public double matchScore(Context context)
{
assert leftContexts.length == context.leftContexts.length;
assert rightContexts.length == context.rightContexts.length;
@@ -272,41 +244,39 @@ public class Context {
return score;
}
public static void main(String[] args)
{
Label[] items1 = new Label[5];
Label[] items2 = new Label[5];
for (int i=0; i<items1.length; i++) {
items1[i] = new Label();
items2[i] = new Label();
}
Labels labels1 = new Labels(items1);
Labels labels2 = new Labels(items2);
labels1.items[0].phn = "A";
labels1.items[1].phn = "B";
labels1.items[2].phn = "C";
labels1.items[3].phn = "D";
labels1.items[4].phn = "E";
labels2.items[0].phn = "A1";
labels2.items[1].phn = "B";
labels2.items[2].phn = "C";
labels2.items[3].phn = "D1";
labels2.items[4].phn = "E1";
Context c1 = new Context(labels1, 2, 2);
Context c2 = new Context(labels2, 2, 2);
System.out.println(String.valueOf(c1.matchScore(c2)));
Context c3 = new Context("t.u.nl,i,n");
System.out.println(c3.currentContext);
double[] possibleScores = c1.getPossibleScores();
System.out.println("Test completed");
}
}
public static void main(String[] args) {
Label[] items1 = new Label[5];
Label[] items2 = new Label[5];
for (int i = 0; i < items1.length; i++) {
items1[i] = new Label();
items2[i] = new Label();
}
Labels labels1 = new Labels(items1);
Labels labels2 = new Labels(items2);
labels1.items[0].phn = "A";
labels1.items[1].phn = "B";
labels1.items[2].phn = "C";
labels1.items[3].phn = "D";
labels1.items[4].phn = "E";
labels2.items[0].phn = "A1";
labels2.items[1].phn = "B";
labels2.items[2].phn = "C";
labels2.items[3].phn = "D1";
labels2.items[4].phn = "E1";
Context c1 = new Context(labels1, 2, 2);
Context c2 = new Context(labels2, 2, 2);
System.out.println(String.valueOf(c1.matchScore(c2)));
Context c3 = new Context("t.u.nl,i,n");
System.out.println(c3.currentContext);
double[] possibleScores = c1.getPossibleScores();
System.out.println("Test completed");
}
}
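The comment above setAllContext() defines the string layout that the String-based constructor parses back via parseAllContext(). Below is a minimal round-trip sketch, not part of the commit; it assumes Context lives in the marytts.signalproc.adaptation package like the other classes in this diff, and that currentContext is accessible from outside the class the same way it is used in the class's own main().

import marytts.signalproc.adaptation.Context; // package assumed; not visible in this hunk

public class ContextFormatSketch {
	public static void main(String[] args) {
		// "." joins the left contexts, "," joins the current and right contexts,
		// so "t.u.nl,i,n" encodes leftContexts {"t", "u"}, currentContext "nl",
		// rightContexts {"i", "n"}.
		Context c = new Context("t.u.nl,i,n");
		System.out.println(c.currentContext); // expected: nl
	}
}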

File diff not shown because it is too large.

View file

@@ -24,211 +24,184 @@ import java.io.IOException;
import marytts.util.io.FileUtils;
import marytts.util.string.StringUtils;
/**
* Generic utility class for renaming and copying voice conversion training files
*
* @author Oytun T&uuml;rk
*/
public class FileCopier {
public FileCopier()
{
}
//Generate appropriate wav and lab files by copying
// source and target files from sourceInputBaseDir and targetInputBaseDir.
// This is required since source and target files might not have identical filenames
//
// to sourceTrainingBaseDir and targetTrainingBaseDir with appropriate renaming
// The output will be wav and lab files for source and target for parallel voice conversion training
// and when the wav and lab files are sorted according to filenames, they will be
// identical in content,
//
// sourceTargetFile is a text file which has two columns that list the mapping between
// source and target files under input base directories:
//
// sourceFileName1 targetFileName1
// sourceFileName2 targetFileName2
// ...etc
//
// The generated source files will have identical filenames with the input source files
// Target files will be copied with a new name in the following format:
//
// sourceFileName1_targetFileName1.wav,
// sourceFileName1_targetFileName1.lab, etc.
//
public void copy(String sourceTargetFile, //Input
String sourceInputBaseDir, //Input
String targetInputBaseDir, //Input
String sourceTrainingBaseDir, //Output
String targetTrainingBaseDir) //Output
throws IOException
{
String[][] stNameMap = StringUtils.readTextFileInRows(sourceTargetFile, "UTF-8", 2);
int i;
//Determine source and target input sub directories
sourceInputBaseDir = StringUtils.checkLastSlash(sourceInputBaseDir);
targetInputBaseDir = StringUtils.checkLastSlash(targetInputBaseDir);
String sourceInputWavDir = sourceInputBaseDir + "wav/";
String targetInputWavDir = targetInputBaseDir + "wav/";
String sourceInputLabDir = sourceInputBaseDir + "lab/";
String targetInputLabDir = targetInputBaseDir + "lab/";
if (!FileUtils.exists(sourceInputWavDir))
{
System.out.println("Error! Folder not found: " + sourceInputWavDir);
return;
}
if (!FileUtils.exists(targetInputWavDir))
{
System.out.println("Error! Folder not found: " + targetInputWavDir);
return;
}
if (!FileUtils.exists(sourceInputLabDir))
{
System.out.println("Error! Folder not found: " + sourceInputLabDir);
return;
}
if (!FileUtils.exists(targetInputLabDir))
{
System.out.println("Error! Folder not found: " + targetInputLabDir);
return;
}
//
//Create training sub-folders for source and target
sourceTrainingBaseDir = StringUtils.checkLastSlash(sourceTrainingBaseDir);
targetTrainingBaseDir = StringUtils.checkLastSlash(targetTrainingBaseDir);
FileUtils.createDirectory(sourceTrainingBaseDir);
FileUtils.createDirectory(targetTrainingBaseDir);
//
if (stNameMap!=null)
{
System.out.println("Generating - " + sourceTrainingBaseDir + " and " + targetTrainingBaseDir);
String tmpFileIn, tmpFileOut;
for (i=0; i<stNameMap.length; i++)
{
//Source wav
tmpFileIn = sourceInputWavDir + stNameMap[i][0] + ".wav";
tmpFileOut = sourceTrainingBaseDir + stNameMap[i][0] + ".wav";
if (!FileUtils.exists(tmpFileOut))
{
if (FileUtils.exists(tmpFileIn))
{
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else
{
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
//Source lab
tmpFileIn = sourceInputLabDir + stNameMap[i][0] + ".lab";
tmpFileOut = sourceTrainingBaseDir + stNameMap[i][0] + ".lab";
if (!FileUtils.exists(tmpFileOut))
{
if (FileUtils.exists(tmpFileIn))
{
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else
{
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
//Target wav
tmpFileIn = targetInputWavDir + stNameMap[i][1] + ".wav";
tmpFileOut = targetTrainingBaseDir + stNameMap[i][0] + "_" + stNameMap[i][1] + ".wav";
if (!FileUtils.exists(tmpFileOut))
{
if (FileUtils.exists(tmpFileIn))
{
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else
{
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
//Target lab
tmpFileIn = targetInputLabDir + stNameMap[i][1] + ".lab";
tmpFileOut = targetTrainingBaseDir + stNameMap[i][0] + "_" + stNameMap[i][1] + ".lab";
if (!FileUtils.exists(tmpFileOut))
{
if (FileUtils.exists(tmpFileIn))
{
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
else
{
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
System.out.println(String.valueOf(i+1) + " of " + String.valueOf(stNameMap.length));
}
}
}
public static void main(String[] args) throws Exception
{
FileCopier f = new FileCopier();
String sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir;
sourceTargetFile = "D:/Oytun/DFKI/voices/Interspeech08/mappings-mini-ea.txt";
sourceInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Neutral";
sourceTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/neutral";
//Obadiah_Sad
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Obadiah_Sad";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/sad";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
//Poppy_Happy
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Poppy_Happy";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/happy";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
//Spike_Angry
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Spike_Angry";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/angry";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
}
}
public FileCopier() {
}
// Generates the wav and lab files needed for parallel voice conversion training by copying
// source and target files from sourceInputBaseDir and targetInputBaseDir
// to sourceTrainingBaseDir and targetTrainingBaseDir, renaming them where necessary.
// This is required since source and target files might not have identical filenames;
// after copying, sorting the training wav and lab files by filename yields
// source/target pairs that correspond in content.
//
// sourceTargetFile is a text file with two columns that list the mapping between
// source and target files under the input base directories:
//
// sourceFileName1 targetFileName1
// sourceFileName2 targetFileName2
// ...etc
//
// The generated source files keep the filenames of the input source files.
// Target files are copied under a new name in the following format:
//
// sourceFileName1_targetFileName1.wav,
// sourceFileName1_targetFileName1.lab, etc.
//
public void copy(String sourceTargetFile, // Input
String sourceInputBaseDir, // Input
String targetInputBaseDir, // Input
String sourceTrainingBaseDir, // Output
String targetTrainingBaseDir) // Output
throws IOException {
String[][] stNameMap = StringUtils.readTextFileInRows(sourceTargetFile, "UTF-8", 2);
int i;
// Determine source and target input sub directories
sourceInputBaseDir = StringUtils.checkLastSlash(sourceInputBaseDir);
targetInputBaseDir = StringUtils.checkLastSlash(targetInputBaseDir);
String sourceInputWavDir = sourceInputBaseDir + "wav/";
String targetInputWavDir = targetInputBaseDir + "wav/";
String sourceInputLabDir = sourceInputBaseDir + "lab/";
String targetInputLabDir = targetInputBaseDir + "lab/";
if (!FileUtils.exists(sourceInputWavDir)) {
System.out.println("Error! Folder not found: " + sourceInputWavDir);
return;
}
if (!FileUtils.exists(targetInputWavDir)) {
System.out.println("Error! Folder not found: " + targetInputWavDir);
return;
}
if (!FileUtils.exists(sourceInputLabDir)) {
System.out.println("Error! Folder not found: " + sourceInputLabDir);
return;
}
if (!FileUtils.exists(targetInputLabDir)) {
System.out.println("Error! Folder not found: " + targetInputLabDir);
return;
}
//
// Create training sub-folders for source and target
sourceTrainingBaseDir = StringUtils.checkLastSlash(sourceTrainingBaseDir);
targetTrainingBaseDir = StringUtils.checkLastSlash(targetTrainingBaseDir);
FileUtils.createDirectory(sourceTrainingBaseDir);
FileUtils.createDirectory(targetTrainingBaseDir);
//
if (stNameMap != null) {
System.out.println("Generating - " + sourceTrainingBaseDir + " and " + targetTrainingBaseDir);
String tmpFileIn, tmpFileOut;
for (i = 0; i < stNameMap.length; i++) {
// Source wav
tmpFileIn = sourceInputWavDir + stNameMap[i][0] + ".wav";
tmpFileOut = sourceTrainingBaseDir + stNameMap[i][0] + ".wav";
if (!FileUtils.exists(tmpFileOut)) {
if (FileUtils.exists(tmpFileIn)) {
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
// Source lab
tmpFileIn = sourceInputLabDir + stNameMap[i][0] + ".lab";
tmpFileOut = sourceTrainingBaseDir + stNameMap[i][0] + ".lab";
if (!FileUtils.exists(tmpFileOut)) {
if (FileUtils.exists(tmpFileIn)) {
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
// Target wav
tmpFileIn = targetInputWavDir + stNameMap[i][1] + ".wav";
tmpFileOut = targetTrainingBaseDir + stNameMap[i][0] + "_" + stNameMap[i][1] + ".wav";
if (!FileUtils.exists(tmpFileOut)) {
if (FileUtils.exists(tmpFileIn)) {
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
// Target lab
tmpFileIn = targetInputLabDir + stNameMap[i][1] + ".lab";
tmpFileOut = targetTrainingBaseDir + stNameMap[i][0] + "_" + stNameMap[i][1] + ".lab";
if (!FileUtils.exists(tmpFileOut)) {
if (FileUtils.exists(tmpFileIn)) {
try {
FileUtils.copy(tmpFileIn, tmpFileOut);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
} else {
System.out.println("Error! Input file not found: " + tmpFileIn);
return;
}
}
//
System.out.println(String.valueOf(i + 1) + " of " + String.valueOf(stNameMap.length));
}
}
}
public static void main(String[] args) throws Exception {
FileCopier f = new FileCopier();
String sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir;
sourceTargetFile = "D:/Oytun/DFKI/voices/Interspeech08/mappings-mini-ea.txt";
sourceInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Neutral";
sourceTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/neutral";
// Obadiah_Sad
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Obadiah_Sad";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/sad";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
// Poppy_Happy
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Poppy_Happy";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/happy";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
// Spike_Angry
targetInputBaseDir = "D:/Oytun/DFKI/voices/DFKI_German_Spike_Angry";
targetTrainingBaseDir = "D:/Oytun/DFKI/voices/Interspeech08/angry";
f.copy(sourceTargetFile, sourceInputBaseDir, targetInputBaseDir, sourceTrainingBaseDir, targetTrainingBaseDir);
}
}
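The comment block above copy() describes the two-column mapping file and the wav/ and lab/ folder layout it expects. The following is a hedged usage sketch, not part of the commit: the package is assumed (it is not shown in this hunk) and all paths and file names are made up.

import java.io.IOException;

import marytts.signalproc.adaptation.FileCopier; // package assumed; not visible in this hunk

public class FileCopierSketch {
	public static void main(String[] args) throws IOException {
		// mappings.txt (hypothetical) lists one "sourceName targetName" pair per line,
		// and each input base dir is expected to contain wav/ and lab/ subfolders.
		FileCopier copier = new FileCopier();
		copier.copy("/data/vc/mappings.txt", // sourceTargetFile
				"/data/vc/source", // sourceInputBaseDir
				"/data/vc/target", // targetInputBaseDir
				"/data/vc/train/source", // sourceTrainingBaseDir (created if missing)
				"/data/vc/train/target"); // targetTrainingBaseDir (created if missing)
	}
}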

View file

@@ -25,54 +25,41 @@ package marytts.signalproc.adaptation;
* @author Oytun T&uuml;rk
*/
public class FileMap {
public int [][] indicesMap;
public FileMap()
{
allocate(0,0);
}
public FileMap(int numGroups)
{
allocate(numGroups,0);
}
public FileMap(int numGroups, int numItems)
{
allocate(numGroups,numItems);
}
public FileMap(FileMap fm)
{
if (fm.indicesMap!=null)
{
indicesMap = new int[fm.indicesMap.length][];
for (int i=0; i<fm.indicesMap.length; i++)
{
if (fm.indicesMap[i]!=null)
{
indicesMap[i] = new int[fm.indicesMap[i].length];
System.arraycopy(fm.indicesMap[i], 0, indicesMap[i], 0, fm.indicesMap[i].length);
}
else
indicesMap[i] = null;
}
}
else
indicesMap = null;
}
public void allocate(int numGroups, int numItems)
{
if (numGroups>0)
{
if (numItems>0)
indicesMap = new int[numGroups][numItems];
else
indicesMap = new int[numGroups][];
}
else
indicesMap = null;
}
}
public int[][] indicesMap;
public FileMap() {
allocate(0, 0);
}
public FileMap(int numGroups) {
allocate(numGroups, 0);
}
public FileMap(int numGroups, int numItems) {
allocate(numGroups, numItems);
}
public FileMap(FileMap fm) {
if (fm.indicesMap != null) {
indicesMap = new int[fm.indicesMap.length][];
for (int i = 0; i < fm.indicesMap.length; i++) {
if (fm.indicesMap[i] != null) {
indicesMap[i] = new int[fm.indicesMap[i].length];
System.arraycopy(fm.indicesMap[i], 0, indicesMap[i], 0, fm.indicesMap[i].length);
} else
indicesMap[i] = null;
}
} else
indicesMap = null;
}
public void allocate(int numGroups, int numItems) {
if (numGroups > 0) {
if (numItems > 0)
indicesMap = new int[numGroups][numItems];
else
indicesMap = new int[numGroups][];
} else
indicesMap = null;
}
}
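FileMap is a thin holder for a jagged int[][] of file indices. Here is a short sketch, not part of the commit, of the allocation paths and the deep-copy constructor shown above; the package comes from the hunk header, while the group and item counts are arbitrary.

import marytts.signalproc.adaptation.FileMap;

public class FileMapSketch {
	public static void main(String[] args) {
		// numGroups > 0 and numItems > 0: a rectangular, zero-initialized map.
		FileMap rect = new FileMap(3, 4);
		rect.indicesMap[0][2] = 7;

		// numItems == 0: rows stay null and can be filled with per-group lengths later.
		FileMap jagged = new FileMap(3);
		jagged.indicesMap[1] = new int[] { 10, 11 };

		// The copy constructor deep-copies row by row (null rows stay null).
		FileMap copy = new FileMap(rect);
		System.out.println(copy.indicesMap[0][2]); // prints 7
	}
}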

Some files were not shown because too many files changed in this diff.