// Copyright © 2010-2013 ENS de Lyon.
// Copyright © 2007-2010 ENS de Lyon, CNRS, INRP, University of
// Lyon 2, University of Franche-Comté, University of Nice
// Sophia Antipolis, University of Paris 3.
//
// The TXM platform is free software: you can redistribute it
// and/or modify it under the terms of the GNU General Public
// License as published by the Free Software Foundation,
// either version 2 of the License, or (at your option) any
// later version.
//
// The TXM platform is distributed in the hope that it will be
// useful, but WITHOUT ANY WARRANTY; without even the implied
// warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
// PURPOSE. See the GNU General Public License for more
// details.
//
// You should have received a copy of the GNU General
// Public License along with the TXM platform. If not, see
// http://www.gnu.org/licenses.
//
//
//
// $LastChangedDate: 2016-09-19 10:31:00 +0200 (Mon, 19 Sep 2016) $
// $LastChangedRevision: 3298 $
// $LastChangedBy: mdecorde $
//
package org.txm.lexicaltable.core.statsengine.r.data;

import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

//import org.txm.functions.queryindex.*;
import org.txm.index.core.functions.Index;
import org.txm.index.core.functions.Line;
import org.txm.lexicaltable.core.messages.LexicalTableCoreMessages;
import org.txm.lexicaltable.core.statsengine.data.LexicalTable;
import org.txm.searchengine.cqp.clientExceptions.CqiClientException;
import org.txm.searchengine.cqp.corpus.Corpus;
import org.txm.searchengine.cqp.corpus.Lexicon;
import org.txm.searchengine.cqp.corpus.Partition;
import org.txm.searchengine.cqp.corpus.Property;
import org.txm.statsengine.core.StatException;
import org.txm.statsengine.core.data.QuantitativeDataStructure;
import org.txm.statsengine.core.data.Vector;
import org.txm.statsengine.r.core.RException;
import org.txm.statsengine.r.core.RWorkspace;
import org.txm.statsengine.r.core.RWorkspaceException;
import org.txm.statsengine.r.core.data.ContingencyTableImpl;

import cern.colt.matrix.DoubleFactory2D;
import cern.colt.matrix.DoubleMatrix2D;

// TODO: Auto-generated Javadoc
/**
 * Implementation of the {@link LexicalTable} interface, wrapping an R matrix.
 *
 * A lexical table is a contingency table representing the frequencies of
 * linguistic types across several sub-parts of a corpus.
 *
 * Each column of the lexical table stands for a part. Each row stands for a
 * linguistic type. Each cell gives the frequency of the corresponding unit in
 * the corresponding part.
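 *
 * <p>Usage sketch (illustration only; {@code partition} and {@code property} are assumed
 * to be an existing {@link Partition} and a word {@link Property}):
 * <pre>{@code
 * // build the types x parts frequency table, keeping types whose total frequency is >= 2
 * LexicalTable table = LexicalTableImpl.getLexicalTable(partition, property, 2);
 * int nTypes = table.getRowsCount();    // one row per linguistic type
 * int nParts = table.getColumnsCount(); // one column per part of the partition
 * }</pre>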
 *
 * @author Sylvain Loiseau <sloiseau@ens-lsh.fr>
 *
 */
public class LexicalTableImpl extends ContingencyTableImpl implements LexicalTable {

    /** The partition. */
    private Partition partition;

    /** The corpus. */
    private Corpus corpus;

    /** The property. */
    private Property property;

    /** The sortedindex. */
    private int[] sortedindex;

    /** The constructor_fmin. */
    private int constructor_fmin;

    /**
     * Instantiates a new lexical table impl.
     *
     * @param matrix the matrix
     * @param partition the partition
     * @param property the property
     * @param formNames the form names
     * @param partNames the part names
     * @throws RWorkspaceException the r workspace exception
     */
    private LexicalTableImpl(DoubleMatrix2D matrix, Partition partition,
            Property property, String[] formNames, String[] partNames)
            throws RWorkspaceException {
        super(matrix, formNames, partNames);

        this.partition = partition;
        this.partition.addResult(this);
        this.property = property;
        initSortedIndex();
    }

    /**
     * Instantiates a new lexical table impl.
     *
     * @param matrix the matrix
     * @param property the property
     * @param formNames the form names
     * @param partNames the part names
     * @throws RWorkspaceException the r workspace exception
     */
    private LexicalTableImpl(DoubleMatrix2D matrix, Property property,
            String[] formNames, String[] partNames) throws RWorkspaceException {
        super(matrix, formNames, partNames);

        this.property = property;
        initSortedIndex();
    }

    /**
     * Instantiates a new lexical table impl.
     *
     * @param mat the frequency matrix
     * @param partition the partition
     * @param property the property
     * @param array the form (row) names
     * @param array2 the part (column) names
     * @throws RWorkspaceException the r workspace exception
     */
    public LexicalTableImpl(int[][] mat, Partition partition,
            Property property, String[] array, String[] array2) throws RWorkspaceException {
        this(mat, property, array, array2);
        this.partition = partition;
        this.partition.addResult(this);
    }

    /**
     * Instantiates a new lexical table impl.
     *
     * @param matrix the matrix
     * @param property the property
     * @param formNames the form names
     * @param partNames the part names
     * @throws RWorkspaceException the r workspace exception
     */
    private LexicalTableImpl(int[][] matrix, Property property,
            String[] formNames, String[] partNames) throws RWorkspaceException {
        super(matrix, formNames, partNames);

        this.property = property;
        initSortedIndex();
    }

    /**
     * Instantiates a new lexical table impl.
     *
     * @param table the table
     * @param symbol the symbol
     * @throws RWorkspaceException the r workspace exception
     */
    public LexicalTableImpl(LexicalTableImpl table, String symbol)
            throws RWorkspaceException {
        super(symbol);
        this.partition = table.getPartition();
        this.partition.addResult(this);
        this.property = table.getProperty();
        initSortedIndex();
    }

    /**
     * Instantiates a new lexical table impl.
     *
     * @param symbol the symbol
     * @throws RWorkspaceException the r workspace exception
     */
    public LexicalTableImpl(String symbol)
            throws RWorkspaceException {
        super(symbol);
        this.partition = null;
        this.property = null;
    }

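    /**
     * Instantiates a new lexical table from two lexicons, by calling the R function
     * {@code lexicons2LexicalTable} on their frequency vectors (the exact layout of the
     * resulting table, presumably one column per lexicon, is defined on the R side).
     *
     * @param symbol the R symbol the table is bound to
     * @param corpusLexicon the lexicon of the reference corpus
     * @param subcorpusLexicon the lexicon of the sub-corpus
     * @throws StatException the stat exception
     */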
    public LexicalTableImpl(String symbol, Lexicon corpusLexicon, Lexicon subcorpusLexicon) throws StatException {
        super(symbol);
        RWorkspace rw = RWorkspace.getRWorkspaceInstance();
        Vector corpusLexiconV = corpusLexicon.asVector();
        Vector subcorpusLexiconV = subcorpusLexicon.asVector();

        rw.callFunction("lexicons2LexicalTable", new QuantitativeDataStructure[] { corpusLexiconV, subcorpusLexiconV }, symbol); //$NON-NLS-1$

        this.property = corpusLexicon.getProperty();
        this.corpus = corpusLexicon.getCorpus();
        this.corpus.addResult(this);
    }

    /**
     * Inits the sorted index.
     */
    public void initSortedIndex() {
        int ncol = this.getNColumns();
        sortedindex = new int[ncol];
        for (int i = 0; i < ncol; i++)
            sortedindex[i] = i;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getPartition()
     */
    @Override
    public Partition getPartition() {
        return partition;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getProperty()
     */
    @Override
    public Property getProperty() {
        return property;
    }

    /**
     * Creates a complete lexical table from a partition and a property.
     *
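     * <p>Rows are the corpus forms whose total frequency reaches {@code Fmin}; columns are
     * the parts of the partition. Illustration with hypothetical counts, two parts and
     * {@code Fmin} = 2:
     * <pre>
     *        part1  part2
     * the       10      7
     * of         4      9
     * </pre>
     *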
     * @param partition the partition
     * @param analysisProperty the analysis property
     * @param Fmin the minimum total frequency a form must reach to be kept in the table
     * @return the lexical table
     * @throws CqiClientException the cqi client exception
     * @throws RWorkspaceException the r workspace exception
     */
    public static final LexicalTable getLexicalTable(Partition partition,
            Property analysisProperty, int Fmin) throws CqiClientException,
            RWorkspaceException {
        //long time = System.currentTimeMillis();
        List<Lexicon> lexicons = new ArrayList<Lexicon>();
        // Set<String> allLexiconEntry = new HashSet<String>();
        for (int i = 0; i < partition.getNPart(); i++) {
            Lexicon l = partition.getParts().get(i).getLexicon(analysisProperty);
            lexicons.add(l);
            // allLexiconEntry.addAll(Arrays.asList(l.getForms()));
        }
        //System.out.println("time lexicon build "+(System.currentTimeMillis()-time));
        //time = System.currentTimeMillis();
        // String[] entries = allLexiconEntry.toArray(new String[]{});
        Corpus c = partition.getCorpus();
        Lexicon ll = c.getLexicon(analysisProperty);

        ArrayList<String> filteredForms = new ArrayList<String>();
        // create a copy and filter out the forms whose corpus frequency is below Fmin
        for (int i = 0; i < ll.getFreq().length; i++) {
            if (ll.getFreq()[i] >= Fmin) {
                filteredForms.add(ll.getForms()[i]);
            }
        }
        //System.out.println("remove freq too low "+(System.currentTimeMillis()-time));
        //time = System.currentTimeMillis();
        Map<String, Integer> entries2index = new HashMap<String, Integer>();
        for (int i = 0; i < filteredForms.size(); i++) {
            entries2index.put(filteredForms.get(i), i);
        }

        //System.out.println("entries2index "+(System.currentTimeMillis()-time));
        //time = System.currentTimeMillis();
        int[][] mat = new int[filteredForms.size()][lexicons.size()]; //DoubleFactory2D.sparse.make(filteredForms.size(), lexicons.size(), 0);

        Integer id = null;
        for (int i = 0; i < lexicons.size(); i++) {
            Lexicon l = lexicons.get(i);
            String[] ents = l.getForms();
            int[] freqs = l.getFreq();
            for (int j = 0; j < freqs.length; j++) {
                id = entries2index.get(ents[j]);
                // if (entriesFreqs[id] >= 2)
                if (id != null)
                    mat[id][i] = freqs[j]; //mat.setQuick(id, i, freqs[j]);
            }
        }
        //System.out.println("time build matrix "+(System.currentTimeMillis()-time));
        //time = System.currentTimeMillis();
        //System.out.println("Entries size " + filteredForms.size());
        //System.out.println("mat size " + mat.rows());
        //System.out.println("mat columns " + mat.columns());

        LexicalTableImpl table = new LexicalTableImpl(mat, partition,
                analysisProperty, filteredForms.toArray(new String[]{}), partition.getPartShortNames()
                .toArray(new String[] {}));
        table.constructor_fmin = Fmin;
        //System.out.println("time build table lexical "+(System.currentTimeMillis()-time));
        return table;
    }

    /**
     * Creates the lexical table impl.
     *
     * @param partindex the partindex
     * @param symbol the symbol
     * @return the lexical table
     * @throws RWorkspaceException the r workspace exception
     */
    static public LexicalTable createLexicalTableImpl(Index partindex,
            String symbol) throws RWorkspaceException {
        List<Index> vocabularies = new ArrayList<Index>();
        vocabularies.add(partindex);

        LexicalTable lt = createLexicalTableImpl(vocabularies, symbol, false);
        lt.setParent(partindex);
        return lt;
    }

    /** Counts the lines of a file; returns 0 if the file cannot be read. */
    private static int getNline(File f) {
        try {
            BufferedReader reader = new BufferedReader(new FileReader(f));
            int i = 0;

            while (reader.readLine() != null) i++;
            reader.close();
            return i;
        } catch (Exception e) { return 0; }
    }

    // /**
    // * Creates the lexical table impl.
    // *
    // * @param partindex the partindex
    // * @param symbol the symbol
    // * @return the lexical table
    // * @throws RWorkspaceException the r workspace exception
    // */
    // static public LexicalTable createLexicalTableImpl(QueryIndex partindex,
    // String symbol) throws RWorkspaceException {
    // List<QueryIndex> qindexes = new ArrayList<QueryIndex>();
    // qindexes.add(partindex);
    //
    // return createLexicalTableImpl(qindexes, symbol);
    // }

    // /**
    // * Creates the lexical table impl.
    // *
    // * @param query indexes
    // * @param symbol the symbol
    // * @param useAllOccurrences
    // * @return the lexical table
    // * @throws RWorkspaceException the r workspace exception
    // */
    // static public LexicalTable createLexicalTableImpl(
    // List<QueryIndex> qindexes, String symbol)
    // throws RWorkspaceException {
    //
    // System.out.println(Messages.LexicalTableImpl_1 + qindexes);
    // QueryIndex partindex = qindexes.get(0);// FRIGO
    // if (!partindex.isComputedWithPartition())
    // return null;
    //
    // Partition partition = partindex.getPartition();
    // Property property = null;
    // try {
    // property = partindex.getCorpus().getProperties().get(0);
    // } catch (CqiClientException e) {
    // // TODO Auto-generated catch block
    // org.txm.utils.logger.Log.printStackTrace(e);
    // }
    //
    // HashMap<String, QueryIndexLine> alllines = new HashMap<String, QueryIndexLine>();
    // // merge lines of all indexes
    // for (QueryIndex voc : qindexes) {
    // for (QueryIndexLine l : voc.getLines()) {
    // alllines.put(l.getName(), l);
    // }
    // }
    //
    // List<String> colnames = partindex.getPartnames();
    //
    // Collection<QueryIndexLine> lines = alllines.values();
    // List<String> rownames = new ArrayList<String>(lines.size());
    // for (QueryIndexLine l : lines) {
    // rownames.add(l.getName());
    // }
    //
    // String[] entries = new String[alllines.size()];
    //
    // int[][] mat = new int[rownames.size()][colnames.size()];
    // int[] margins = new int[colnames.size()]; // compute margins
    // int i = 0;
    // for (QueryIndexLine l : lines) {
    // for (int j = 0; j < colnames.size(); j++) {
    // mat[i][j] = l.getFrequency(j);
    // margins[j] += l.getFrequency(j);
    // }
    // entries[i++] = l.toString();
    // }
    //
    // //System.out.println("mat size : ["+(rownames.size() + extra)+"]["+colnames.size()+"]");
    // //System.out.println("rownames size : "+rownames.size());
    // //System.out.println("colnames size : "+colnames.size());
    // LexicalTableImpl table = new LexicalTableImpl(mat, partition, property,
    // rownames.toArray(new String[] {}), colnames
    // .toArray(new String[] {}));
    // table.constructor_fmin = qindexes.get(0).getFmin();
    // return table;
    // }

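    /**
     * Creates a lexical table from a TSV file. Expected format (inferred from the parsing
     * below, shown here with hypothetical values): one row per form, the first column
     * holding the form and the remaining columns its frequencies, every row having the
     * same number of columns as the first one:
     * <pre>
     * the    10    7
     * of      4    9
     * </pre>
     * Columns are named "forms1", "forms2", ... and the table is built with Fmin = 1.
     *
     * @param tsvFile the TAB-separated frequency file
     * @return the lexical table
     * @throws IOException if the file is malformed
     * @throws RWorkspaceException the r workspace exception
     */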
    static public LexicalTable createLexicalTableImpl(File tsvFile) throws IOException, RWorkspaceException {

        BufferedReader reader = new BufferedReader(new InputStreamReader(new FileInputStream(tsvFile), "UTF-8")); //$NON-NLS-1$
        String line = reader.readLine();
        String[] split = line.split("\t"); //$NON-NLS-1$
        int ncol = split.length;
        if (ncol <= 1) {
            reader.close();
            throw new IOException(LexicalTableCoreMessages.LexicalTableImpl_5 + ncol + ")"); //$NON-NLS-1$
        }

        int nlines = getNline(tsvFile);
        String[] forms = new String[nlines];
        int[][] freqs = new int[nlines][ncol - 1];

        int i = 0;
        while (line != null) {
            split = line.split("\t"); //$NON-NLS-1$
            if (split.length == ncol) {
                forms[i] = split[0];
                for (int j = 1; j < ncol; j++)
                    freqs[i][j - 1] = Integer.parseInt(split[j]);
            } else {
                reader.close();
                throw new IOException(LexicalTableCoreMessages.LexicalTableImpl_8 + i + LexicalTableCoreMessages.LexicalTableImpl_9 + ncol + LexicalTableCoreMessages.LexicalTableImpl_10);
            }
            i++;
            line = reader.readLine();
        }
        reader.close();

        String[] colnames = new String[ncol - 1];
        for (int j = 0; j < ncol - 1; j++) colnames[j] = "forms" + (j + 1); //$NON-NLS-1$

        LexicalTable lt = LexicalTableImpl.createLexicalTable(freqs, null, forms, colnames, 1);

        return lt;
    }

    /**
     * Creates a lexical table by merging the lines of one or more indexes computed on the
     * same partition.
     *
     * @param vocabularies the indexes whose merged lines provide the rows of the table
     * @param symbol the symbol
     * @param useAllOccurrences if true, a #RESTE# row is added holding, for each part, the occurrences not covered by the index lines
     * @return the lexical table
     * @throws RWorkspaceException the r workspace exception
     */
    static public LexicalTable createLexicalTableImpl(
            List<Index> vocabularies, String symbol, boolean useAllOccurrences)
            throws RWorkspaceException {

        System.out.println(LexicalTableCoreMessages.LexicalTableImpl_1 + vocabularies);
        Index partindex = vocabularies.get(0); // FRIGO
        if (!partindex.isComputedWithPartition())
            return null;

        Partition partition = partindex.getPartition();
        Property property = partindex.getProperties().get(0);

        HashMap<String, Line> alllines = new HashMap<String, Line>();
        // merge lines of all indexes
        for (Index voc : vocabularies) {
            for (Line l : voc.getAllLines()) {
                if (alllines.containsKey(l.getSignature())) {
                    Line ll = alllines.get(l.getSignature());
                    int[] c1 = ll.getFrequencies();
                    int[] c2 = l.getFrequencies();
                    for (int i = 0; i < c1.length; i++)
                        c2[i] += c1[i];
                    ll.setCounts(c2, 0.0f);
                } else
                    alllines.put(l.toString(), l);
            }
        }

        List<String> colnames = partindex.getPartnames();

        Collection<Line> lines = alllines.values();
        List<String> rownames = new ArrayList<String>(lines.size());
        for (Line l : lines) {
            rownames.add(l.toString());
        }
        int extra = 0;
        if (useAllOccurrences)
            extra = 1;

        String[] entries = new String[alllines.size() + extra];

        int[][] mat = new int[rownames.size() + extra][colnames.size()];
        int[] margins = new int[colnames.size()]; // compute margins
        int i = 0;
        for (Line l : lines) {
            for (int j = 0; j < colnames.size(); j++) {
                mat[i][j] = l.getFrequency(j);
                margins[j] += l.getFrequency(j);
            }
            entries[i++] = l.toString();
        }

        if (useAllOccurrences) {
            try {
                int[] partitionSizes = partition.getPartSizes();
                int[] reste = new int[partitionSizes.length];

                //System.out.println("margins : "+Arrays.toString(margins));
                //System.out.println("partsizes : "+Arrays.toString(partitionSizes));

                for (i = 0; i < reste.length; i++) {
                    reste[i] = partitionSizes[i] - margins[i];
                    if (reste[i] < 0) {
                        System.out.println(LexicalTableCoreMessages.LexicalTableImpl_12 + i + LexicalTableCoreMessages.LexicalTableImpl_13);
                        return null;
                    }
                    mat[lines.size()][i] = reste[i];
                }
                entries[lines.size()] = "#RESTE#"; //$NON-NLS-1$
                rownames.add("#RESTE#"); //$NON-NLS-1$
                //System.out.println("rownames: "+rownames);
                //System.out.println("reste : "+Arrays.toString(reste));
            } catch (CqiClientException e) {
                // TODO Auto-generated catch block
                org.txm.utils.logger.Log.printStackTrace(e);
            }

        }
        //System.out.println("mat size : ["+(rownames.size() + extra)+"]["+colnames.size()+"]");
        //System.out.println("rownames size : "+rownames.size());
        //System.out.println("colnames size : "+colnames.size());
        LexicalTableImpl table = new LexicalTableImpl(mat, partition, property,
                rownames.toArray(new String[] {}), colnames
                .toArray(new String[] {}));
        table.constructor_fmin = vocabularies.get(0).getFmin();
        return table;
    }

    /**
     * Creates the lexical table.
     *
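     * <p>Illustration only (hypothetical data; {@code wordProperty} stands for an existing
     * word {@link Property}):
     * <pre>{@code
     * int[][] freqs = { { 10, 7 }, { 4, 9 } };
     * LexicalTable t = LexicalTableImpl.createLexicalTable(freqs, wordProperty,
     *         new String[] { "the", "of" }, new String[] { "part1", "part2" }, 1);
     * }</pre>
     *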
     * @param freqs the raw frequency matrix (one row per form, one column per part)
     * @param prop the property
     * @param rownames the rownames
     * @param colnames the colnames
     * @param Fmin the minimum row total required to keep a row
     * @return the lexical table
     * @throws RWorkspaceException the r workspace exception
     */
    public static LexicalTable createLexicalTable(int[][] freqs, Property prop,
            String[] rownames, String[] colnames, int Fmin) throws RWorkspaceException {

        ArrayList<Integer> idx = new ArrayList<Integer>();
        for (int j = 0; j < rownames.length; j++) {
            int sum = 0;
            for (int i = 0; i < colnames.length; i++) {
                sum += freqs[j][i];
            }
            if (sum >= Fmin)
                idx.add(j);
        }

        DoubleMatrix2D mat = DoubleFactory2D.sparse.make(idx.size(), colnames.length, 0);

        int countline = 0;
        for (int j : idx) {
            for (int i = 0; i < colnames.length; i++) {
                mat.setQuick(countline, i, freqs[j][i]);
            }
            countline++;
        }

        String[] filteredrownames = new String[idx.size()];
        for (int i = 0; i < idx.size(); i++)
            filteredrownames[i] = rownames[idx.get(i)];

        LexicalTableImpl table = new LexicalTableImpl(mat, prop, filteredrownames, colnames);
        table.constructor_fmin = Fmin;
        return table;
    }

    @Override
    public LexicalTable getCopy() {
        try {
            if (partition != null)
                return getLexicalTable(this.partition, this.property, this.constructor_fmin);
            else
                return null;
        } catch (Exception e) {
            org.txm.utils.logger.Log.printStackTrace(e);
        }
        return null;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getFmax()
     */
    @Override
    public int getFmax() {
        List<Integer> freqs = getFreqs();
        int max = 0;
        for (int i : freqs)
            if (max < i)
                max = i;
        return max;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getFmin()
     */
    @Override
    public int getFmin() {
        List<Integer> freqs = getFreqs();
        int min = Integer.MAX_VALUE;
        for (int i : freqs)
            if (min > i)
                min = i;
        return min;
    }

    /* (non-Javadoc)
     *
     * TODO: = rowmargins ?
     * @see org.txm.stat.data.LexicalTable#getFreqs()
     */
    @Override
    public List<Integer> getFreqs() {
        ArrayList<Integer> freqs = new ArrayList<Integer>();
        ArrayList<double[]> cols = new ArrayList<double[]>();
        int Nrows = this.getNRows();
        int Ncols = this.getNColumns();
        for (int i = 0; i < Ncols; i++)
            try {
                cols.add(this.getCol(i).asDoubleArray());
            } catch (RException e) {
                // TODO Auto-generated catch block
                org.txm.utils.logger.Log.printStackTrace(e);
            } catch (RWorkspaceException e) {
                // TODO Auto-generated catch block
                org.txm.utils.logger.Log.printStackTrace(e);
            } catch (StatException e) {
                // TODO Auto-generated catch block
                org.txm.utils.logger.Log.printStackTrace(e);
            }
        int sum = 0;

        for (int i = 0; i < Nrows; i++) {
            sum = 0;
            for (int j = 0; j < Ncols; j++)
                sum += (int) cols.get(j)[i];
            freqs.add(sum);
        }
        return freqs;
    }

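    /**
     * Appends a "##RESTE##" column to this table computed against a reference table (a
     * best-effort reading of the R expressions below): rows missing from the reference are
     * first added to it with zero counts, the reference rows are then aligned on this
     * table's rows, and the new column receives the absolute difference between each
     * reference frequency and the corresponding row margin of this table.
     *
     * @param refSymbol the R symbol of the reference table
     */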
    public void setReference(String refSymbol) {
        try {
            RWorkspace rw = RWorkspace.getRWorkspaceInstance();
            rw.voidEval("missingrownames <- rownames("+symbol+")[!rownames("+symbol+")%in%rownames("+refSymbol+")]"); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$ //$NON-NLS-4$
            rw.voidEval("missingrows <- matrix(ncol=1, nrow=length(missingrownames))"); //$NON-NLS-1$
            rw.voidEval("missingrows[,] <- 0"); //$NON-NLS-1$
            rw.voidEval("rownames(missingrows) <- missingrownames"); //$NON-NLS-1$
            rw.voidEval(refSymbol+" <- t(t(rbind("+refSymbol+", t(t(missingrows)))))"); //$NON-NLS-1$ //$NON-NLS-2$
            rw.voidEval("refLines <- t(t("+refSymbol+"[rownames("+symbol+"),]))"); // only keep the same lines //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
            rw.voidEval("rmargins <- t(t(margin.table("+symbol+", 1)))"); // //$NON-NLS-1$ //$NON-NLS-2$
            rw.voidEval("refmargin <- margin.table(refLines)"); //$NON-NLS-1$
            rw.voidEval(symbol+" <- cbind("+symbol+", abs(refLines - rmargins))"); // use abs if refLine does not contains a line from 'symbol' //$NON-NLS-1$ //$NON-NLS-2$
            rw.voidEval("colnames("+symbol+")[length(colnames("+symbol+"))] <- \"##RESTE##\""); //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
        } catch (Exception e) {
            // TODO Auto-generated catch block
            org.txm.utils.logger.Log.printStackTrace(e);
        }
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getConstructorFmin()
     */
    @Override
    public int getConstructorFmin() {
        return constructor_fmin;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#getCorpus()
     */
    @Override
    public Corpus getCorpus() {
        try {
            return ((Partition) this.parent).getCorpus();
        }
        catch (Exception e) {
            // the parent is not a Partition: fall back to the parent itself
        }
        return (Corpus) this.parent;
    }

    /* (non-Javadoc)
     * @see org.txm.stat.data.LexicalTable#setCorpus(org.txm.searchengine.cqp.corpus.Corpus)
     */
    @Override
    public void setCorpus(Corpus corpus) {
        this.corpus = corpus;
    }

    @Override
    public int getRowsCount() {
        return this.getNRows();
    }

    @Override
    public int getColumnsCount() {
        return this.getNColumns();
    }

    @Override
    public String getName() {
        return this.getPartition().getSimpleName() + ":" + this.getSimpleName(); //$NON-NLS-1$
    }

    @Override
    public String getSimpleName() {
        return property.getName() + " (" + this.getFmin() + " / " + this.getNRows() + ")";
    }

    @Override
    public String getDetails() {
        // TODO Auto-generated method stub
        return null;
    }

    @Override
    public void clean() {
        // TODO Auto-generated method stub
    }

}