Commit 214e6a9e authored by Piotr Gawron

Merge branch '328-admin-panel-data-mining-status-on-upload' into 'master'

Resolve "Admin panel: Data mining status on upload"

Closes #328

See merge request piotr.gawron/minerva!235
parents 059fd451 1067a139
@@ -63,15 +63,11 @@ import lcsb.mapviewer.model.log.LogType;
import lcsb.mapviewer.model.map.BioEntity;
import lcsb.mapviewer.model.map.MiriamData;
import lcsb.mapviewer.model.map.MiriamType;
import lcsb.mapviewer.model.map.graph.DataMining;
import lcsb.mapviewer.model.map.graph.DataMiningSet;
import lcsb.mapviewer.model.map.layout.Layout;
import lcsb.mapviewer.model.map.layout.LayoutStatus;
import lcsb.mapviewer.model.map.model.Model;
import lcsb.mapviewer.model.map.model.ModelData;
import lcsb.mapviewer.model.map.model.ModelSubmodelConnection;
import lcsb.mapviewer.model.map.reaction.Reaction;
import lcsb.mapviewer.model.map.species.Element;
import lcsb.mapviewer.model.user.ObjectPrivilege;
import lcsb.mapviewer.model.user.PrivilegeType;
import lcsb.mapviewer.model.user.User;
@@ -103,7 +99,6 @@ import lcsb.mapviewer.services.search.db.drug.IDrugService;
import lcsb.mapviewer.services.search.db.mirna.IMiRNAService;
import lcsb.mapviewer.services.utils.CreateProjectParams;
import lcsb.mapviewer.services.utils.EmailSender;
import lcsb.mapviewer.services.utils.InvalidDataMiningInputFile;
import lcsb.mapviewer.services.utils.data.BuildInLayout;
/**
@@ -123,16 +118,6 @@ public class ProjectService implements IProjectService {
*/
private static final int OUT_OF_MEMORY_BACKUP_BUFFER_SIZE = 10000;
/**
* Fraction of the total time (out of 1.00) spent on creating the data from the file.
*/
private static final double CREATION_OF_DATA = 0.50;
/**
* Fraction of the total time (out of 1.00) spent on uploading the data from the file.
*/
private static final double UPLOAD_OF_DATA = 0.50;
/**
* Default class logger.
*/
@@ -737,72 +722,10 @@
}, annotators, params.getAnnotatorsParamsAsMap());
logger.debug("Annotations updated");
}
updateProjectStatus(project, ProjectStatus.EXTENDING_MODEL, 0.0, params);
processDataMining(topModel, topModel.getDataMiningSets(), new IProgressUpdater() {
@Override
public void setProgress(double progress) {
updateProjectStatus(project, ProjectStatus.EXTENDING_MODEL, progress, params);
}
});
updateProjectStatus(project, ProjectStatus.EXTENDING_MODEL, IProgressUpdater.MAX_PROGRESS, params);
logger.debug("Model created");
}
/**
* Process data mining files and assign suggested connections to the model.
*
* @param model
* model where the suggested connections will be added
* @param dataMiningSets
* set of files to process
* @param progressUpdater
* callback informing the higher layer about progress
* @throws InvalidDataMiningInputFile
* thrown when one of the files is invalid
*/
private void processDataMining(Model model, List<DataMiningSet> dataMiningSets,
final IProgressUpdater progressUpdater) throws InvalidDataMiningInputFile {
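// collect elements and reactions from the top model and every connected submodel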
Set<Element> nodes = new HashSet<>();
nodes.addAll(model.getElements());
for (ModelSubmodelConnection connection : model.getSubmodelConnections()) {
nodes.addAll(connection.getSubmodel().getElements());
}
Set<Reaction> reactions = new HashSet<>();
reactions.addAll(model.getReactions());
for (ModelSubmodelConnection connection : model.getSubmodelConnections()) {
reactions.addAll(connection.getSubmodel().getReactions());
}
int fileCounter = 0;
final int filesCount = dataMiningSets.size();
for (DataMiningSet dmSet : dataMiningSets) {
final double offset = IProgressUpdater.MAX_PROGRESS * fileCounter / filesCount;
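// parsing phase: map the parser's progress into this file's CREATION_OF_DATA share of the overall progress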
IProgressUpdater secondPartUpdater = new IProgressUpdater() {
@Override
public void setProgress(double progress) {
progressUpdater.setProgress((progress * CREATION_OF_DATA) / ((double) filesCount) + offset);
}
};
Set<DataMining> result = dataMiningService.parseData(dmSet, nodes, reactions, secondPartUpdater);
double size = result.size();
double count = 0;
double updateOffset = IProgressUpdater.MAX_PROGRESS * fileCounter / filesCount
+ CREATION_OF_DATA / ((double) filesCount);
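// upload phase: attach each suggested connection to its element and report the remaining progress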
for (DataMining missingConnection : result) {
missingConnection.setType(dmSet.getType());
missingConnection.getElement().addDataMining(missingConnection);
count++;
progressUpdater.setProgress(updateOffset + IProgressUpdater.MAX_PROGRESS * count / size * UPLOAD_OF_DATA);
}
// move the per-file progress offset to the next data mining file
fileCounter++;
}
}
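The processDataMining method above splits each file's slice of the progress bar into a parsing phase (CREATION_OF_DATA) and an upload phase (UPLOAD_OF_DATA). A minimal sketch of how the two phases combine into one overall value, assuming IProgressUpdater.MAX_PROGRESS is 100; the method and parameter names below are illustrative and not part of the MINERVA API:

// Illustrative sketch only: overall progress for file number fileIndex (0-based)
// out of filesCount, with phaseProgress in the range [0, 100].
static double combinedProgress(int fileIndex, int filesCount, double phaseProgress, boolean uploadPhase) {
  final double maxProgress = 100.0;   // IProgressUpdater.MAX_PROGRESS (assumed)
  final double creationOfData = 0.50; // CREATION_OF_DATA
  final double uploadOfData = 0.50;   // UPLOAD_OF_DATA
  double fileShare = maxProgress / filesCount;  // slice of the bar owned by one file
  double fileOffset = fileShare * fileIndex;    // progress completed by earlier files
  double phaseOffset = uploadPhase ? fileShare * creationOfData : 0.0;
  double phaseWeight = uploadPhase ? uploadOfData : creationOfData;
  return fileOffset + phaseOffset + (phaseProgress / maxProgress) * fileShare * phaseWeight;
}

For example, with two files the upload phase of the second file at 50% maps to combinedProgress(1, 2, 50, true) = 87.5, i.e. 87.5% of the overall bar.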
/**
* Updates status of the generating project.
*
......
@@ -571,33 +571,6 @@ public class ProjectServiceTest extends ServiceTestFunctions {
}
@Test
public void testCreateComplexWithDataMining() throws Exception {
String name = "Some_id";
try {
String filename = "testFiles/complexModel/complex_model_with_data_mining.zip";
Project project = createComplexProject(name, filename);
Model model = modelService.getLastModelByProjectId(name, adminToken);
assertNotNull(model);
assertEquals("main", model.getName());
assertEquals(ProjectStatus.DONE, project.getStatus());
boolean dmFound = false;
for (Element element : model.getElements()) {
if (element.getDataMining().size() > 0) {
dmFound = true;
}
}
assertTrue("Cannot find data mining information for the model", dmFound);
projectService.removeProject(project, null, false, adminToken);
String token = userService.login(Configuration.ANONYMOUS_LOGIN, null);
assertNull(projectService.getProjectByProjectId(name, token));
} catch (Exception e) {
e.printStackTrace();
throw e;
}
}
protected Project createComplexProject(String projectId, String filename) throws IOException, SecurityException {
CreateProjectParams params = new CreateProjectParams();
......