Commit 47b71954 authored by Jasper Koehorst

sync

parent aa530ac5
Pipeline #25989 failed in 1 minute and 52 seconds
@@ -3,3 +3,4 @@ unlock
gradle
.settings
*.yaml
.gradle
@@ -11,7 +11,6 @@ buildscript {
mavenCentral()
mavenLocal()
maven { url 'https://repo.gradle.org/gradle/libs-releases' }
jcenter()
maven { url "https://plugins.gradle.org/m2/" }
}
dependencies {
@@ -57,8 +56,8 @@ description = """Kubernetes manager for unlock"""
repositories {
// Use jcenter for resolving your dependencies.
// You can declare any Maven/Ivy/file repository here.
jcenter()
mavenLocal()
mavenCentral()
}
dependencies {
@@ -90,7 +89,7 @@ dependencies {
compile group: 'log4j', name: 'log4j', version: '1.2.17'
// https://mvnrepository.com/artifact/com.esotericsoftware.yamlbeans/yamlbeans
compile group: 'com.esotericsoftware.yamlbeans', name: 'yamlbeans', version: '1.14'
compile group: 'com.esotericsoftware.yamlbeans', name: 'yamlbeans', version: '1.15'
// locally installed jargon and unlock api
compile group: 'jargon', name: 'core', version: '4.3.0.2'
......
@@ -21,8 +21,8 @@ mvn install:install-file -Dfile=$DIR/jargon-core-4.3.0.2-RELEASE-jar-with-depend
# ////////////////////////////////////////////////////////////////////////////////////
# // UNLOCK API
# ////////////////////////////////////////////////////////////////////////////////////
wget -nc http://download.systemsbiology.nl/unlock/UnlockOntology.jar -O $DIR/UnlockOntology.jar
mvn install:install-file -Dfile=$DIR/UnlockOntology.jar -DgroupId=nl.munlock -DartifactId=unlockapi -Dversion=1.0.1 -Dpackaging=jar
# wget -nc http://download.systemsbiology.nl/unlock/UnlockOntology.jar -O $DIR/UnlockOntology.jar
# mvn install:install-file -Dfile=$DIR/UnlockOntology.jar -DgroupId=nl.munlock -DartifactId=unlockapi -Dversion=1.0.1 -Dpackaging=jar
# Building mode
if [ "$1" == "test" ]; then
......
(Source diff omitted: too large to display.)
package nl.munlock;
import htsjdk.samtools.fastq.FastqReader;
import htsjdk.samtools.fastq.FastqRecord;
import nl.munlock.irods.Connection;
import nl.munlock.ontology.domain.SequenceDataSet;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.FilenameUtils;
import org.apache.log4j.*;
import org.irods.jargon.core.checksum.ChecksumValue;
import org.irods.jargon.core.exception.JargonException;
import org.irods.jargon.core.pub.DataObjectAO;
import org.irods.jargon.core.pub.DataObjectChecksumUtilitiesAO;
import org.irods.jargon.core.pub.DataTransferOperations;
import org.irods.jargon.core.pub.domain.AvuData;
import org.irods.jargon.core.pub.io.IRODSFile;
import org.irods.jargon.core.query.GenQueryBuilderException;
import org.irods.jargon.core.query.JargonQueryException;
import org.irods.jargon.core.query.MetaDataAndDomainData;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.io.*;
import java.net.MalformedURLException;
import java.net.URL;
import java.util.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.zip.GZIPInputStream;
public class Generic {
private static final org.slf4j.Logger log = LoggerFactory.getLogger(Generic.class);
static ArrayList<String> processed = new ArrayList<>();
/**
* Logger initialization with debug option
* @param debug boolean if debug mode should be enabled
@@ -114,9 +127,60 @@ public class Generic {
return "http://edamontology.org/format_1930";
} else if (extension.matches("(fa|fasta)")) {
return "http://edamontology.org/format_1929";
} else if (extension.matches("ttl")) {
return "http://edamontology.org/format_3255";
} else {
log.error("Unknown file format " + extension);
return null;
}
}
public static String getSHA256(Connection connection, String irodsFilePath) throws JargonException, MalformedURLException {
if (irodsFilePath.startsWith("http")) {
irodsFilePath = new URL(irodsFilePath).getPath();
}
IRODSFile irodsFile = connection.fileFactory.instanceIRODSFile(irodsFilePath);
return getSHA256(connection, irodsFile);
}
public static String getSHA256(Connection connection, IRODSFile irodsFile) throws JargonException {
DataObjectChecksumUtilitiesAO dataObjectChecksumUtilitiesAO = connection.irodsFileSystem.getIRODSAccessObjectFactory().getDataObjectChecksumUtilitiesAO(connection.irodsAccount);
// TODO there is an issue with too many checksum checks
if (!irodsFile.exists()) {
connection.close();
connection.reconnect();
}
ChecksumValue checksumValue = dataObjectChecksumUtilitiesAO.retrieveExistingChecksumForDataObject(irodsFile.getAbsolutePath());
// TODO: not sure this works; the retrieved checksum may be null
if (checksumValue == null) {
checksumValue = dataObjectChecksumUtilitiesAO.computeChecksumOnDataObject(irodsFile);
}
return checksumValue.getHexChecksumValue();
}
//
// public static String getSHA256(String irodsFilePath) throws JargonException, MalformedURLException {
// if (irodsFilePath.startsWith("http")) {
// irodsFilePath = new URL(irodsFilePath).getPath();
// }
// IRODSFile irodsFile = connection.fileFactory.instanceIRODSFile(irodsFilePath);
// return getSHA256(irodsFile);
// }
public static String getBase64(Connection connection, String irodsFilePath) throws JargonException, MalformedURLException {
if (irodsFilePath.startsWith("http")) {
irodsFilePath = new URL(irodsFilePath).getPath();
}
IRODSFile irodsFile = connection.fileFactory.instanceIRODSFile(irodsFilePath);
return getBase64(connection, irodsFile);
}
public static String getBase64(Connection connection, IRODSFile irodsFile) throws JargonException {
DataObjectChecksumUtilitiesAO dataObjectChecksumUtilitiesAO = connection.irodsFileSystem.getIRODSAccessObjectFactory().getDataObjectChecksumUtilitiesAO(connection.irodsAccount);
ChecksumValue checksumValue = dataObjectChecksumUtilitiesAO.retrieveExistingChecksumForDataObject(irodsFile.getAbsolutePath());
// TODO: not sure this works; the retrieved checksum may be null
if (checksumValue == null) {
checksumValue = dataObjectChecksumUtilitiesAO.computeChecksumOnDataObject(irodsFile);
}
return checksumValue.getBase64ChecksumValue();
}
}
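For orientation, a minimal usage sketch of the checksum helpers added above. It is illustrative only: the CommandOptionsIRODS constructor call and the iRODS path are assumptions, not part of this commit.

import nl.munlock.Generic;
import nl.munlock.irods.Connection;
import nl.munlock.options.irods.CommandOptionsIRODS;

public class ChecksumExample {
    public static void main(String[] args) throws Exception {
        // Hypothetical: in the real tool the options are parsed from the command line.
        CommandOptionsIRODS options = new CommandOptionsIRODS(args);
        Connection connection = new Connection(options);

        // Both helpers resolve the path to an IRODSFile and reuse the checksum
        // registered in iRODS, computing one only when none is stored yet.
        String irodsPath = "/tempZone/home/example/reads.fastq.gz"; // made-up path
        String hex = Generic.getSHA256(connection, irodsPath);
        String base64 = Generic.getBase64(connection, irodsPath);
        System.out.println("hex: " + hex + ", base64: " + base64);
    }
}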
package nl.munlock.irods;
import nl.munlock.options.irods.CommandOptionsIRODS;
import nl.munlock.options.kubernetes.CommandOptionsKubernetes;
import org.irods.jargon.core.connection.ClientServerNegotiationPolicy;
import org.irods.jargon.core.connection.IRODSAccount;
import org.irods.jargon.core.exception.JargonException;
@@ -19,10 +18,12 @@ public class Connection {
public IRODSAccount irodsAccount;
public IRODSAccessObjectFactory accessObjectFactory;
public IRODSFileFactory fileFactory;
private CommandOptionsIRODS commandOptions;
private static final Logger log = LoggerFactory.getLogger(Connection.class);
public Connection(CommandOptionsIRODS commandOptions) throws JargonException {
connect(commandOptions);
this.commandOptions = commandOptions;
}
private void connect(CommandOptionsIRODS commandOptions) throws JargonException {
@@ -59,8 +60,12 @@ public class Connection {
}
}
public void reconnect(CommandOptionsIRODS commandOptions) throws JargonException {
public void reconnect() throws JargonException {
this.close();
this.connect(commandOptions);
this.connect(this.commandOptions);
}
public CommandOptionsIRODS getCommandOptionsIRODS() {
return this.commandOptions;
}
}
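The reconnect() change above drops the options parameter because the Connection now keeps its own CommandOptionsIRODS. A small sketch of how a caller can use it; the retry helper itself is hypothetical and not part of this commit.

import nl.munlock.irods.Connection;
import org.irods.jargon.core.exception.JargonException;
import org.irods.jargon.core.pub.io.IRODSFile;

public class ReconnectSketch {
    // Hypothetical helper: check existence, retrying once after a fresh session.
    public static boolean existsWithRetry(Connection connection, String path) throws JargonException {
        IRODSFile file = connection.fileFactory.instanceIRODSFile(path);
        if (file.exists()) {
            return true;
        }
        // The Connection remembers its CommandOptionsIRODS, so no arguments are needed here.
        connection.close();
        connection.reconnect();
        return connection.fileFactory.instanceIRODSFile(path).exists();
    }
}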
@@ -14,7 +14,6 @@ import nl.munlock.yaml.Yaml;
import nl.wur.ssb.RDFSimpleCon.ResultLine;
import nl.wur.ssb.RDFSimpleCon.api.Domain;
import org.apache.commons.io.IOUtils;
import org.apache.jena.ext.com.google.common.io.Files;
import org.apache.jena.riot.RDFDataMgr;
import org.apache.log4j.Logger;
import org.irods.jargon.core.exception.JargonException;
@@ -31,8 +30,6 @@ import java.io.FileOutputStream;
import java.io.IOException;
import java.util.*;
import static java.util.concurrent.TimeUnit.SECONDS;
public class Search {
private static final Logger log = Generic.getLogger(Yaml.class, Yaml.debug);
// static Domain domain;
@@ -44,10 +41,8 @@ public class Search {
* @return list of assays from an rdf file
* @throws Exception general exceptions
*/
public static ArrayList<Assay> getAssaysFromRDF(CommandOptionsIRODS commandOptions, Connection connection) throws Exception {
public static ArrayList<Assay> getAssaysFromRDF(CommandOptionsIRODS commandOptions, Connection connection, Domain domain) throws Exception {
log.info("Obtaining assay files");
File domainDirectory = Files.createTempDir();
Domain domain = new Domain("file://"+domainDirectory);
// Get unprocessed files
IRODSGenQueryBuilder queryBuilder = new IRODSGenQueryBuilder(true, null);
@@ -267,6 +262,7 @@ public class Search {
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_DATA_NAME, QueryConditionOperators.LIKE, "%.yaml");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_NAME, QueryConditionOperators.LIKE, "cwl");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_UNITS, QueryConditionOperators.LIKE, "waiting");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_VALUE, QueryConditionOperators.LIKE, "%workflows/workflow_sapp_microbes.cwl");
queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_COLL_NAME);
queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_DATA_NAME);
@@ -279,12 +275,32 @@ public class Search {
List<IRODSQueryResultRow> irodsQueryResultSetResults = irodsQueryResultSet.getResults();
int index = 0;
HashSet<String> yamls = new HashSet<>();
for (IRODSQueryResultRow irodsQueryResultSetResult : irodsQueryResultSetResults) {
index = index + 1;
// Yaml file... obtain, parse, check destination... if already exists don't do it?
String yaml = irodsQueryResultSetResult.getColumn(0) + "/" + irodsQueryResultSetResult.getColumn(1);
log.info("Checking yaml " + yaml);
yamls.add(yaml);
}
int index = 0;
for (String yaml : yamls) {
index = index + 1;
log.info("Checking yaml " + index + " of " + yamls.size() + " with path "+ yaml);
// TEMP
String[] filters = {"genomes"}; // basically no filter
boolean status = false;
for (String filter : filters) {
if (yaml.contains(filter)) {
status = true;
}
}
if (!status) continue;
// END TEMP
Generic.downloadFile(connection, new File(yaml));
Scanner scanner = new Scanner(new File("." + yaml));
@@ -292,25 +308,15 @@
String line = scanner.nextLine();
if (line.startsWith("destination: ")) {
// log.info("Analysing " + line);
String destination = line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml",".ttl.gz");
IRODSFile destination = connection.fileFactory.instanceIRODSFile(line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml",".ttl.gz"));
IRODSFile destination2 = connection.fileFactory.instanceIRODSFile(line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml",".ttl"));
IRODSFile destination3 = connection.fileFactory.instanceIRODSFile(line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml","interproscan.ttl"));
IRODSFile destination4 = connection.fileFactory.instanceIRODSFile(line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml","interproscan.ttl.gz"));
IRODSFile destination5 = connection.fileFactory.instanceIRODSFile(line.split(" ")[1] + "/" + new File(yaml).getName().replaceAll(".yaml",".hdt.gz"));
// When not yet processed, do a final check whether the output already exists; if not, assign the job to Kubernetes
String path = irodsQueryResultSetResult.getColumn(0) + "/" + irodsQueryResultSetResult.getColumn(1);
// Amplicon library requires a slightly different destination split
if (search.contains("ampliconlibraries")) {
destination = line.split(" ")[1];
IRODSFile irodsFolder = connection.fileFactory.instanceIRODSFile(destination);
if (irodsFolder.exists() && irodsFolder.list().length < 2) {
Kubernetes.yamls.add(path);
log.info("Processing empty folder " + index + " of " + irodsQueryResultSetResults.size() + " " + destination);
} else if (!irodsFolder.exists()) {
Kubernetes.yamls.add(path);
log.info("Processing " + index + " of " + irodsQueryResultSetResults.size() + " " + destination);
} else {
log.info("Library is already demultiplexed");
fixAVU(connection, yaml);
}
} else if (!connection.fileFactory.instanceIRODSFile(destination).exists()) {
String path = yaml; // irodsQueryResultSetResult.getColumn(0) + "/" + irodsQueryResultSetResult.getColumn(1);
// Check if ttl.gz exists
if (!destination.exists() && !destination2.exists() && !destination3.exists() && !destination4.exists() && !destination5.exists()) {
Kubernetes.yamls.add(path);
log.debug("Processing " + index + " of " + irodsQueryResultSetResults.size() + " " + destination);
} else {
@@ -324,16 +330,43 @@
Kubernetes.createJobs(commandOptionsKubernetes, connection);
// Reset references...
Kubernetes.yamls = new HashSet<>();
if (yaml.contains("/demultiplexed/")) {
log.info("Demultiplexing job detected, sleeping for 5 minutes due to file transfer?...");
SECONDS.sleep(150);
}
}
}
scanner.close();
}
log.info("Processed " + index + " jobs");
log.info("Processed " + index + " potential jobs");
}
public static HashSet<String> getAllFailedUnprocessed(CommandOptionsKubernetes commandOptionsKubernetes, Connection connection) throws GenQueryBuilderException, JargonException, JargonQueryException, IOException, InterruptedException, ApiException {
// Creating search pattern based on PISOSA
String search = makePath(commandOptionsKubernetes.project, commandOptionsKubernetes.investigation, commandOptionsKubernetes.study, commandOptionsKubernetes.observationUnit, commandOptionsKubernetes.assay, connection);
IRODSGenQueryBuilder queryBuilder = new IRODSGenQueryBuilder(true, null);
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_COLL_NAME, QueryConditionOperators.LIKE, search);
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_DATA_NAME, QueryConditionOperators.LIKE, "%.yaml");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_NAME, QueryConditionOperators.LIKE, "cwl");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_UNITS, QueryConditionOperators.LIKE, "failed");
queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_COLL_NAME);
queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_DATA_NAME);
// Set limit?
IRODSGenQueryFromBuilder query = queryBuilder.exportIRODSQueryFromBuilder(50000);
IRODSGenQueryExecutor irodsGenQueryExecutor = connection.accessObjectFactory.getIRODSGenQueryExecutor(connection.irodsAccount);
IRODSQueryResultSet irodsQueryResultSet = irodsGenQueryExecutor.executeIRODSQuery(query, 0);
List<IRODSQueryResultRow> irodsQueryResultSetResults = irodsQueryResultSet.getResults();
HashSet<String> yamlFiles = new HashSet<>();
for (IRODSQueryResultRow irodsQueryResultSetResult : irodsQueryResultSetResults) {
// Yaml file... obtain, parse, check destination... if already exists don't do it?
String yaml = irodsQueryResultSetResult.getColumn(0) + "/" + irodsQueryResultSetResult.getColumn(1);
yamlFiles.add(yaml);
}
return yamlFiles;
}
public static void getAllUnprocessed(CommandOptionsKubernetes commandOptionsKubernetes, Connection connection) throws GenQueryBuilderException, JargonException, JargonQueryException, IOException, InterruptedException, ApiException {
@@ -347,7 +380,11 @@ public class Search {
// For hdt check
Set<String> hdts = getAllHDT(search, connection);
log.info("Searching in " + search);
log.info("Searching for failed jobs in " + search);
HashSet<String> yamlFiles = getAllFailedUnprocessed(commandOptionsKubernetes, connection);
log.info("Searching for jobs in " + search);
IRODSGenQueryBuilder queryBuilder = new IRODSGenQueryBuilder(true, null);
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_COLL_NAME, QueryConditionOperators.LIKE, search);
@@ -366,13 +403,14 @@ public class Search {
List<IRODSQueryResultRow> irodsQueryResultSetResults = irodsQueryResultSet.getResults();
HashSet<String> yamlFiles = new HashSet<>();
for (IRODSQueryResultRow irodsQueryResultSetResult : irodsQueryResultSetResults) {
// Yaml file... obtain, parse, check destination... if already exists don't do it?
String yaml = irodsQueryResultSetResult.getColumn(0) + "/" + irodsQueryResultSetResult.getColumn(1);
yamlFiles.add(yaml);
}
log.info(yamlFiles.size() + " yaml files detected");
int count = 0;
@@ -458,7 +496,7 @@ public class Search {
}
}
if (avu != null) {
log.info("Updating metadata field");
log.info("Updating metadata field to queue");
dataObjectAO.deleteAVUMetadata(yaml, avu);
avu.setUnit("queue");
dataObjectAO.addAVUMetadata(yaml, avu);
@@ -477,7 +515,7 @@ public class Search {
}
}
if (avu != null) {
log.info("Updating metadata field");
log.info("Updating metadata field to waiting " + yaml);
dataObjectAO.deleteAVUMetadata(yaml, avu);
avu.setUnit("waiting");
dataObjectAO.addAVUMetadata(yaml, avu);
@@ -656,7 +694,6 @@ public class Search {
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_DATA_NAME, QueryConditionOperators.LIKE, "%.ttl");
queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_COLL_NAME, QueryConditionOperators.LIKE, folder + "%");
// queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_DATA_NAME);
queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_D_MODIFY_TIME);
queryBuilder.addOrderByGenQueryField(RodsGenQueryEnum.COL_D_MODIFY_TIME, GenQueryOrderByField.OrderByType.DESC);
@@ -668,34 +705,40 @@ public class Search {
List<IRODSQueryResultRow> irodsQueryResultSetResults = irodsQueryResultSet.getResults();
if (irodsQueryResultSetResults.size() == 0)
throw new JargonException("No results found");
if (irodsQueryResultSetResults.size() == 0) {
// throw new JargonException("No results found");
log.error("No results found");
}
Set<String> folders = new HashSet<>();
if (irodsQueryResultSetResults.size() > 0) {
Date dateTTL = irodsQueryResultSetResults.get(0).getColumnAsDateOrNull(0); //.getColumnAsDateOrNull(0);
Date dateTTL = irodsQueryResultSetResults.get(0).getColumnAsDateOrNull(0);
// Get timestamp HDT file
IRODSFile irodsFolder = connection.fileFactory.instanceIRODSFile(folder + "/hdt/");
if (!irodsFolder.exists()) {
return true;
} else {
boolean hdtFound = false;
for (File file : irodsFolder.listFiles()) {
if (file.getName().endsWith(".hdt")) {
hdtFound = true;
IRODSFile irodsHDTFile = connection.fileFactory.instanceIRODSFile(file.getAbsolutePath());
Date dateHDT = new Date(irodsHDTFile.lastModified());
System.err.println(dateTTL);
System.err.println(dateHDT);
if (dateHDT.compareTo(dateTTL) == -1) {
log.info("TTL date: " + dateTTL + " HDT date: " + dateHDT);
// This is a job to be executed, as newer TTL files have been identified
return true;
}
}
}
// There is no HDT file present, so this should return true
if (!hdtFound) {
return true;
}
}
}
return false;
......
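The Search methods above all follow the same GenQuery pattern: restrict on collection name, on %.yaml data objects, and on the "cwl" AVU with a unit such as "waiting" or "failed". A condensed, hypothetical sketch of that pattern, using only jargon calls that already appear in this diff:

import nl.munlock.irods.Connection;
import org.irods.jargon.core.exception.JargonException;
import org.irods.jargon.core.pub.IRODSGenQueryExecutor;
import org.irods.jargon.core.query.*;

import java.util.HashSet;
import java.util.Set;

public class QuerySketch {
    // Hypothetical helper: list yaml job files whose "cwl" AVU unit matches the given state.
    public static Set<String> findYamlsByUnit(Connection connection, String collectionPattern, String unit)
            throws GenQueryBuilderException, JargonException, JargonQueryException {
        IRODSGenQueryBuilder queryBuilder = new IRODSGenQueryBuilder(true, null);
        queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_COLL_NAME, QueryConditionOperators.LIKE, collectionPattern);
        queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_DATA_NAME, QueryConditionOperators.LIKE, "%.yaml");
        queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_NAME, QueryConditionOperators.LIKE, "cwl");
        queryBuilder.addConditionAsGenQueryField(RodsGenQueryEnum.COL_META_DATA_ATTR_UNITS, QueryConditionOperators.LIKE, unit);
        queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_COLL_NAME);
        queryBuilder.addSelectAsGenQueryValue(RodsGenQueryEnum.COL_DATA_NAME);

        IRODSGenQueryFromBuilder query = queryBuilder.exportIRODSQueryFromBuilder(50000);
        IRODSGenQueryExecutor executor = connection.accessObjectFactory.getIRODSGenQueryExecutor(connection.irodsAccount);
        IRODSQueryResultSet resultSet = executor.executeIRODSQuery(query, 0);

        Set<String> yamls = new HashSet<>();
        for (IRODSQueryResultRow row : resultSet.getResults()) {
            // Column 0 holds the collection path, column 1 the data object name.
            yamls.add(row.getColumn(0) + "/" + row.getColumn(1));
        }
        return yamls;
    }
}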
@@ -61,6 +61,21 @@ public class Kubernetes {
int totalItems = checkKubernetesJobs(client);
for (String yaml : yamls) {
// Apply filter based on something?
// String[] identifiers = { "GCA_000433555", "GCA_000432115", "GCA_002363195", "GCA_002372055", "GCA_002393105", "GCA_002394455", "GCA_002395985", "GCA_000723465", "GCA_018379695", "GCA_018380525", "GCA_018382835", "GCA_018383015", "GCA_014870165", "GCA_900101355", "GCA_902387655", "GCA_902769425", "GCA_905203575", "GCA_900291485", "GCA_018363595", "GCA_018377865", "GCA_902385595", "GCA_905205725", "GCA_000468015", "GCA_902381675", "GCA_905206235", "GCA_000210095", "GCA_001312825", "GCA_902793455", "GCA_902796065", "GCA_902796705", "GCA_902803775", "GCA_902803945", "GCA_000571935", "GCA_000247525", "GCA_000526795", "GCA_000518765", "GCA_000174895", "GCA_000621845", "GCA_000701945", "GCA_000621785", "GCA_902377395", "GCA_000425525", "GCA_000436995", "GCA_018377795", "GCA_018378145", "GCA_018378825", "GCA_018380965", "GCA_018382215", "GCA_003435865", "GCA_003477585", "GCA_003435955", "GCA_003477475", "GCA_003474435", "GCA_003460775", "GCA_003460765", "GCA_003460725", "GCA_003464025", "GCA_003628495", "GCA_000431355", "GCA_000438435", "GCA_000432075", "GCA_000435855", "GCA_000437415", "GCA_001917045", "GCA_015559025", "GCA_015551645", "GCA_015554365", "GCA_001593025", "GCA_000621805", "GCA_000712055", "GCA_015551945", "GCA_000285855", "GCA_005601135", "GCA_005862145", "GCA_016680995", "GCA_900086605", "GCA_900604945", "GCA_000980385", "GCA_000686125", "GCA_014287355", "GCA_014288065", "GCA_003478065", "GCA_003460665", "GCA_003462665", "GCA_003603885", "GCA_003462585", "GCA_003481655", "GCA_002490945", "GCA_002490805", "GCA_002473365", "GCA_002479715", "GCA_002479605", "GCA_000980705", "GCA_900119155", "GCA_900103235", "GCA_001941135", "GCA_001941055", "GCA_905209785", "GCA_905211305", "GCA_905213675", "GCA_905213835", "GCA_905236825" };
// boolean pass = false;
// for (String identifier : identifiers) {
// if (yaml.contains(identifier)) {
// pass = true;
// break;
// }
// }
// if (!pass) {
// System.err.println("Skipping " + yaml + " as it did not pass temp filter");
// continue;
// }
try {
assayCount = assayCount + 1;
@@ -97,7 +112,7 @@ public class Kubernetes {
}
TimeUnit.MILLISECONDS.sleep(5);
TimeUnit.SECONDS.sleep(commandOptionsKubernetes.delay);
totalItems = totalItems + 1;
@@ -114,7 +129,7 @@ public class Kubernetes {
private static void getYamls(CommandOptionsKubernetes commandOptionsKubernetes, Connection connection) throws JargonException, GenQueryBuilderException, JargonQueryException, IOException, ApiException, InterruptedException {
// List all project turtle files...
if (commandOptionsKubernetes.project.contains("references")) {
String path = "/" + commandOptionsKubernetes.zone + "/references/genomes/bacteria%mycoplasma%";
String path = "/" + commandOptionsKubernetes.zone + "/references/genomes/%";
if (commandOptionsKubernetes.reset)
resetFailures(connection, path);
Search.getAllUnprocessedReferences(commandOptionsKubernetes, connection, path);
@@ -253,8 +268,6 @@ public class Kubernetes {
log.info("CWL: " + cwlFile);
// String uuid = UUID.randomUUID().toString();
// String yamlName = new File(yamlFile).getName();
V1PodTemplateSpec template = new V1PodTemplateSpec();
// Defines the docker container
......
package nl.munlock.objects;
import java.util.HashMap;
public class Workflow {
public int threads = 2;
public int memory = 5000;
public boolean provenance = true;
public int threads;
public int memory;
public boolean provenance;
public String identifier;
public String conda;
public HashMap<String, String> irods = new HashMap<>();
public void setThreads(int threads) {
this.threads = threads;
@@ -41,4 +46,20 @@ public class Workflow {
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public HashMap<String, String> getIrods() {
return irods;
}
public void setIrods(HashMap<String, String> irods) {
this.irods = irods;
}
public String getConda() {
return conda;
}
public void setConda(String conda) {
this.conda = conda;
}
}
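Workflow is a plain bean, and the commit bumps yamlbeans from 1.14 to 1.15 in build.gradle. A hedged sketch of how such an object could be written out as a job YAML with yamlbeans; the field values are invented and this diff does not show the project's actual YAML-writing code.

import com.esotericsoftware.yamlbeans.YamlWriter;
import nl.munlock.objects.Workflow;

import java.io.FileWriter;

public class WorkflowYamlSketch {
    public static void main(String[] args) throws Exception {
        Workflow workflow = new Workflow();
        workflow.setThreads(2);        // defaults were removed from the class above
        workflow.memory = 5000;        // public field, set directly
        workflow.provenance = true;
        workflow.setIdentifier("http://example.org/assay/illustrative"); // made-up value
        workflow.setConda("unlock-env");                                 // made-up value
        workflow.getIrods().put("host", "irods.example.org");            // made-up value

        // yamlbeans serialises the bean's public fields and properties to YAML.
        YamlWriter writer = new YamlWriter(new FileWriter("workflow.yaml"));
        writer.write(workflow);
        writer.close();
    }
}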
@@ -5,7 +5,7 @@ public class WorkflowGenomeSync extends Workflow {
public String enaBrowserTools;
public String destination;
public String taxonomy;
public String gca;
public String identifier;
public int codon;
public boolean bacteria;
@@ -25,12 +25,12 @@ public class WorkflowGenomeSync extends Workflow {
this.taxonomy = taxonomy;
}
public String getGca() {
return gca;
public String getIdentifier() {
return identifier;
}
public void setGca(String gca) {
this.gca = gca;
public void setIdentifier(String identifier) {
this.identifier = identifier;
}
public boolean isBacteria() {
......
@@ -7,25 +7,24 @@ import static nl.munlock.yaml.NGTax.fixPrimer;
public class WorkflowNgtax extends Workflow {
public String reference_db; // = new ArrayList<>();
public String reference_db;
public String forward_primer;
public String reverse_primer;
public ArrayList<FileClass> forward_reads = new ArrayList<>();
public ArrayList<FileClass> reverse_reads = new ArrayList<>();
// public ArrayList<FileClass> files = new ArrayList<>();
// public HashMap<String, String> hashMapTest = new HashMap<>();
public String destination;
public int rev_read_len;
public int for_read_len;
public String sample;
public double minimum_threshold;
public HashMap<String, String> irods = new HashMap<>();
public void setReference_db(String reference_db) throws Exception {
// FileClass fileClass = new FileClass();
// fileClass.setClazz("File");
// fileClass.setLocation(reference_db);
// this.reference_db.add(fileClass);
private String mock3;
private String mock4;
public String fragment;
public boolean primersRemoved;
public void setReference_db(String reference_db) {
this.reference_db = reference_db;
}
@@ -49,8 +48,12 @@ public class WorkflowNgtax {
return reverse_primer;
}