# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

# The root directory of the data store
storeroot-dir = data/store

# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =

# Port
port = 8444

# Session timeout in minutes
session-timeout = 720

# Path to the keystore
keystore.path = etc/openBIS.keystore

# Password of the keystore
keystore.password = changeit

# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 60

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to something
# lower than 180.
shutdown-timeout = 180

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water mark
# is specified or if the value is negative, the system will not monitor free disk space.
highwater-mark = -1

# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the mails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = https://localhost:8443/openbis/openbis

# The username to use when contacting the openBIS server
username = etlserver

# The password to use when contacting the openBIS server
password = etlserver

# The base URL for Web client access.
download-url = https://localhost:8444

# SMTP properties (must start with 'mail' to be considered).
# mail.smtp.host = localhost
# mail.from = datastore_server@localhost

# ---------------- Database parameters

basysbio-databaseEngineCode = postgresql
basysbio-basicDatabaseName = basysbio
basysbio-databaseKind = productive
basysbio-readOnlyGroup = basysbio_readonly
basysbio-readWriteGroup = basysbio_readwrite
basysbio-scriptFolder = sql

# ---------------- Timing parameters for file system operations on remote shares.

# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this
# timeout will be terminated.
timeout = 60

# Number of times that a timed-out operation will be tried again (0 means: every file system
# operation will only ever be performed once).
max-retries = 11

# Time (in seconds) to wait after an operation has timed out before retrying.
failure-interval = 10

# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine whether incoming data are
# ready to be processed.
quiet-period = 10
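# Illustration only (not read by the server): a rough worst-case estimate for the timing
# values above, assuming retries run sequentially and each attempt uses the full timeout.
# A persistently hanging file system operation is attempted 1 + max-retries = 12 times,
# so it is given up after at most about 12 * 60 s + 11 * 10 s = 830 s (~14 minutes).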
maintenance-plugins = post-registration-upload

post-registration-upload.class = eu.basysbio.cisd.dss.PostRegistrationDatabaseUploadTask
post-registration-upload.interval = 1440000
post-registration-upload.database.databaseEngineCode = ${basysbio-databaseEngineCode}
post-registration-upload.database.basicDatabaseName = ${basysbio-basicDatabaseName}
post-registration-upload.database.databaseKind = ${basysbio-databaseKind}
post-registration-upload.database.readOnlyGroup = ${basysbio-readOnlyGroup}
post-registration-upload.database.readWriteGroup = ${basysbio-readWriteGroup}
post-registration-upload.database.scriptFolder = ${basysbio-scriptFolder}

# Data Set Validators
data-set-validators = time-series-validator

time-point-data-set-types = AgilentRNA2color, NimbleGenRNA1colorBsubT1, \
    NimbleGenRNA1colorBsubT1MedianNorm, NimbleGenRNA1colorBsubT1QQnorm, MetaboliteLCMS, \
    PhysRFR, PhysOD, LcaMicCFD

time-series-validator.data-set-type = TIME_SERIES
time-series-validator.path-patterns = *
time-series-validator.exclude-path-patterns = request.properties
time-series-validator.columns = id, human-readable, controlled-gene, data-column
time-series-validator.id.header-pattern = CompoundID|GeneLocus|Abs
time-series-validator.id.mandatory = true
time-series-validator.id.order = 1
time-series-validator.id.value-validator = ch.systemsx.cisd.etlserver.validation.HeaderBasedValueValidatorFactory
time-series-validator.id.header-types = compound, gene-locus, abs
time-series-validator.id.compound.header-pattern = CompoundID
time-series-validator.id.compound.value-type = unique
time-series-validator.id.compound.value-pattern = BSBME:[0-9]+(BSBME:[0-9])*|CHEBI:[0-9]+(_CHEBI:[0-9]+)*
time-series-validator.id.gene-locus.header-pattern = GeneLocus
time-series-validator.id.gene-locus.value-type = unique
time-series-validator.id.gene-locus.value-pattern = BSU[0-9]+|BSU_misc_RNA_[0-9]+|VMG_[0-9]+_[0-9]+|VMG_[0-9]+_[0-9]+_c
time-series-validator.id.abs.header-pattern = Abs
time-series-validator.id.abs.value-type = unique
time-series-validator.id.abs.value-pattern = OD600
time-series-validator.human-readable.header-pattern = HumanReadable
time-series-validator.human-readable.order = 2
time-series-validator.controlled-gene.header-pattern = ControlledGene
time-series-validator.controlled-gene.order = 3
time-series-validator.data-column.header-validator = eu.basysbio.cisd.dss.DataColumnHeaderValidator
time-series-validator.data-column.header-validator.elements = experiment-code, cultivation-method, biological-replicate-code, \
    time-point, time-point-type, technical-replicate-code, cel-loc, data-set-type, value-type, scale, bi-id, cg
time-series-validator.data-column.header-validator.experiment-code.type = vocabulary
time-series-validator.data-column.header-validator.experiment-code.terms = Ma, GI, GM, MG
time-series-validator.data-column.header-validator.cultivation-method.type = vocabulary
time-series-validator.data-column.header-validator.cultivation-method.terms = BR, SF, MW, MS
time-series-validator.data-column.header-validator.biological-replicate-code.type = string
time-series-validator.data-column.header-validator.biological-replicate-code.pattern = B[0-9]+(_B[0-9]+)*
time-series-validator.data-column.header-validator.time-point.type = integer
time-series-validator.data-column.header-validator.time-point-type.type = vocabulary
time-series-validator.data-column.header-validator.time-point-type.terms = EX, IN, SI
time-series-validator.data-column.header-validator.technical-replicate-code.type = string
time-series-validator.data-column.header-validator.technical-replicate-code.pattern = T[0-9]+(_T[0-9]+)*
time-series-validator.data-column.header-validator.cel-loc.type = vocabulary
time-series-validator.data-column.header-validator.cel-loc.terms = CE, ES, ME, CY, NC
time-series-validator.data-column.header-validator.data-set-type.type = vocabulary
time-series-validator.data-column.header-validator.data-set-type.terms = ${time-point-data-set-types}
time-series-validator.data-column.header-validator.value-type.type = string
time-series-validator.data-column.header-validator.value-type.pattern = (Value|Mean|Median|Std|Var|Error|Iqr)\\[(mM|Ratio1|RatioCs)\\]
time-series-validator.data-column.header-validator.scale.type = vocabulary
time-series-validator.data-column.header-validator.scale.terms = LIN, Log2, Log10, Ln
time-series-validator.data-column.header-validator.bi-id.type = string
time-series-validator.data-column.header-validator.bi-id.pattern = NB|BBA[0-9]*
time-series-validator.data-column.header-validator.cg.type = string
time-series-validator.data-column.header-validator.cg.pattern = NC|BSU[0-9]+_[0-9]
time-series-validator.data-column.can-define-multiple-columns = true
time-series-validator.data-column.value-type = numeric
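# Illustration only: a few example values that satisfy the validator patterns defined above,
# assuming standard Java regular expression semantics:
#   CompoundID                  CHEBI:16236_CHEBI:15343
#   GeneLocus                   BSU00010
#   Abs                         OD600
#   biological-replicate-code   B1_B2
#   technical-replicate-code    T1
#   value-type                  Mean[mM]
#   bi-id                       BBA123
#   cg                          BSU00010_1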
# Comma-separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs = main-thread, time-point

# ---------------------------------------------------------------------------
# main thread configuration
# ---------------------------------------------------------------------------

# The directory to watch for incoming data.
main-thread.incoming-dir = ${data-folder}/incoming
main-thread.incoming-data-completeness-condition = auto-detection
main-thread.delete-unidentified = true

main-thread.data-set-info-extractor = eu.basysbio.cisd.dss.TimeSeriesDataSetInfoExtractor
main-thread.type-extractor = ch.systemsx.cisd.etlserver.cifex.CifexTypeExtractor
main-thread.storage-processor = ch.systemsx.cisd.etlserver.CifexStorageProcessor
main-thread.storage-processor.keep-file-regex = .*(?