# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

# The root directory of the data store
storeroot-dir = data/store

# The directory where the command queue file is located; defaults to storeroot-dir 
commandqueue-dir =

# Port
port = 8444

# Session timeout in minutes
session-timeout = 720

# Path to the keystore
keystore.path = etc/openBIS.keystore

# Password of the keystore
keystore.password = changeit

# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 60

# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large 
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything 
# lower than 180.
shutdown-timeout = 180

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water mark is
# specified or if the value is negative, the system will not watch the free disk space.
highwater-mark = -1
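# For example, to be notified when less than roughly 1GB of disk space is left (an illustrative
# value, not a recommendation):
# highwater-mark = 1048576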

# If a data set is successfully registered, an email is sent to the registrator. 
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = https://localhost:8443

# The username to use when contacting the openBIS server
username = etlserver

# The password to use when contacting the openBIS server
password = etlserver

#
# CIFEX configuration: Only needed if data export should work without the user having to type in 
# their password. Note that in order for this to work the CIFEX server needs to be configured to 
# allow calling setSessionUser() from the IP address of this data store server, see the configuration 
# option allowed-ips-for-set-session-user in CIFEX's service.properties.
#

# The admin username to use when contacting the CIFEX server 
cifex-admin-username =

# The admin password to use when contacting the CIFEX server 
cifex-admin-password =
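# For example, if this DSS runs on the same host as CIFEX, the CIFEX-side service.properties might
# contain something like the following (the exact value format is an assumption, check the CIFEX
# documentation):
# allowed-ips-for-set-session-user = 127.0.0.1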

# The base URL for Web client access.
download-url = https://localhost:8444

# SMTP properties (must start with 'mail' to be considered). 
# mail.smtp.host = localhost
# mail.from = datastore_server@localhost
# If this property is set, a test e-mail will be sent to the specified address after the DSS has successfully started up.
# mail.test.address = test@localhost

# ---------------- Timing parameters for file system operations on remote shares.

# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this 
# timeout will be terminated. 
timeout = 60
# Number of times a timed-out operation will be retried (0 means: every file system 
# operation will only ever be performed once).
max-retries = 11
# Time (in seconds) to wait after an operation has timed out before retrying.
failure-interval = 10
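# With the values above, a persistently failing operation is attempted at most 12 times
# (1 initial try + 11 retries); in the worst case this takes roughly 12 * 60s + 11 * 10s = 830 seconds.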

# The period of no write access that needs to pass before an incoming data item is considered 
# complete and ready to be processed (in seconds) [default: 300]. 
# Only relevant when the auto-detection method is used to determine whether incoming data are ready to be processed.
# quiet-period = <value in seconds>
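# For example, to wait one minute of write inactivity before processing (illustrative value):
# quiet-period = 60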

# Globally used separator character which separates entities in a data set file name 
data-set-file-name-entity-separator = _

# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------

# Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
# If a name has the 'default-' prefix, that plugin will be used by default in the data set Data View.
reporting-plugins = demo-reporter
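# For example, renaming the plugin below to 'default-demo-reporter' (and prefixing its properties
# accordingly) would make it the default plugin in the Data View:
# reporting-plugins = default-demo-reporter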

# Label of the plugin which will be shown to the users.
demo-reporter.label = Show Dataset Size
# Comma separated list of dataset type codes which can be handled by this plugin.
demo-reporter.dataset-types = UNKNOWN
# Plugin class specification (together with the list of packages this class belongs to).
demo-reporter.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoReportingPlugin
# The property file. Its content will be passed as a parameter to the plugin.
demo-reporter.properties-file = 

# Comma separated names of processing plugins. Each plugin should have configuration properties prefixed with its name.
processing-plugins = demo-processor

# Processing plugins are configured in the same way as the reporting plugins. 
demo-processor.label = Demo Processing
demo-processor.dataset-types = UNKNOWN
demo-processor.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoProcessingPlugin
demo-processor.properties-file = 

# Data set validators used to accept or reject data sets to be registered.
# Comma separated list of validator definitions.
data-set-validators = validator

# Definition of data set validator 'validator'
validator.data-set-type = HCS_IMAGE
validator.path-patterns = **/*.txt
validator.columns = id, description, size
validator.id.header-pattern = ID
validator.id.mandatory = true
validator.id.order = 1
validator.id.value-type = unique
validator.description.header-pattern = Description
validator.description.value-type = string
validator.description.value-pattern = .{0,100}
validator.size.header-pattern = A[0-9]+
validator.size.can-define-multiple-columns = true
validator.size.allow-empty-values = true
validator.size.value-type = numeric
validator.size.value-range = [0,Infinity)


# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs = main-thread
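# For example, to run two threads (the second thread name is hypothetical; it would need its own
# set of '<thread>.*' properties analogous to the 'main-thread' ones below):
# inputs = main-thread, second-thread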

# ---------------------------------------------------------------------------
# 'main-thread' thread configuration
# ---------------------------------------------------------------------------
# The directory to watch for incoming data.
main-thread.incoming-dir = data/incoming

# If 'true' then unidentified and invalid data sets will be deleted instead of being moved to the 'unidentified' folder.
# Allowed values:
#  - false   - (default) move unidentified or invalid data sets to the 'unidentified' folder
#  - true    - delete unidentified or invalid data sets
# delete-unidentified = true

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values: 
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file    - when an appropriate marker file for the data exists. 
# The default value is 'marker-file'.
main-thread.incoming-data-completeness-condition = marker-file

# The space the samples extracted by this thread belong to. If commented out or empty, then samples
# are considered to be associated with the database instance (not space private). 
# main-thread.space-code = <change this>

# Path to the script that will be executed after successful data set registration. 
# The script will be called with two parameters: <data-set-code> and <absolute-data-set-path> (in the data store).
# main-thread.post-registration-script = /example/scripts/my-script.sh
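# For example, for a (hypothetical) data set code 20230101120000000-1, the call would look like:
# /example/scripts/my-script.sh 20230101120000000-1 <absolute path of the data set inside data/store>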

# ---------------------------------------------------------------------------
# (optional) image overview plugins configuration
# ---------------------------------------------------------------------------

# Comma separated names of image overview plugins. 
# Each plugin should have configuration properties prefixed with its name.
# Generic properties for each <plugin> include: 
#   <plugin>.class   - Fully qualified plugin class name (mandatory) of a class 
#                      which implements IDatasetImageOverviewPlugin interface.
#   <plugin>.default - If true, all data set types not handled by other plugins are handled 
#                      by this plugin (default = false). 
#   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin 
#                      (optional and ignored if default is true, otherwise mandatory). 
# Example:
#overview-plugins = default-overview, my-overview
#
#default-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
#default-overview.default = true
# Optional property specific to the plugin
#default-overview.label = default plugin
#
#my-overview.class = org.mycompany.MyOverviewPlugin
#my-overview.dataset-types = MY-DATASET-TYPE-CODE

# ---------------------------------------------------------------------------
# maintenance plugins configuration
# ---------------------------------------------------------------------------

# Comma separated names of maintenance plugins.  
# Each plugin should have configuration properties prefixed with its name.
# Mandatory properties for each <plugin> include: 
#   <plugin>.class - Fully qualified plugin class name
#   <plugin>.interval - The time between plugin executions (in seconds)
# Optional properties for each <plugin> include:
#   <plugin>.start - Time of the first execution (HH:mm)
#   <plugin>.execute-only-once - If true, the task will be executed exactly once and 
#                                the interval will be ignored. Defaults to false.
#maintenance-plugins = 

# Creates a hierarchical version of the data store, based on the content of the database
#hierarchy-builder.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
# The time between rebuilding the hierarchical store structure (in seconds)
#hierarchy-builder.interval = 86400
# The root directory of the hierarchical data store
#hierarchy-builder.hierarchy-root-dir = data/hierarchical-store
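# Example (hypothetical task name) showing the optional 'start' and 'execute-only-once' properties:
#one-time-builder.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
#one-time-builder.hierarchy-root-dir = data/hierarchical-store
#one-time-builder.interval = 86400
#one-time-builder.start = 01:00
# With execute-only-once = true the interval above is ignored and the task runs a single time.
#one-time-builder.execute-only-once = true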

# ---------------- Plugin properties
# The extractor class to use for code extraction
main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
# Separator used to extract the barcode in the data set file name
main-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
# The index of the sample code in the name when split by the entity-separator
#main-thread.data-set-info-extractor.index-of-sample-code = -1
# The index of the codes of parent data sets (leave this commented out to _not_ have a data set parent)
#main-thread.data-set-info-extractor.index-of-parent-data-set-codes = -2
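# For example, with the separator '_' and index-of-sample-code = -1, an incoming item named
# 'EXPERIMENT1_SAMPLE42' (hypothetical name) would yield the sample code 'SAMPLE42'.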
# The space code of the extracted samples
main-thread.data-set-info-extractor.space-code = TEST
# Location of file containing data set properties 
#main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties

# IDataSetInfoExtractor working with CIFEX DataStoreTrigger
# main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.cifex.CifexDataSetInfoExtractor

# The extractor class to use for type extraction
main-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
main-thread.type-extractor.file-format-type = TIFF
main-thread.type-extractor.locator-type = RELATIVE_LOCATION
main-thread.type-extractor.data-set-type = HCS_IMAGE
main-thread.type-extractor.is-measured = true

# ITypeExtractor working with CIFEX DataStoreTrigger
# main-thread.type-extractor = ch.systemsx.cisd.etlserver.cifex.CifexTypeExtractor
# main-thread.type-extractor.locator-type = RELATIVE_LOCATION
# main-thread.type-extractor.is-measured = true

# The storage processor (IStorageProcessor implementation)
main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor

# IStorageProcessor deleting files not matching specified regular expression
# main-thread.storage-processor = ch.systemsx.cisd.etlserver.CifexStorageProcessor
# main-thread.storage-processor.keep-file-regex = .*(?<!request\.properties)$ 


# ---------------------------------------------------------------------------
# dss-rpc
# ---------------------------------------------------------------------------
# The dss-rpc section configures the RPC put functionality by providing a mapping between data 
# set type and input thread parameters.
#
# The default input thread is specified by the put-default key. If not specified, the first input 
# thread will be used.
#
# Mappings are specified by dss-rpc.put.<data-set-type-code> = <thread-name>
#
# If this section is empty, then the first input thread will be used.
# 
dss-rpc.put-default = main-thread
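# Example mapping (assuming data sets of type HCS_IMAGE should be handled by 'main-thread'):
# dss-rpc.put.HCS_IMAGE = main-thread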