# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

# Host name of the machine on which the data store server is running
host-address = https://$HOSTNAME

# Parent directory of the store directory and all the dropboxes
root-dir = $DSS_ROOT_DIR

# The root directory of the data store
storeroot-dir = ${root-dir}/store

incoming-root-dir = ${root-dir}

# Cache for data set files from other Data Store Servers
cache-workspace-folder = ../../data/dss-cache

# The session workspace is stored in the storage root by default, so that it has the same
# amount of space available as the final storage and so that atomic moves are possible.
session-workspace-root-dir = ${storeroot-dir}/sessionWorkspace

# The directory where the command queue file is located. Defaults to storeroot-dir, but it
# should ideally be on the same computer as the server, in a directory that survives upgrades.
commandqueue-dir = ../../datastore_commandqueue

# Comma-separated list of definitions of additional queues for processing plugins.
# Each entry is of the form <queue name>:<regular expression>.
# A corresponding persistent queue is created. All processing plugins with a key matching the
# regular expression are associated with that queue.
#
# The key of a processing plugin is its core-plugin name, which is the name of the folder
# containing 'plugin.properties'.
#
# If archiving is enabled, the following three processing plugins are defined:
# 'Archiving', 'Copying data sets to archive', and 'Unarchiving'.
#data-set-command-queue-mapping = archiving:Archiving|Copying data sets to archive

# Port
port = 8444
use-ssl = true

# Session timeout in minutes
session-timeout = 720

# Path to the keystore
keystore.path = etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 5
quiet-period = 10

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 180

# The minimum time (in seconds) that a data stream stays available after the moment a user
# requested its URL. If not specified, the default value (20 seconds) is used.
# minimum-time-to-keep-streams-in-sec = 20

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water
# mark is specified or the value is negative, the system will not watch the disk space.
# Two kinds of high water mark are supported: one that checks the space on the store, and one
# that checks the amount of free space for the recovery state (on the local filesystem).
highwater-mark = -1
recovery-highwater-mark = -1
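# Example (hypothetical values): to be warned when the store drops below 10 GB and the local
# recovery area drops below 1 GB, both marks would be set in kilobytes
# (10 GB = 10 * 1024 * 1024 KB = 10485760 KB):
# highwater-mark = 10485760
# recovery-highwater-mark = 1048576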
# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = ${host-address}:8443

# The username to use when contacting the openBIS server
username = etlserver

# The password for the etlserver user who contacts the openBIS server
password = etlserver_password

# The base URL for Web client access to the data store server.
download-url = ${host-address}:${port}

# SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails in the file system in the root directory.
mail.smtp.host = file://${root-dir}
# mail.smtp.host = localhost
# mail.from = openbis-dss@localhost
# mail.smtp.user =
# mail.smtp.password =
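# Example (hypothetical host and credentials): to deliver mail through a real SMTP server
# instead of the file-based delivery above, the same properties would be set like this:
# mail.smtp.host = smtp.example.org
# mail.from = openbis-dss@example.org
# mail.smtp.user = dss-mailer
# mail.smtp.password = secret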
# Data sources
data-sources = path-info-db

# Data source for the pathinfo database
path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode = postgresql
path-info-db.basicDatabaseName = pathinfo
# The host and optionally the port. Default is 'localhost'.
# path-info-db.urlHostPart =
path-info-db.databaseKind = prod
path-info-db.scriptFolder = datastore_server/sql
# The owner and password of the database. Default: the user who started up the DSS.
# path-info-db.owner =
# Owner password
# path-info-db.password =
# The administrator user of the database server.
# The default is defined by the selected database engine, which is 'postgres' in the case of PostgreSQL.
# path-info-db.adminUser =
# Administrator password
# path-info-db.adminPassword =

# ---------------------------------------------------------------------------
# maintenance plugins configuration
# ---------------------------------------------------------------------------
# Comma-separated names of maintenance plugins. Each plugin should have configuration
# properties prefixed with its name.
maintenance-plugins = post-registration, path-info-deletion

# Maintenance task that, as a post-registration step, feeds all paths of freshly registered
# data sets into the pathinfo database
post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval = 30
post-registration.cleanup-tasks-folder = ${root-dir}/post-registration/cleanup-tasks
post-registration.last-seen-data-set-file = ${root-dir}/post-registration/last-seen-data-set.txt
post-registration.post-registration-tasks = pathinfo-feeding
post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum = true

# Maintenance task for deleting entries from the pathinfo database after deletion of a data set
path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval = 120
path-info-deletion.data-source = path-info-db
path-info-deletion.data-set-table-name = data_sets
path-info-deletion.data-set-perm-id = CODE

# ---------------------------------------------------------------------------
# processing plugins configuration
# ---------------------------------------------------------------------------
# Comma-separated names of processing plugins. Each plugin should have configuration
# properties prefixed with its name.
processing-plugins = path-info-db-consistency-check

# Processing task that checks the consistency between the data store and the meta information
# stored in the PathInfoDB. It sends out an email which contains all differences found.
path-info-db-consistency-check.label = Path Info DB consistency check
path-info-db-consistency-check.dataset-types = .*
path-info-db-consistency-check.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin

# ---------------------------------------------------------------------------
# dropbox configurations
# ---------------------------------------------------------------------------
#
# Data set files with file types .h5 and .h5ar can be handled as virtual folders or as plain files.
#
# Files of type .h5 are HDF5 container files with some hierarchically structured data.
# Files of type .h5ar are HDF5 archive files with hierarchically structured file systems.
# HDF5 archive files are similar to ZIP files.
# How these files are handled inside a data set can be controlled with the following properties
# for the web-based data set uploader in core UI and ELN:
# dataset-uploader.h5-folders and dataset-uploader.h5ar-folders
#
# If the property is 'false', files of the corresponding file type will be handled as plain files.
# If the property is 'true', files of the corresponding file type will be handled as the root folder
# of a virtual file system if possible. If that isn't possible, they will be handled as plain files.
# The default behavior is to handle .h5 files as plain files and .h5ar files as folders.
#
# dataset-uploader.h5-folders = false
# dataset-uploader.h5ar-folders = true

inputs = default-dropbox
dss-rpc.put-default = default-dropbox

default-dropbox.incoming-dir = ${incoming-root-dir}/incoming-default
default-dropbox.incoming-data-completeness-condition = auto-detection
default-dropbox.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.api.v2.JavaTopLevelDataSetHandlerV2
default-dropbox.program-class = ch.systemsx.cisd.etlserver.registrator.DefaultDropbox
default-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
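# Example (sketch, not part of the shipped configuration): a Jython dropbox could be registered
# alongside the default one. The name 'jython-dropbox' and the script location are hypothetical;
# the handler class and the 'script-path' property are assumed from the standard openBIS Jython
# dropbox API. The dropbox name would also have to be added to the 'inputs' list above.
# jython-dropbox.incoming-dir = ${incoming-root-dir}/incoming-jython
# jython-dropbox.incoming-data-completeness-condition = auto-detection
# jython-dropbox.top-level-data-set-handler = ch.systemsx.cisd.etlserver.registrator.api.v2.JythonTopLevelDataSetHandlerV2
# jython-dropbox.script-path = ${incoming-root-dir}/scripts/jython-dropbox.py
# jython-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
#
# The referenced script would define a process(transaction) function. A minimal sketch (the
# data set type RAW_DATA is a placeholder; real dropboxes usually also attach the data set
# to an experiment or sample):
#
#   def process(transaction):
#       data_set = transaction.createNewDataSet()
#       data_set.setDataSetType("RAW_DATA")
#       transaction.moveFile(transaction.getIncoming().getAbsolutePath(), data_set)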
# ---------------------------------------------------------------------------
# Archiver configuration (optional)
# ---------------------------------------------------------------------------
# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
# Archiver class specification (together with the list of packages this class belongs to).
#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver

# ---------------------------------------------------------------------------
# Imaging database for screening and microscopy (optional, only used if the screening technology is switched on)
# ---------------------------------------------------------------------------
# Format: <host>[:<port>].
# imaging-database.url-host-part = localhost
# imaging-database.kind = prod
# imaging-database.owner =
# imaging-database.password =

# ---------------------------------------------------------------------------
# Proteomics database (optional, only used if the proteomics technology is switched on)
# ---------------------------------------------------------------------------
# Format: <host>[:<port>].
# proteomics-database.url-host-part = localhost
# proteomics-basic-database-name = proteomics
# proteomics-database-kind = productive
# proteomics-database-owner =
# proteomics-database-password =

# ---------------------------------------------------------------------------
# FTP / SFTP server configuration
# ---------------------------------------------------------------------------
# The built-in FTP / SFTP server provides users with an alternative way to download their data
# using client programs like FileZilla or lftp.
#
# When set to 'true', an internal FTP / SFTP server will be started. Note that for a server to
# actually start up, ftp.server.ftp-port and / or ftp.server.sftp-port additionally need to be
# set to a value other than 0.
#
# ftp.server.enable = true
# ftp.server.sftp-port = 2222

# ---------------------------------------------------------------------------
# Rsync
# ---------------------------------------------------------------------------
# Typical options to disable copying of permissions, owner, and group
# rsync-options = --no-p --no-o --no-g

# ---------------------------------------------------------------------------
# CIFS Server configuration
# ---------------------------------------------------------------------------
# The built-in CIFS server provides users with an alternative way to download their data using
# client programs like Mac Finder or Windows Explorer.
#
# cifs.server.enable = true
# cifs.server.smb-port = 1445
# cifs.server.session-log-flags = Error

# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------
jython-version=2.7