# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

# The root directory of the data store
storeroot-dir = data/store

# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =

# Port
port = 8444

# Network settings

# Should the DSS web page be served with SSL encryption?
use-ssl = true

# Should Java NIO selector-based sockets be used?
# Set this to false when running on JRE 1.6, as this version has bugs which make NIO very slow.
use-nio-selector-socket = true

# Session timeout in minutes
session-timeout = 720

# The root directory for session workspace directories
session-workspace-root-dir = data/sessionWorkspace

# Cache for data set files from other Data Store Servers
cache-workspace-folder = ../../data/dss-cache

#
# Data set authorization cache
#

# The expiration time of entries in the data set authorization cache in minutes
# (set to 0 to disable the data set authorization cache completely)
authorization-cache-expiration-time = 5

# The time period between two runs of the authorization cache cleanup timer in minutes
authorization-cache-cleanup-timer-period = 180

# Path to the keystore
keystore.path = etc/openBIS.keystore

# Password of the keystore
keystore.password = changeit

# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 60

# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 180

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water mark
# is specified or if the value is negative, the system will not be watching.
highwater-mark = -1
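
# For illustration only (hypothetical threshold): to be notified when less than 10 GB of free
# disk space remain, set the high water mark to 10 * 1048576 KB:
# highwater-mark = 10485760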
# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when the data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = https://localhost:8443

# Time-out for accessing the openBIS server. Default value is 5 minutes.
#server-timeout-in-minutes = 10

# The username to use when contacting the openBIS server
username = etlserver

# The password to use when contacting the openBIS server
password = etlserver

#
# JMX memory monitor
#

# Interval between two runs of the memory monitor (in seconds).
# Set to -1 to disable the memory monitor.
memorymonitor-monitoring-interval = 60

# Interval between two regular log calls of the memory monitor (in seconds).
# Set to -1 to disable regular memory usage logging.
memorymonitor-log-interval = 3600

# The percentage of memory that, if exceeded, triggers a notify log of the memory manager.
# Set to 100 to disable.
memorymonitor-high-watermark-percent = 90

#
# CIFEX configuration: Only needed if data export should work without the user having to type in
# his password. Note that in order for this to work the CIFEX server needs to be configured to
# allow calling setSessionUser() from the IP address of this data store server; see configuration
# option allowed-ips-for-set-session-user in CIFEX's service.properties.
#

# The admin username to use when contacting the CIFEX server
cifex-admin-username =

# The admin password to use when contacting the CIFEX server
cifex-admin-password =

# The base URL for Web client access.
download-url = https://localhost:8444

# The minimum time (in seconds) of availability of the data stream since the time when the user
# requested the data stream URL. Default: 20s.
# data-stream-timeout = 20

# ---------------------------------------------------------------------------
# WebStart Applications
# ---------------------------------------------------------------------------
# The webstart-jar-path is the location of jars for WebStart programs. By default, it points to
# the lib directory, which is usually the correct value. In some rare circumstances, it may make
# sense to override it and specify an alternate value.
# webstart-jar-path = lib/

# SMTP properties (must start with 'mail' to be considered).
# mail.smtp.host = localhost
# mail.from = datastore_server@localhost
# If this property is set, a test e-mail will be sent to the specified address after DSS has
# successfully started up.
# mail.test.address = test@localhost

# ---------------- Timing parameters for file system operations on remote shares.

# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this
# timeout will be terminated.
timeout = 60

# Number of times that a timed-out operation will be tried again (0 means: every file system
# operation will only ever be performed once).
max-retries = 11

# Time (in seconds) to wait after an operation has timed out before re-trying.
failure-interval = 10
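
# For illustration only (assuming each retry waits the full failure interval and may itself run
# into the timeout): with the settings above, a persistently failing operation is attempted
# 1 + 11 = 12 times at up to 60 s each, with 10 s pauses in between, so it is given up after at
# most 12 * 60 + 11 * 10 = 830 seconds, i.e. roughly 14 minutes.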
# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine if incoming data are ready to
# be processed.
# quiet-period =

# ---------------------------------------------------------------------------

# Data sources
data-sources = path-info-db

# Data source for the pathinfo database
path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode = postgresql
path-info-db.basicDatabaseName = pathinfo
path-info-db.databaseKind = demo
path-info-db.scriptFolder = datastore_server/sql

# ---------------------------------------------------------------------------

# Globally used separator character which separates entities in a data set file name
data-set-file-name-entity-separator = _

# Comma separated names of processing threads. Each thread should have configuration properties
# prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as
# 'my-etl.code-extractor'.
inputs = main-thread

# ---------------------------------------------------------------------------
# 'main-thread' thread configuration
# ---------------------------------------------------------------------------

# The directory to watch for incoming data.
main-thread.incoming-dir = data/incoming

# If 'true' then unidentified and invalid data sets will be deleted instead of being moved to
# the 'unidentified' folder.
# Allowed values:
#  - false - (default) move unidentified or invalid data sets to the 'unidentified' folder
#  - true  - delete unidentified or invalid data sets
# delete-unidentified = true

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file    - when an appropriate marker file for the data exists.
# The default value is 'marker-file'.
main-thread.incoming-data-completeness-condition = marker-file

# The space the samples extracted by this thread belong to. If commented out or empty, then
# samples are considered associated with a database instance (not space private).
# main-thread.space-code =

# Path to the script that will be executed before data set registration.
# The script will be called with two parameters: <data-set-code> and <absolute-data-set-path>
# (in the incoming dropbox).
# NOTE: before starting the DSS server make sure the script is accessible and executable.
# main-thread.pre-registration-script = /example/scripts/my-script.sh

# Path to the script that will be executed after successful data set registration.
# The script will be called with two parameters: <data-set-code> and <absolute-data-set-path>
# (in the data store).
# main-thread.post-registration-script = /example/scripts/my-script.sh

# The extractor class to use for code extraction
main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
# Separator used to extract the barcode in the data set file name
main-thread.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
# The index of the sample code in the name when split by the entity-separator
#main-thread.index-of-sample-code = -1
# The index of the codes of parent data sets (leave this commented out to _not_ have a data set
# parent)
#index-of-parent-data-set-codes = -2
# The space
main-thread.data-set-info-extractor.space-code = TEST
# Location of the file containing data set properties
#main-thread.data-set-info-extractor.data-set-properties-file-name = data-set.properties

# IDataSetInfoExtractor working with CIFEX DataStoreTrigger
# main-thread.data-set-info-extractor = ch.systemsx.cisd.etlserver.cifex.CifexDataSetInfoExtractor

# The extractor class to use for type extraction
main-thread.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
main-thread.type-extractor.file-format-type = TIFF
main-thread.type-extractor.locator-type = RELATIVE_LOCATION
main-thread.type-extractor.data-set-type = HCS_IMAGE
main-thread.type-extractor.is-measured = true

# ITypeExtractor working with CIFEX DataStoreTrigger
# main-thread.type-extractor = ch.systemsx.cisd.etlserver.cifex.CifexTypeExtractor
# main-thread.type-extractor.locator-type = RELATIVE_LOCATION
# main-thread.type-extractor.is-measured = true

# The storage processor (IStorageProcessor implementation)
main-thread.storage-processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor

# IStorageProcessor deleting files not matching the specified regular expression
# main-thread.storage-processor = ch.systemsx.cisd.etlserver.CifexStorageProcessor
# main-thread.storage-processor.keep-file-regex = .*(?

# ---------------------------------------------------------------------------
# dss-rpc
# ---------------------------------------------------------------------------
# The dss-rpc section configures the RPC put functionality by providing a mapping between data
# set type and input thread.
# Mappings are specified by dss-rpc.put.<data-set-type-code> = <thread-name>.
# The default input thread is specified by the dss-rpc.put-default key.
# If this section is empty, then the first input thread will be used.
# dss-rpc.put-default = main-thread
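
# For illustration only (hypothetical mapping, reusing the data set type and thread configured
# above): the following line would route RPC puts of HCS_IMAGE data sets to 'main-thread':
# dss-rpc.put.HCS_IMAGE = main-thread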
# ---------------------------------------------------------------------------
# (optional) image overview plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of image overview plugins.
# Each plugin should have configuration properties prefixed with its name.
# Generic properties for each <plugin> include:
#   <plugin>.class         - Fully qualified plugin class name (mandatory) of a class
#                            which implements the IDatasetImageOverviewPlugin interface.
#   <plugin>.default       - If true, all data set types not handled by other plugins should be
#                            handled by this plugin (default = false).
#   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin
#                            (optional and ignored if default is true, otherwise mandatory).
# Example:
#overview-plugins = default-overview, my-overview
#
#default-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoOverviewPlugin
#default-overview.default = true
# Optional property specific to the plugin
#default-overview.label = default plugin
#
#my-overview.class = org.mycompany.MyOverviewPlugin
#my-overview.dataset-types = MY-DATASET-TYPE-CODE

# ---------------------------------------------------------------------------
# maintenance plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of maintenance plugins.
# Each plugin should have configuration properties prefixed with its name.
# Mandatory properties for each <plugin> include:
#   <plugin>.class    - Fully qualified plugin class name
#   <plugin>.interval - The time between plugin executions (in seconds)
# Optional properties for each <plugin> include:
#   <plugin>.start             - Time of the first execution (HH:mm)
#   <plugin>.execute-only-once - If true, the task will be executed exactly once and the
#                                interval will be ignored. By default set to false.

# Configured but not enabled maintenance plugins:
#  * path-info-feeding, enable this if you migrate from an instance without a pathinfo database
maintenance-plugins = post-registration, path-info-deletion

# Creates a hierarchical version of the data store, based on the content of the database
#hierarchy-builder.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
# The time between rebuilding the hierarchical store structure (in seconds)
#hierarchy-builder.interval = 86400
# The root directory of the hierarchical data store
#hierarchy-builder.hierarchy-root-dir = data/hierarchical-store
# The naming strategy for the symbolic links
#hierarchy-builder.link-naming-strategy.class = ch.systemsx.cisd.etlserver.plugins.TemplateBasedLinkNamingStrategy
# The exact form of link names produced by TemplateBasedLinkNamingStrategy is configurable
# via the following template. The variables
#   dataSet, dataSetType, experiment, instance, project, sample, space
# will be recognized and replaced in the final link name.
#hierarchy-builder.link-naming-strategy.template = ${space}/${project}/${experiment}/${datasettype}+${sample}+${dataset}
# When specified for a given <data-set-type>, this store subpath will be used as the symbolic
# link source.
#hierarchical-storage-updater.link-source-subpath.<data-set-type> = original
# Setting this property to "true" for a given <data-set-type> will treat the first child item
# (file or folder) in the specified location as the symbolic link source. It can be used in
# conjunction with "link-source-subpath.<data-set-type>" to produce links pointing to a folder
# with an unknown name, e.g.
#   /original/UNKNOWN-NAME-20100307-1350
#hierarchical-storage-updater.link-from-first-child.<data-set-type> = true
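
# For illustration only (hypothetical entity codes): with the template above, a data set
# 20100307151545-42 of type HCS_IMAGE attached to sample SAMP1 in experiment EXP1 of project
# PROJ in space TEST would be linked as
#   TEST/PROJ/EXP1/HCS_IMAGE+SAMP1+20100307151545-42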
# Maintenance task (performed only once) to create paths of existing data sets in the pathinfo
# database
path-info-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
path-info-feeding.compute-checksum = true
path-info-feeding.execute-only-once = true

# Maintenance task for deleting entries from the pathinfo database after deletion of data sets
path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval = 120
path-info-deletion.data-set-table-name = data_sets
path-info-deletion.data-source = path-info-db
path-info-deletion.data-set-perm-id = CODE

# Maintenance task for post-registration of all paths of a freshly registered data set to be fed
# into the pathinfo database
post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval = 30
post-registration.cleanup-tasks-folder = ../../cleanup-tasks
# The following date should be the day when the DSS is started up for the first time with
# PathInfoDatabaseFeedingTask. After PathInfoDatabaseFeedingTask has been performed, the
# following line can be deleted.
post-registration.ignore-data-sets-before-date = 2011-04-18
post-registration.last-seen-data-set-file = ../../last-seen-data-set-for-postregistration.txt
post-registration.post-registration-tasks = pathinfo-feeding
post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
post-registration.pathinfo-feeding.compute-checksum = true

# ---------------------------------------------------------------------------
# processing plugins configuration
# ---------------------------------------------------------------------------
# Comma separated names of processing plugins. Each plugin should have configuration properties
# prefixed with its name.
processing-plugins = path-info-db-consistency-check

# Processing task that checks the consistency between the data store and the meta information
# stored in the pathinfo database. It sends out an email which contains all differences found.
path-info-db-consistency-check.label = Path Info DB consistency check
path-info-db-consistency-check.dataset-types = .*
path-info-db-consistency-check.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.DataSetAndPathInfoDBConsistencyCheckProcessingPlugin

#
# FTP / SFTP server configuration
#
# The built-in FTP / SFTP server provides users with an alternative way to download their data
# using client programs like FileZilla or lftp.
#
# When set to 'true' an internal FTP / SFTP server will be started. Note that to really start up
# a server, ftp.server.ftp-port and/or ftp.server.sftp-port additionally need to be set to a
# value other than 0.
ftp.server.enable = true
#
# Port where openBIS listens for incoming FTP connection requests. Remove the comment to enable
# the FTP server.
#ftp.server.ftp-port = 2121
#
# Specifies a range of ports to be used when working in passive mode (ignored by the SFTP
# server). The default value is "2130-2140", thus only 11 ports will be provided to connecting
# clients. If there are more than 11 concurrent users, the configuration will have to be
# adjusted accordingly.
#ftp.server.passivemode.port.range = 2130-2140
#
# Port where openBIS listens for incoming SFTP connection requests. Comment out to disable the
# SFTP server.
ftp.server.sftp-port = 2222
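#
# For illustration only (hypothetical user and host; the port is the sftp-port configured
# above): connecting to the built-in SFTP server with lftp could look like
#   lftp sftp://my-user@dss-host:2222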
#
# When set to true, enables the FTP server to run in "active mode" (ignored by the SFTP server).
# This means data connections will always go to a single pre-configured port on the server. Such
# a configuration can ease the server firewall administration (only one additional port has to
# be opened). However, it requires the client machines to be directly visible from the server.
#ftp.server.activemode.enable = true
#
# The active mode port number configuration. Default value is 2122.
#ftp.server.activemode.port = 2122
#
# Enables explicit FTP over SSL/TLS (ignored by the SFTP server). Users can switch on connection
# encryption by issuing the FTP "AUTH" command. Similarly to the global 'use-ssl' parameter,
# when set to 'true' all 'keystore.*' properties become mandatory.
#ftp.server.use-ssl = true
#
# Allows enabling implicit FTP over SSL/TLS (ignored by the SFTP server). While this method
# ensures no passwords are sent around unencrypted, it is not standardized by an RFC and is
# therefore poorly supported by clients.
#ftp.server.implicit-ssl = false
#
# Maximum number of threads used by the internal FTP server (ignored by the SFTP server).
# Defaults to 25.
#ftp.server.maxThreads = 25
#
# The virtual folder structure supplied by the FTP server looks like
#   ${space}/${project}/${experiment}/<dataset>
# While the first 3 levels in the hierarchy are not configurable, the format of the '<dataset>'
# level can be specified with a template using the following built-in variables:
#   ${dataSetCode}    - the data set code
#   ${dataSetType}    - the data set type
#   ${dataSetDate}    - the creation date of the data set
#   ${disambiguation} - if ${dataSetCode} is not used, the ${disambiguation} variable must be
#                       present in the template in order to avoid path collisions.
#   ${fileName}       - when ${fileName} is specified in the template, '<dataset>' will include
#                       the name of a file located directly under the data set root folder. One
#                       entry will be created for each such file.
# Example:
#   ftp.server.dataset.display.template = ${dataSetDate}-${disambiguation}-${fileName}
# Result:
#   /SPACE/PROJECT/EXPERIMENT/2011-10-10-10-10-A-file1.txt
#   /SPACE/PROJECT/EXPERIMENT/2011-10-10-10-10-A-file2.pdf
#   /SPACE/PROJECT/EXPERIMENT/2011-10-10-10-10-B-file1.txt
#   /SPACE/PROJECT/EXPERIMENT/2011-10-10-10-10-B-file2.pdf
#
# Optional parameter, default template value is ${dataSetCode}
ftp.server.dataset.display.template = ${dataSetType}-${dataSetCode}
#
# Also shows parent and child data sets inside a data set directory. The names of these data
# sets start with PARENT- or CHILD- followed by the evaluated
# ftp.server.dataset.display.template.
# Note, this feature only works if the template doesn't contain ${fileName}.
# Comment out (or set to false) to disable this feature.
ftp.server.dataset.show-parents-and-children = true
#
# Specifies that the contents of a data set should be listed starting from a preconfigured
# subpath. The property is only configurable per data set type.
# Example (will hide a single top-level folder starting with 'orig' for data sets of type
# HCS_IMAGE):
#   ftp.server.dataset.filelist.subpath.HCS_IMAGE = orig[^/]*
#ftp.server.dataset.filelist.subpath.<data-set-type> =
#
# A filter pattern applied to all files within a data set. Files not matching the pattern will
# be hidden and FTP users will not see them.
# Example (will only show PNG files for data sets of type HCS_IMAGE):
#   ftp.server.dataset.filelist.filter.HCS_IMAGE = .*png

# The Jython version to be used by jython dropboxes
jython-version=2.7