# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = DSS1

data-folder = targets/playground/data

# The root directory of the data store
storeroot-dir = ${data-folder}/store

# The directory for incoming files over RPC
rpc-incoming-dir = ${data-folder}/incoming-rpc

# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =

# Port
port = 8889

# Session timeout in minutes
session-timeout = 720

# Set to 'false' for development/testing without a deployed server. In this mode the datastore
# will not use SSL when connecting to openBIS. Otherwise, all 'keystore' properties need to be set
# for the SSL connection (this is the default when the use-ssl property is not set, so there is no
# need to specify it on production servers).
use-ssl = false

# Path to the keystore
keystore.path = ../datastore_server/dist/etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 10

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 180

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high-water mark is
# specified or the value is negative, the system will not monitor free disk space.
highwater-mark = -1

# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = http://localhost:8888

# The username to use when contacting the openBIS server
username = etlserver

# The password to use when contacting the openBIS server
password = etlserver

# The base URL for Web client access.
download-url = http://localhost:${port}

# SMTP properties (must start with 'mail' to be considered).
# mail.smtp.host = localhost
# mail.from = datastore_server@localhost
mail.smtp.host = file://targets/playground/email
mail.from = datastore_server@localhost

# ---------------- Timing parameters for file system operations on remote shares.

# Time (in seconds) to wait for any file system operation to finish. Operations exceeding this
# timeout will be terminated.
timeout = 60
# Number of times that a timed-out operation will be tried again (0 means: every file system
# operation will only ever be performed once).
max-retries = 11
# Time (in seconds) to wait after an operation has timed out before retrying.
failure-interval = 10

# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine whether incoming data are ready to be processed.
quiet-period = 10
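# Worked example for the timing parameters above (a sketch, assuming each retried attempt
# runs into the full timeout): an operation is attempted at most 1 + max-retries = 12 times,
# so the worst case before it is finally reported as failed is about
# 12 * 60 s + 11 * 10 s = 830 s (roughly 14 minutes).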
# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------

# Comma-separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
# If the name has the 'default-' prefix, it will be used by default in the data set Data View.
#reporting-plugins = tsv-viewer, csv-viewer

# ---------------------------------------------------------------------------
# plug-ins
# ---------------------------------------------------------------------------

#tsv-viewer.label = TSV View
#tsv-viewer.dataset-types = TSV
#tsv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
#tsv-viewer.properties-file =

# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# BEGIN SCREENING
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# screening database specification
# ---------------------------------------------------------------------------
data-sources = imaging-db
imaging-db.version-holder-class = ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseVersionHolder
imaging-db.databaseEngineCode = postgresql
imaging-db.basicDatabaseName = imaging
imaging-db.databaseKind = cina
imaging-db.scriptFolder = ../screening/source/sql
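# Note (an assumption based on the usual openBIS naming scheme, not configured explicitly here):
# with the settings above the DSS is expected to connect to a PostgreSQL database named
# <basicDatabaseName>_<databaseKind>, i.e. 'imaging_cina'; adjust databaseKind if your
# installation uses a different suffix.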
# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------

# Comma-separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
reporting-plugins = default-plate-image-analysis, plate-image-analysis-graph, csv-viewer

default-plate-image-analysis.label = Image Analysis Results
default-plate-image-analysis.dataset-types = HCS_IMAGE_ANALYSIS_DATA, HCS_ANALYSIS_PER_GENE
default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisMergedRowsReportingPlugin
default-plate-image-analysis.properties-file =

plate-image-analysis-graph.label = Image Analysis Graphs
plate-image-analysis-graph.dataset-types = HCS_IMAGE_ANALYSIS_DATA, HCS_ANALYSIS_PER_GENE
plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
plate-image-analysis-graph.servlet-path = datastore_server_graph/
plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties

# ---------------------------------------------------------------------------
# screening specific extension servlets
# ---------------------------------------------------------------------------

# list of additional web servlets which will be exposed
plugin-services = screening-image-download-servlet, tabular-data-graph-servlet, screening-dss-api-exporter-servlet

# class of the web servlet
screening-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
# URL which will be mapped to this servlet
screening-image-download-servlet.path = /datastore_server_screening/*

tabular-data-graph-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
tabular-data-graph-servlet.path = /datastore_server_graph/*
tabular-data-graph-servlet.properties-file = etc/tabular-data-graph.properties

# exposes a DSS API interface via RPC
screening-dss-api-exporter-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.DssScreeningApiServlet
screening-dss-api-exporter-servlet.path = /rmi-datastore-server-screening-api-v1/*

csv-viewer.label = CSV View
csv-viewer.dataset-types = HCS_IMAGE_ANALYSIS_DATA
csv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
csv-viewer.separator = ,

# ---------------------------------------------------------------------------
# screening specific maintenance plugins
# ---------------------------------------------------------------------------
maintenance-plugins = data-set-clean-up
# hierarchical-storage-updater

# the plugin which is run periodically to create a mirror structure of the store with the same files
# but with a user-readable directory structure
hierarchical-storage-updater.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
# specified in seconds. Here: every day
hierarchical-storage-updater.interval = 86400
hierarchical-storage-updater.hierarchy-root-dir = ${root-dir}/hierarchical-store
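# Sketch: the hierarchical-storage-updater configured above is currently not listed in
# 'maintenance-plugins' and is therefore inactive; to also enable the mirrored, user-readable
# store, the list could be extended as follows (commented out, adapt to your setup):
# maintenance-plugins = data-set-clean-up, hierarchical-storage-updater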
# Removes data sets deleted from openBIS also from the imaging database
data-set-clean-up.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
# specified in seconds. Here: every day
data-set-clean-up.interval = 86400
data-set-clean-up.data-source = imaging-db

# ---------------------------------------------------------------------------
# the overview plug-in for images
# ---------------------------------------------------------------------------
overview-plugins = microscopy-image-overview
microscopy-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
microscopy-image-overview.dataset-types = IMAGE

# ---------------------------------------------------------------------------
# END SCREENING
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# BEGIN DROPBOXES
# ---------------------------------------------------------------------------

# Comma-separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs = main-thread

# ---------------------------------------------------------------------------
# main thread configuration
# ---------------------------------------------------------------------------

# The directory to watch for incoming data.
main-thread.incoming-dir = ${data-folder}/incoming

# Determines when the incoming data should be considered complete and ready to be processed.
# Allowed values:
#  - auto-detection - when no write access has been detected for the specified 'quiet-period'
#  - marker-file - when an appropriate marker file for the data exists.
# The default value is 'marker-file'.
main-thread.incoming-data-completeness-condition = auto-detection
main-thread.delete-unidentified = true

# ---------------- Plugin properties

# The data set handler
main-thread.dataset-handler = ch.systemsx.cisd.cina.dss.bundle.CinaBundleDataSetHandler

# The extractor class to use for code extraction
main-thread.data-set-info-extractor = ch.systemsx.cisd.cina.dss.bundle.CinaBundleDataSetInfoExtractor

# The extractor class to use for type extraction
main-thread.type-extractor = ch.systemsx.cisd.cina.dss.bundle.CinaBundleTypeExtractor

# The storage processor (IStorageProcessor implementation)
main-thread.storage-processor = ch.systemsx.cisd.cina.dss.StorageProcessor
main-thread.storage-processor.processors = images, sundry
main-thread.storage-processor.images = ch.systemsx.cisd.cina.dss.CinaImageStorageProcessor
main-thread.storage-processor.images.data-source = imaging-db
main-thread.storage-processor.sundry = ch.systemsx.cisd.cina.dss.CinaSundryStorageProcessor

#
# The dss-rpc section configures the RPC put functionality by providing a mapping between data
# set type and input thread parameters.
#
# The default input thread is specified by the put-default key. If not specified, the first input
# thread will be used.
#
# Mappings are specified by dss-rpc.put.<data-set-type-code> = <thread-name>
#
# If this section is empty, then the first input thread will be used.
#
dss-rpc.put-default = main-thread

jython-version = 2.7
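# Example mapping for the dss-rpc section above (a sketch; HCS_IMAGE is a hypothetical
# data set type code, replace it with one defined in your openBIS instance):
# dss-rpc.put.HCS_IMAGE = main-thread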