# Unique code of this Data Store Server. Not more than 40 characters.
data-store-server-code = dss-screening-platonic

# Host name of the machine on which the data store server is running
host-address = http://localhost

# Parent directory of the store directory and all the dropboxes
root-dir = targets/platonic

# The root directory of the data store
storeroot-dir = ${root-dir}/store

# The directory for the Python scripts (and other configuration data)
python-script-folder = ../test-data/screening/etc

# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =

# Create any incoming directories that do not already exist
incoming-dir-create = true

# Port the data store server listens on
port = 8889
use-ssl = false

# Session timeout in minutes
session-timeout = 30

# Path to the keystore
keystore.path = dist/etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 5

# The time-out for clean-up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system, it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 2

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1 GB). If no high water
# mark is specified or the value is negative, the system will not be watching.
highwater-mark = 1000

# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = ${host-address}:8888

# The username to use when contacting the openBIS server
username = etlserver

# The password for the etlserver user who contacts the openBIS server
password = etlserver_password

# The base URL for Web client access to the data store server.
download-url = ${host-address}:8889

# SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails in the file system under the root directory.
mail.smtp.host = file://${root-dir}/emails
# mail.smtp.host = localhost
# mail.from = datastore_server@ethz.ch
# mail.smtp.user =
# mail.smtp.password =

core-plugins-folder = ../screening/source/core-plugins
enabled-technologies = screening

imaging-database.kind = platonic
screening-sql-root-folder = source/

# ---------------------------------------------------------------------------
# (optional) archiver configuration
# ---------------------------------------------------------------------------
# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
# Archiver class specification (together with the list of packages this class belongs to).
#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver
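# For illustration only (a hypothetical sketch, not part of this test setup): a
# production archiver would be enabled by uncommenting an 'archiver.class' line and
# adding further 'archiver.'-prefixed properties. The class and property names below
# are assumptions, shown purely to illustrate the shape of such a configuration:
#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.RsyncArchiver
#archiver.destination = /path/to/archive/destination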
# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# screening database specification
# ---------------------------------------------------------------------------
data-sources = path-info-db

# Data source for the pathinfo database
path-info-db.version-holder-class = ch.systemsx.cisd.openbis.dss.generic.shared.PathInfoDatabaseVersionHolder
path-info-db.databaseEngineCode = postgresql
path-info-db.basicDatabaseName = pathinfo
path-info-db.databaseKind = platonic
path-info-db.scriptFolder = ../datastore_server/source/sql

# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------
# Comma-separated names of reporting plugins. Each plugin should have configuration
# properties prefixed with its name.
reporting-plugins = well-image-analysis-graph, csv-viewer

well-image-analysis-graph.label = Image Analysis Graphs
well-image-analysis-graph.dataset-types = HCS_ANALYSIS_WELL_FEATURES
well-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
well-image-analysis-graph.servlet-path = analysis_graph/
well-image-analysis-graph.properties-file = ${python-script-folder}/analysis-graph.properties
well-image-analysis-graph.servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
well-image-analysis-graph.servlet.path = /${well-image-analysis-graph.servlet-path}*
well-image-analysis-graph.servlet.properties-file = ${well-image-analysis-graph.properties-file}

csv-viewer.label = CSV View
csv-viewer.dataset-types = HCS_IMAGE_ANALYSIS_DATA
csv-viewer.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
csv-viewer.separator = ,

# ---------------------------------------------------------------------------
maintenance-plugins = path-info-deletion, post-registration

path-info-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask
path-info-feeding.execute-only-once = true

# Maintenance task for deleting entries in the pathinfo database after deletion of data sets
path-info-deletion.class = ch.systemsx.cisd.etlserver.plugins.DeleteFromExternalDBMaintenanceTask
path-info-deletion.interval = 120
path-info-deletion.data-source = path-info-db
path-info-deletion.data-set-perm-id = CODE
path-info-deletion.data-set-table-name = data_sets
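# Worked example (assuming maintenance-task 'interval' values are given in seconds,
# as in the default openBIS configurations): path-info-deletion.interval = 120 runs
# the deletion task every two minutes, and post-registration.interval = 30 (below)
# scans for freshly registered data sets every 30 seconds.
# Note that 'path-info-feeding' is configured above but not listed in
# 'maintenance-plugins'; feeding of new data sets happens instead through the
# 'post-registration' task below (see 'post-registration.pathinfo-feeding.class').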
# Maintenance task for post-registration of all paths of a freshly registered data set
# to be fed into the pathinfo database
post-registration.class = ch.systemsx.cisd.etlserver.postregistration.PostRegistrationMaintenanceTask
post-registration.interval = 30
post-registration.cleanup-tasks-folder = targets/cleanup-tasks
# The following date should be the day when the DSS is started up for the first time with
# PathInfoDatabaseFeedingTask. After PathInfoDatabaseFeedingTask has been performed, it can
# be removed and the following line can be deleted.
#post-registration.ignore-data-sets-before-date = 2011-04-18
post-registration.last-seen-data-set-file = targets/last-seen-data-set
post-registration.post-registration-tasks = pathinfo-feeding
post-registration.pathinfo-feeding.class = ch.systemsx.cisd.etlserver.path.PathInfoDatabaseFeedingTask

# ---------------------------------------------------------------------------
# DROPBOXES CONFIGURATION
# ---------------------------------------------------------------------------
incoming-root-dir = ${root-dir}

# Globally used separator character which separates entities in a data set file name
data-set-file-name-entity-separator = _

# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine if incoming data are ready
# to be processed.
quiet-period = 10

# Code of the default space in openBIS into which the data will be imported
import-space-code = PLATONIC

# Comma-separated names of processing threads. Each thread should have configuration
# properties prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'
inputs = platonic-plate, platonic-plate-splitted, platonic-plate-overlay, platonic-plate-features, platonic-plate-series, platonic-microscope

# ---------------------------------------------------------------------------
# --- platonic-plate ------------------------------------------------------
# ---------------------------------------------------------------------------
# The dropbox for idealized data -- data that doesn't represent real screens but
# rather data used for testing purposes
platonic-plate.incoming-dir = ${incoming-root-dir}/incoming-platonic-plate
platonic-plate.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
platonic-plate.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.v2.JythonPlateDataSetHandlerV2
platonic-plate.script-path = ${python-script-folder}/data-set-handler-plate.py
platonic-plate.staging-dir = ${incoming-root-dir}
platonic-plate.storage-processor = ch.systemsx.cisd.etlserver.DispatcherStorageProcessor
platonic-plate.storage-processor.processors = images, analysis
# --- images - accepts all image data set registrations
platonic-plate.storage-processor.images = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
platonic-plate.storage-processor.images.data-source = imaging-db
# --- analysis - accepts all other data sets (and assumes they are single CSV files)
platonic-plate.storage-processor.analysis = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
platonic-plate.storage-processor.analysis.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
platonic-plate.storage-processor.analysis.data-source = imaging-db
platonic-plate.storage-processor.analysis.separator = ,
platonic-plate.storage-processor.analysis.well-name-row = Well
platonic-plate.storage-processor.analysis.well-name-col = Well
platonic-plate.storage-processor.analysis.well-name-col-is-alphanum = true

platonic-plate-splitted. = platonic-plate.
platonic-plate-splitted.incoming-dir = ${incoming-root-dir}/incoming-platonic-plate-splitted
platonic-plate-splitted.script-path = ${python-script-folder}/data-set-handler-plate-splitted.py
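# Note on the dot notation above: a line of the form 'new-thread. = existing-thread.'
# (e.g. 'platonic-plate-splitted. = platonic-plate.') makes the new thread start from a
# copy of all properties of the existing thread; the properties listed after it then
# override individual values such as the incoming directory and the handler script.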
platonic-plate-overlay. = platonic-plate.
platonic-plate-overlay.incoming-dir = ${incoming-root-dir}/incoming-platonic-plate-overlays
platonic-plate-overlay.script-path = ${python-script-folder}/data-set-handler-plate-overlays.py

platonic-plate-features. = platonic-plate.
platonic-plate-features.incoming-dir = ${incoming-root-dir}/incoming-platonic-plate-features
platonic-plate-features.script-path = ${python-script-folder}/data-set-handler-plate-features.py

platonic-plate-series. = platonic-plate.
platonic-plate-series.incoming-dir = ${incoming-root-dir}/incoming-platonic-plate-series
platonic-plate-series.script-path = ${python-script-folder}/data-set-handler-plate-series.py

# ---------------------------------------------------------------------------
# --- platonic-microscope -------------------------------------------------
# ---------------------------------------------------------------------------
# The dropbox for idealized data -- data that doesn't represent real screens but
# rather data used for testing purposes
platonic-microscope.incoming-dir = ${incoming-root-dir}/incoming-platonic-microscope
platonic-microscope.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
platonic-microscope.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.v2.JythonPlateDataSetHandlerV2
platonic-microscope.script-path = ${python-script-folder}/data-set-handler-microscope.py
platonic-microscope.staging-dir = ${incoming-root-dir}
platonic-microscope.storage-processor = ch.systemsx.cisd.etlserver.DispatcherStorageProcessor
platonic-microscope.storage-processor.processors = images, analysis
# --- images - accepts all image data set registrations
platonic-microscope.storage-processor.images = ch.systemsx.cisd.openbis.dss.etl.MicroscopyStorageProcessor
platonic-microscope.storage-processor.images.data-source = imaging-db
# --- analysis - accepts all other data sets (and assumes they are single CSV files)
platonic-microscope.storage-processor.analysis = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
platonic-microscope.storage-processor.analysis.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
platonic-microscope.storage-processor.analysis.data-source = imaging-db
platonic-microscope.storage-processor.analysis.separator = ,
platonic-microscope.storage-processor.analysis.well-name-row = Well
platonic-microscope.storage-processor.analysis.well-name-col = Well
platonic-microscope.storage-processor.analysis.well-name-col-is-alphanum = true

jython-version = 2.7
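# For reference, the handler scripts referenced via 'script-path' are Jython modules
# that define a 'process(transaction)' entry point. A minimal sketch of the generic V2
# dropbox API is shown below; the data set type 'HCS_IMAGE_RAW' is a placeholder, and
# the screening handlers used here register image data sets through screening-specific
# factory methods instead:
#
#   def process(transaction):
#       incoming = transaction.getIncoming()
#       data_set = transaction.createNewDataSet()
#       data_set.setDataSetType('HCS_IMAGE_RAW')  # placeholder type
#       transaction.moveFile(incoming.getAbsolutePath(), data_set)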