# Unique code of this Data Store Server. Not more than 40 characters.
# CHANGE ----------------------------------------------
data-store-server-code = dss-screening

# host name of the machine on which the datastore server is running
host-address = http://127.0.0.1

# parent directory of the store directory and all the dropboxes
root-dir = targets

# The root directory of the data store
# CHANGE ----------------------------------------------
#storeroot-dir = ${root-dir}/store
#storeroot-dir = /Users/tpylak/main/src/workspace/integration-tests/targets/playground-screening/datastore_server_screening/data/store
storeroot-dir = ${root-dir}/store-dynamix
#storeroot-dir = /Users/tpylak/main/src/lmc-deployment/backup-productive/store

# The directory where the command queue file is located; defaults to storeroot-dir
commandqueue-dir =

# Port
port = 8889
use-ssl = false

# Session timeout in minutes
session-timeout = 30

# Path to the keystore
keystore.path = dist/etc/openBIS.keystore
# Password of the keystore
keystore.password = changeit
# Key password of the keystore
keystore.key-password = changeit

# The check interval (in seconds)
check-interval = 5

# The time-out for clean up work in the shutdown sequence (in seconds).
# Note that the maximal time for the shutdown sequence to complete can be as large
# as twice this time.
# Remark: On a network file system it is not recommended to set this value to anything
# lower than 180.
shutdown-timeout = 2

# If free disk space goes below the value defined here, a notification email will be sent.
# The value must be specified in kilobytes (1048576 = 1024 * 1024 = 1GB). If no high water
# mark is specified or the value is negative, the system will not watch the disk space.
highwater-mark = 1000

# If a data set is successfully registered, an email is sent to the registrator.
# If this property is not specified, no email is sent to the registrator. This property
# does not affect the emails which are sent when a data set could not be registered.
notify-successful-registration = false

# The URL of the openBIS server
server-url = ${host-address}:8888

# The username to use when contacting the openBIS server
# CHANGE ----------------------------------------------
#username = etlserver_lmc
#username = etlserver_dynamix
username = etlserver

# The password for the etlserver user who contacts the openBIS server
password = etlserver_password

# The base URL for Web client access to the data store server.
download-url = ${host-address}:8889

# SMTP properties (must start with 'mail' to be considered).
# The current configuration saves the emails in the file system in the root directory.
mail.smtp.host = file://${root-dir}/emails
# mail.smtp.host = localhost
# mail.from = datastore_server@ethz.ch
# mail.smtp.user =
# mail.smtp.password =
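# Note: with the file:// scheme above, outgoing emails are not sent over SMTP but are
# written as plain files into ${root-dir}/emails, which is convenient for local testing.
# Likewise, the variable references above resolve against host-address, e.g.
#   server-url   = http://127.0.0.1:8888
#   download-url = http://127.0.0.1:8889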
# ---------------------------------------------------------------------------
# (optional) archiver configuration
# ---------------------------------------------------------------------------

# Configuration of an archiver task. All properties are prefixed with 'archiver.'.
# Archiver class specification (together with the list of packages this class belongs to).
#archiver.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.demo.DemoArchiver

# ---------------------------------------------------------------------------
# INTERNAL CONFIGURATION
# Do not change this part unless you are developing openBIS extensions.
# ---------------------------------------------------------------------------

# ---------------------------------------------------------------------------
# screening database specification
# ---------------------------------------------------------------------------
data-sources = imaging-db
imaging-db.version-holder-class = ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseVersionHolder
imaging-db.databaseEngineCode = postgresql
imaging-db.basicDatabaseName = imaging
# CHANGE ----------------------------------------------
#imaging-db.databaseKind = dev
#imaging-db.databaseKind = integration_tests
#imaging-db.databaseKind = lmc_productive_s84
#imaging-db.databaseKind = small
imaging-db.databaseKind = dynamix_dev
#imaging-db.databaseKind = dnx
imaging-db.scriptFolder = source/sql
imaging-db.maxActive = 7
imaging-db.maxIdle = 7

# ---------------------------------------------------------------------------
# reporting and processing plugins configuration
# ---------------------------------------------------------------------------

# Comma separated names of reporting plugins. Each plugin should have configuration properties prefixed with its name.
reporting-plugins = default-plate-image-analysis, plate-image-analysis-graph

default-plate-image-analysis.label = Image Analysis Results
default-plate-image-analysis.dataset-types = HCS_ANALYSIS_WELL_FEATURES
default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisMergedRowsReportingPlugin
#default-plate-image-analysis.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.standard.TSVViewReportingPlugin
default-plate-image-analysis.properties-file =
default-plate-image-analysis.separator = ,

plate-image-analysis-graph.label = Image Analysis Graphs
plate-image-analysis-graph.dataset-types = HCS_ANALYSIS_WELL_FEATURES
plate-image-analysis-graph.class = ch.systemsx.cisd.openbis.dss.generic.server.plugins.ImageAnalysisGraphReportingPlugin
plate-image-analysis-graph.servlet-path = datastore_server_graph/
plate-image-analysis-graph.properties-file = etc/tabular-data-graph.properties

# ---------------------------------------------------------------------------
# screening specific extension servlets
# ---------------------------------------------------------------------------

# list of additional web servlets which will be exposed
plugin-services = screening-image-download-servlet, tabular-data-graph-servlet, screening-dss-api-exporter-servlet

# class of the web servlet
screening-image-download-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
# URL which will be mapped to this servlet
screening-image-download-servlet.path = /datastore_server_screening/*

tabular-data-graph-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.TabularDataGraphServlet
tabular-data-graph-servlet.path = /datastore_server_graph/*
tabular-data-graph-servlet.properties-file = etc/tabular-data-graph.properties

# exposes the DSS RPC API interface
screening-dss-api-exporter-servlet.class = ch.systemsx.cisd.openbis.dss.generic.server.DssScreeningApiServlet
screening-dss-api-exporter-servlet.path = /rmi-datastore-server-screening-api-v1/*
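# For orientation: servlet paths are mapped under the DSS base URL, so with the settings
# above the screening RPC API should be reachable at (assuming the default servlet mapping)
#   http://127.0.0.1:8889/rmi-datastore-server-screening-api-v1/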
# ---------------------------------------------------------------------------
# image overview plugins configuration
# ---------------------------------------------------------------------------

# Comma separated names of image overview plugins.
# Each plugin should have configuration properties prefixed with its name.
# Generic properties for each <plugin> include:
#   <plugin>.class         - Fully qualified plugin class name (mandatory).
#   <plugin>.default       - If true, all data set types not handled by other plugins are
#                            handled by this plugin (default = false).
#   <plugin>.dataset-types - Comma separated list of data set types handled by the plugin
#                            (optional and ignored if default is true, otherwise mandatory).
overview-plugins = microscopy-image-overview

microscopy-image-overview.class = ch.systemsx.cisd.openbis.dss.generic.server.MergingImagesDownloadServlet
microscopy-image-overview.dataset-types = MICROSCOPY_IMAGE, .*IMG.*

# ---------------------------------------------------------------------------
maintenance-plugins = data-set-clean-up
# hierarchical-storage-updater

# the plugin which is run periodically to create a mirror structure of the store with the
# same files but with a user-readable structure of directories
hierarchical-storage-updater.class = ch.systemsx.cisd.etlserver.plugins.HierarchicalStorageUpdater
# specified in seconds. Here: every day
hierarchical-storage-updater.interval = 86400
hierarchical-storage-updater.hierarchy-root-dir = ${root-dir}/hierarchical-store

# Removes data sets deleted from openBIS also from the imaging database
data-set-clean-up.class = ch.systemsx.cisd.etlserver.plugins.DataSetDeletionMaintenanceTask
# specified in seconds. Here: every day
data-set-clean-up.interval = 86400
data-set-clean-up.data-source = imaging-db

# ---------------------------------------------------------------------------
# DROPBOXES CONFIGURATION
# ---------------------------------------------------------------------------

incoming-root-dir = ${root-dir}

# True if incoming directories should be created on server startup if they don't exist.
# Default - false (the server will fail at startup if one of the incoming directories doesn't exist).
incoming-dir-create = true

# Globally used separator character which separates entities in a data set file name
data-set-file-name-entity-separator = _

# The period of no write access that needs to pass before an incoming data item is considered
# complete and ready to be processed (in seconds) [default: 300].
# Valid only when the auto-detection method is used to determine if incoming data are ready to be processed.
quiet-period = 3

# code of the default space in openBIS to which the data will be imported
import-space-code = TEST

# Comma separated names of processing threads. Each thread should have configuration properties prefixed with its name.
# E.g. the 'code-extractor' property for the thread 'my-etl' should be specified as 'my-etl.code-extractor'.
inputs = images-hcs-jython-dropbox, plate-analysis-dropbox, hcs-all-in-one-dropbox, flexible-hcs-image-dropbox
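# Only the threads listed in 'inputs' are started; the other dropbox definitions below
# (e.g. merged-channels-images, split-channels-images) are kept as inactive templates.
# To activate one of them, append its name to the list, e.g. (hypothetical edit):
#inputs = images-hcs-jython-dropbox, plate-analysis-dropbox, hcs-all-in-one-dropbox, flexible-hcs-image-dropbox, merged-channels-images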
# ---------------------------------------------------------------------------

# The directory to watch for incoming data.
merged-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-merged-channels
merged-channels-images.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
merged-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
merged-channels-images.data-set-info-extractor.entity-separator = .
merged-channels-images.data-set-info-extractor.index-of-sample-code = 0
merged-channels-images.data-set-info-extractor.index-of-data-producer-code =
merged-channels-images.data-set-info-extractor.space-code = ${import-space-code}

# The extractor class to use for type extraction
merged-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
merged-channels-images.type-extractor.file-format-type = JPG
merged-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
merged-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
merged-channels-images.type-extractor.is-measured = true

merged-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
# How should the original data be stored? Possible values:
#   unchanged       - nothing is changed, the default
#   hdf5            - all the data will be packaged into one hdf5 file
#   hdf5_compressed - like hdf5, but each file is stored in a compressed form
merged-channels-images.storage-processor.original-data-storage-format = hdf5_compressed
# Should the thumbnails be generated?
# It slows down the data set registration, but increases the performance when the user wants to see the image.
# Can be 'true' or 'false'; 'false' is the default value.
merged-channels-images.storage-processor.generate-thumbnails = true
merged-channels-images.storage-processor.compress-thumbnails = true
# Thumbnail size in pixels
# merged-channels-images.storage-processor.thumbnail-max-width = 300
# merged-channels-images.storage-processor.thumbnail-max-height = 200
# DEPRECATED: use 'channel-codes' and 'channel-labels' instead
#merged-channels-images.storage-processor.channel-names = gfp, dapi
# Codes of the channels in which images have been acquired. Allowed characters: [A-Z0-9_].
# Number and order of entries must be consistent with 'channel-labels'.
merged-channels-images.storage-processor.channel-codes = GFP, DAPI
# Labels of the channels in which images have been acquired. Number and order of entries must be consistent with 'channel-codes'.
merged-channels-images.storage-processor.channel-labels = Gfp, Dapi
# Format: [width]x[height], e.g. 3x4. Specifies the grid into which a microscope divided the well to acquire images.
merged-channels-images.storage-processor.well_geometry = 3x3
# Implementation of the IHCSImageFileExtractor interface which maps images to the location on the plate and the particular channel.
# Here: the extractor requires that each image name adheres to the schema:
#   <any-text>_<plate-code>_<well-code>_<tile-code>_<channel-name>.<allowed-image-extension>
merged-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
merged-channels-images.storage-processor.validate-plate-name = false
# specification of the imaging db
merged-channels-images.storage-processor.data-source = imaging-db
# Optional comma separated list of color components.
# Available values: RED, GREEN or BLUE.
# If specified, the channels are extracted from the color components and override 'file-extractor' results.
merged-channels-images.storage-processor.extract-single-image-channels = GREEN, BLUE
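# Worked example of the schema above (hypothetical file name): an incoming image called
#   exp1_PLATE1_A01_1_GFP.jpg
# would be mapped by the file extractor to plate PLATE1, well A01, tile 1 and channel GFP.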
# ---------------------------------------------------------------------------

# The directory to watch for incoming data.
split-channels-images.incoming-dir = ${incoming-root-dir}/incoming-images-split-channels
split-channels-images.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
split-channels-images.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
split-channels-images.data-set-info-extractor.entity-separator = ${data-set-file-name-entity-separator}
split-channels-images.data-set-info-extractor.index-of-sample-code = 0
split-channels-images.data-set-info-extractor.space-code = ${import-space-code}

# The extractor class to use for type extraction
split-channels-images.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
split-channels-images.type-extractor.file-format-type = TIFF
split-channels-images.type-extractor.locator-type = RELATIVE_LOCATION
split-channels-images.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
split-channels-images.type-extractor.is-measured = true

split-channels-images.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
split-channels-images.storage-processor.generate-thumbnails = false
# The first specified channel will be blue, the second will be green and the third will be red.
# If there are more channels, combinations of colors will be used.
split-channels-images.storage-processor.channel-codes = DAPI, GFP
split-channels-images.storage-processor.channel-labels = Dapi, Gfp
split-channels-images.storage-processor.well_geometry = 3x3
split-channels-images.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
split-channels-images.storage-processor.data-source = imaging-db
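# Worked example of the color rule above: with channel-codes = DAPI, GFP, the DAPI channel
# (first entry) is rendered in blue and the GFP channel (second entry) in green.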
# --- HCS Images with flexible naming convention, merged channels ------------------------

# The directory to watch for incoming data.
flexible-hcs-image-dropbox-merged-channels.incoming-dir = ${incoming-root-dir}/incoming-hcs-images-flexible-merged
flexible-hcs-image-dropbox-merged-channels.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
flexible-hcs-image-dropbox-merged-channels.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
flexible-hcs-image-dropbox-merged-channels.data-set-info-extractor.entity-separator = .
flexible-hcs-image-dropbox-merged-channels.data-set-info-extractor.index-of-sample-code = 0
flexible-hcs-image-dropbox-merged-channels.data-set-info-extractor.space-code = DEMO

# The extractor class to use for type extraction
flexible-hcs-image-dropbox-merged-channels.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
flexible-hcs-image-dropbox-merged-channels.type-extractor.file-format-type = TIFF
flexible-hcs-image-dropbox-merged-channels.type-extractor.locator-type = RELATIVE_LOCATION
flexible-hcs-image-dropbox-merged-channels.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
flexible-hcs-image-dropbox-merged-channels.type-extractor.is-measured = true

flexible-hcs-image-dropbox-merged-channels.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
flexible-hcs-image-dropbox-merged-channels.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.FlexibleHCSImageFileExtractor
flexible-hcs-image-dropbox-merged-channels.storage-processor.data-source = imaging-db
flexible-hcs-image-dropbox-merged-channels.storage-processor.channel-names = BLUE, GREEN, RED
flexible-hcs-image-dropbox-merged-channels.storage-processor.well_geometry = 2x3
flexible-hcs-image-dropbox-merged-channels.storage-processor.tile_mapping = 1,2,3;4,5,6

# --- HCS Images with flexible naming convention, split channels -------------------------

# The directory to watch for incoming data.
flexible-hcs-image-dropbox.incoming-dir = ${incoming-root-dir}/incoming-hcs-images-flexible
flexible-hcs-image-dropbox.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
flexible-hcs-image-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
flexible-hcs-image-dropbox.data-set-info-extractor.entity-separator = .
flexible-hcs-image-dropbox.data-set-info-extractor.index-of-sample-code = 0
flexible-hcs-image-dropbox.data-set-info-extractor.space-code = DEMO

# The extractor class to use for type extraction
flexible-hcs-image-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
flexible-hcs-image-dropbox.type-extractor.file-format-type = TIFF
flexible-hcs-image-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
flexible-hcs-image-dropbox.type-extractor.data-set-type = HCS_IMAGE_OVERVIEW
flexible-hcs-image-dropbox.type-extractor.is-measured = true

flexible-hcs-image-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
flexible-hcs-image-dropbox.storage-processor.file-extractor = ch.systemsx.cisd.openbis.dss.etl.FlexibleHCSImageFileExtractor
flexible-hcs-image-dropbox.storage-processor.data-source = imaging-db
flexible-hcs-image-dropbox.storage-processor.channel-names = DAPI, FITC, CY5
#flexible-hcs-image-dropbox.storage-processor.channel-names = RED, GREEN, BLUE
#flexible-hcs-image-dropbox.storage-processor.extract-single-image-channels = RED, GREEN, BLUE
flexible-hcs-image-dropbox.storage-processor.well_geometry = 2x2
flexible-hcs-image-dropbox.storage-processor.tile_mapping = 1,2;3,4
flexible-hcs-image-dropbox.storage-processor.generate-thumbnails = true
flexible-hcs-image-dropbox.storage-processor.thumbnail-max-width = 512
flexible-hcs-image-dropbox.storage-processor.thumbnail-max-height = 512
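# Reading the tile layout above: in tile_mapping, rows are separated by ';' and tiles within
# a row by ','. So 1,2;3,4 places tiles 1 and 2 in the upper row and tiles 3 and 4 in the
# lower row of the 2x2 well grid, and 1,2,3;4,5,6 gives two rows of three tiles for the
# 2x3 geometry of the merged-channels dropbox above.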
# --- Jython dropbox for HCS images from Incell 2000 -------------------------------------

# The directory to watch for incoming data.
images-hcs-jython-dropbox.incoming-dir = ${incoming-root-dir}/incoming-images-hcs-jython
images-hcs-jython-dropbox.incoming-data-completeness-condition = auto-detection

# The top-level handler which delegates data set registration to the Jython script below
images-hcs-jython-dropbox.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.JythonPlateDataSetHandler
images-hcs-jython-dropbox.script-path = etc/data-set-handler.py
images-hcs-jython-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
# specification of the imaging db
images-hcs-jython-dropbox.storage-processor.data-source = imaging-db
# Optional, true by default.
# Set to false to allow data sets in one experiment to use different channels.
# In this case 'channel-codes' and 'channel-labels' become optional and are used only to determine the label for each channel code.
# It should be set to 'false' for overlay image data sets.
images-hcs-jython-dropbox.storage-processor.define-channels-per-experiment = false
# Should the thumbnails be generated?
# It slows down the data set registration, but increases the performance when the user wants to see the image.
# Can be 'true' or 'false'; 'false' is the default value.
# images-hcs-jython-dropbox.storage-processor.generate-thumbnails = false
# images-hcs-jython-dropbox.storage-processor.compress-thumbnails = true
# Thumbnail size in pixels
# images-hcs-jython-dropbox.storage-processor.thumbnail-max-width = 300
# images-hcs-jython-dropbox.storage-processor.thumbnail-max-height = 200
# How should the original data be stored? Possible values:
#   unchanged       - nothing is changed, the default
#   hdf5            - all the data will be packaged into one hdf5 file
#   hdf5_compressed - like hdf5, but each file is stored in a compressed form
# images-hcs-jython-dropbox.storage-processor.original-data-storage-format = hdf5_compressed
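# For orientation, a minimal etc/data-set-handler.py could look like the sketch below.
# This is an illustrative assumption, not the actual script shipped with this server; the
# calls follow the general openBIS jython dropbox API and may differ for the plate handler.
#
#   def process(transaction):
#       # register the incoming folder as one image overview data set (assumed API)
#       data_set = transaction.createNewDataSet()
#       data_set.setDataSetType('HCS_IMAGE_OVERVIEW')
#       transaction.moveFile(transaction.getIncoming().getAbsolutePath(), data_set)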
# ---- GE Explorer image analysis dropbox ------------------------------------------------

# The directory to watch for incoming data.
plate-analysis-dropbox.incoming-dir = ${incoming-root-dir}/incoming-analysis
plate-analysis-dropbox.incoming-data-completeness-condition = auto-detection

# The extractor class to use for code extraction
plate-analysis-dropbox.data-set-info-extractor = ch.systemsx.cisd.etlserver.DefaultDataSetInfoExtractor
plate-analysis-dropbox.data-set-info-extractor.entity-separator = .
plate-analysis-dropbox.data-set-info-extractor.index-of-parent-data-set-codes = 0
plate-analysis-dropbox.data-set-info-extractor.index-of-space-code = 1
plate-analysis-dropbox.data-set-info-extractor.index-of-sample-code = 2

# The extractor class to use for type extraction
plate-analysis-dropbox.type-extractor = ch.systemsx.cisd.etlserver.SimpleTypeExtractor
plate-analysis-dropbox.type-extractor.file-format-type = CSV
plate-analysis-dropbox.type-extractor.locator-type = RELATIVE_LOCATION
plate-analysis-dropbox.type-extractor.data-set-type = HCS_ANALYSIS_WELL_FEATURES
plate-analysis-dropbox.type-extractor.is-measured = false

# The storage processor (IStorageProcessor implementation)
plate-analysis-dropbox.storage-processor = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
plate-analysis-dropbox.storage-processor.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
plate-analysis-dropbox.storage-processor.data-source = imaging-db
plate-analysis-dropbox.storage-processor.separator = ,
plate-analysis-dropbox.storage-processor.well-name-row = Well
plate-analysis-dropbox.storage-processor.well-name-col = Well
plate-analysis-dropbox.storage-processor.well-name-col-is-alphanum = true

# --- Dispatcher jython dropbox: HCS images from Incell 2000 + GE Explorer image analysis ---

# The directory to watch for incoming data.
hcs-all-in-one-dropbox.incoming-dir = ${incoming-root-dir}/incoming-all-in-one
hcs-all-in-one-dropbox.incoming-data-completeness-condition = auto-detection

# The top-level handler which delegates data set registration to the Jython script below
hcs-all-in-one-dropbox.top-level-data-set-handler = ch.systemsx.cisd.openbis.dss.etl.jython.JythonPlateDataSetHandler
hcs-all-in-one-dropbox.script-path = etc/data-set-handler.py
hcs-all-in-one-dropbox.staging-dir = ${incoming-root-dir}

hcs-all-in-one-dropbox.storage-processor = ch.systemsx.cisd.etlserver.DispatcherStorageProcessor
hcs-all-in-one-dropbox.storage-processor.processors = images, analysis
# --- images: accepts all image data set registrations
hcs-all-in-one-dropbox.storage-processor.images = ch.systemsx.cisd.openbis.dss.etl.PlateStorageProcessor
hcs-all-in-one-dropbox.storage-processor.images.data-source = imaging-db
# --- analysis: accepts all other data sets (and assumes they are single CSV files)
hcs-all-in-one-dropbox.storage-processor.analysis = ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorStorageProcessor
hcs-all-in-one-dropbox.storage-processor.analysis.processor = ch.systemsx.cisd.etlserver.DefaultStorageProcessor
hcs-all-in-one-dropbox.storage-processor.analysis.data-source = imaging-db
hcs-all-in-one-dropbox.storage-processor.analysis.separator = ,
hcs-all-in-one-dropbox.storage-processor.analysis.well-name-row = Well
hcs-all-in-one-dropbox.storage-processor.analysis.well-name-col = Well
hcs-all-in-one-dropbox.storage-processor.analysis.well-name-col-is-alphanum = true
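# A hypothetical CSV accepted by the feature-vector configuration above: well names are
# taken from the column named 'Well' (alphanumeric names like A1, B2), and every other
# column becomes one feature of the HCS_ANALYSIS_WELL_FEATURES data set, e.g.
#
#   Well,CellCount,TotalIntensity
#   A1,142,0.83
#   B2,97,0.41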