diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
new file mode 100644
index 0000000000000000000000000000000000000000..a3bb7dff1f202ed8a6e5786c4c245b74e4c4effc
--- /dev/null
+++ b/.gitlab-ci.yml
@@ -0,0 +1,65 @@
+stages:          # List of stages for jobs, and their order of execution
+  - prepare
+  - validate
+  - test
+
+variables:
+  ## Name of the resource to test
+  RESOURCE_NAME: "focusProject"
+
+build-postgres-container:
+  stage: prepare
+  script:
+    - docker build -t pg-dachs test/postgres-docker
+
+build-dachs-container:
+  stage: prepare
+  script:
+    - docker build -t dachs test/gavo-docker
+
+validate-resource-description:   # Validates the resource description in the validate stage.
+  image: dachs
+  services:
+    - pg-dachs
+  variables:
+    POSTGRES_DB: gavo
+    POSTGRES_USER: postgres
+    POSTGRES_PASSWORD: gavo
+    POSTGRES_HOST: pg-dachs
+    POSTGRES_HOST_AUTH_METHOD: trust
+
+  before_script:
+    - ln -sf $PWD /var/gavo/inputs/$RESOURCE_NAME
+
+  stage: validate    # Runs in parallel with other validate-stage jobs.
+  script:
+    - gavo validate $RESOURCE_NAME/q.rd
+
+inspect-resource-description:   # Imports the data and takes screenshots in the test stage.
+  image: dachs
+  services:
+    - pg-dachs
+  variables:
+    POSTGRES_DB: gavo
+    POSTGRES_USER: postgres
+    POSTGRES_PASSWORD: gavo
+    POSTGRES_HOST_AUTH_METHOD: trust
+    POSTGRES_HOST: pg-dachs
+
+  before_script:
+    - psql -h pg-dachs -U postgres -d $POSTGRES_DB -a -f test/sql/test_data.sql
+    - ln -sf $PWD /var/gavo/inputs/$RESOURCE_NAME
+
+  stage: test    # Runs after the validate stage succeeds.
+  script:
+    - gavo imp $RESOURCE_NAME/q.rd
+    - gavo pub $RESOURCE_NAME/q.rd
+    - gavo serve start
+    - mkdir screens
+    - firefox --screenshot $PWD/screens/main.png http://localhost:8080/
+    - firefox --screenshot $PWD/screens/focusProject.png http://localhost:8080/focusProject
+  artifacts:
+    paths:
+      - screens/*.png
+    expire_in: 1 week
+
diff --git a/README.md b/README.md
index 3b746cfa5348574272f584110e472f6422d4e3fa..fac050e78a8e1e5a70b43ad61a46eaa7227b3c34 100644
--- a/README.md
+++ b/README.md
@@ -17,4 +17,18 @@ To simplify testing and development a CI/CD pipeline is configured and the
 necessary files for adding tests are stored in the `test` directory.
 
 Note that another directory is present and it is called `sql` in that directory
-the configuration files fro the grammars are kept in yaml format.
\ No newline at end of file
+the configuration files for the grammars are kept in yaml format.
+
+
+## Schema of the resource
+The current database schema is:
+![Database schema](docs/schema.png "Database schema")
+
+### Recreate the database schema image
+To recreate it, use the utility script stored in `utils/plot_schema.py`:
+
+```bash
+utils/plot_schema.py [resource_description] [name_of_the_file_without_extension]
+```
+
+The command above will create a `[name_of_the_file_without_extension].png` file.
diff --git a/docs/schema.png b/docs/schema.png
new file mode 100644
index 0000000000000000000000000000000000000000..a62a946274101eb279e79cc631646fae1fc03a77
Binary files /dev/null and b/docs/schema.png differ
diff --git a/q.rd b/q.rd
new file mode 100644
index 0000000000000000000000000000000000000000..096db3d7f96512061dfc4f6dcd1085a690edf7d2
--- /dev/null
+++ b/q.rd
@@ -0,0 +1,133 @@
+<!-- A template for a simple DaCHS SIAP (image search) service.
+To fill it out, search and replace %.*% 
+
+Note that this doesn't expose all features of DaCHS.  For advanced
+projects, you'll still have to read documentation... -->
+
+
+<resource schema="focusProject">
+  <meta name="creationDate">2023-02-02T10:19:08Z</meta>
+
+  <meta name="title">Focus Project</meta>
+  <meta name="description">
+    A nice description goes here
+  </meta>
+  <!-- Take keywords from 
+    http://www.ivoa.net/rdf/uat
+    if at all possible -->
+  <meta name="subject">%keywords; repeat the element as needed%</meta>
+
+  <meta name="creator">%authors in the format Last, F.I; Next, A.%</meta>
+  <meta name="instrument">%telescope or detector or code used%</meta>
+  <meta name="facility">%observatory/probe at which the data was taken%</meta>
+
+  <meta name="source">%ideally, a bibcode%</meta>
+  <meta name="contentLevel">Research</meta>
+  <meta name="type">Catalog</meta>  <!-- or Archive, Survey, Simulation -->
+
+  <!-- Waveband is of Radio, Millimeter, 
+      Infrared, Optical, UV, EUV, X-ray, Gamma-ray, can be repeated -->
+  <meta name="coverage.waveband">%word from controlled vocabulary%</meta>
+
+  <table id="main" onDisk="True" mixin="//scs#q3cindex" adql="True">
+
+    <column name="id" type="text"
+      ucd="meta.id;meta.main"
+      tablehead="Id"
+      description="Main identifier for this object."
+      verbLevel="1"/>
+    <column name="ra" type="double precision"
+      unit="deg" ucd="pos.eq.ra;meta.main"
+      tablehead="RA"
+      description="ICRS right ascension for this object."
+      verbLevel="1"/>
+    <column name="dec" type="double precision"
+      unit="deg" ucd="pos.eq.dec;meta.main"
+      tablehead="Dec"
+      description="ICRS declination for this object."
+      verbLevel="1"/>
+     
+    
+    <column description="Data product type" name="type" required="True" type="text" ucd="meta.id"/>
+    <column description="Format" name="access_format" required="True" type="text" utype="Access.format"/>
+    <column description="Access size" name="access_size" required="True" type="bigint" />
+    <column description="Access URL" name="access_url" required="True" type="text"  utype="Access.reference"/>
+    <column description="Exposure time" name="t_exptime" tablehead="Exposure time" required="True" type="real"/>
+    <column description="Central frequency" name="freq_ref" tablehead="Central frequency" required="True" type="real"/>
+    <column description="Frequency resolution" name="freq_resolution" tablehead="Frequency resolution" required="True" type="real"/>
+    <column description="Time resolution" name="t_resolution" tablehead="Time resolution" required="True" type="real"/>
+    <column description="Bandwidth" name="freq_bandwidth" tablehead="Bandwidth" required="True" type="real"/>
+    <column description="Sky footprint" name="s_region" type="text" tablehead="Sky footprint" />
+    <column description="FHWM of PSF" name="s_resolution_min" type="double precision"/>
+    
+    <column description="Activity" name="activity" type="text" />
+  </table>
+  
+  <table id="activity" onDisk="True" adql="True">
+    <column name="id" type="text"
+      ucd="meta.id;meta.main"
+      tablehead="Id"
+      description="Main identifier for this object."
+      verbLevel="1"/>
+    <column name="type" type="text"
+      ucd="meta.id"
+      tablehead="Type"
+      description="Type of activity"
+      verbLevel="1"/>
+    <column name="project" type="text" ucd="meta.id" tablehead="Project" description="Project code"/>
+    <column name="collection" type="text" ucd="meta.id" tablehead="Collection"/>
+  </table>
+  <table id="secondary" onDisk="True" adql="True">
+  
+    <column name="activity" description="Activity" type="text" />
+    <column description="Data product type" name="type" required="True" type="text" ucd="meta.id"/>
+    <column description="Format" name="access_format" required="True" type="text" utype="Access.format"/>
+    
+    <column description="Access URL" name="access_url" required="True" type="text"  utype="Access.reference"/>
+    
+  </table>
+
+  <coverage>
+    <updater sourceTable="main"/>
+  </coverage>
+
+  <!-- if you have data that is continually added to, consider using
+    updating="True" and an ignorePattern here; see also howDoI.html,
+    incremental updating -->
+  <data id="import">
+    <!-- template <sources> placeholder removed; the actual sources element follows -->
+
+    <!-- the fitsProdGrammar should do it for whenever you have
+    halfway usable FITS files.  If they're not halfway usable,
+    consider running a processor to fix them first – you'll hand
+    them out to users, and when DaCHS can't deal with them, chances
+    are their clients can't either -->
+    <sources pattern="sql/main.yml"/>
+        <property key="previewDir">previews</property>
+
+        <customGrammar module="res/sqlgrammar">
+
+        </customGrammar>
+
+    <make table="main">
+      <rowmaker>
+        <map name="id">@id</map>
+        <map name="ra">@ra</map>
+        <map name="dec">@dec</map>
+        <map name="type">@type</map>
+        <map name="access_format">@access_format</map>
+        <map name="access_url">@access_url</map>
+        <map name="access_size">@access_size</map>
+        <map name="t_exptime">@t_exptime</map>
+        <map name="freq_ref">@freq_ref</map>
+        <map name="freq_resolution">@freq_resolution</map>
+        <map name="t_resolution">@t_resolution</map>
+        <map name="freq_bandwidth">@freq_bandwidth</map>
+        <map name="s_region">@s_region</map>
+        <map name="s_resolution_min">@s_resolution_min</map>
+        <map name="activity">@activity</map>  
+      </rowmaker>
+    </make>
+  </data>
+
+</resource>
diff --git a/res/__pycache__/sqlgrammar.cpython-310.pyc b/res/__pycache__/sqlgrammar.cpython-310.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..04377ae95fa74aa2cf4bdebc2b2876adf1f14b55
Binary files /dev/null and b/res/__pycache__/sqlgrammar.cpython-310.pyc differ
diff --git a/res/__pycache__/sqlgrammar.cpython-39.pyc b/res/__pycache__/sqlgrammar.cpython-39.pyc
new file mode 100644
index 0000000000000000000000000000000000000000..385190c501c15c148254ea19e78cdb9af38457e5
Binary files /dev/null and b/res/__pycache__/sqlgrammar.cpython-39.pyc differ
diff --git a/res/sqlgrammar.py b/res/sqlgrammar.py
new file mode 100644
index 0000000000000000000000000000000000000000..065f987161c3e10e95315ae084da1a2cbcf30ce3
--- /dev/null
+++ b/res/sqlgrammar.py
@@ -0,0 +1,51 @@
+from gavo.grammars.customgrammar import CustomRowIterator
+import yaml
+from sqlalchemy import create_engine, text
+from argparse import ArgumentParser
+
+
+def parse_args():  # CLI arguments for stand-alone use (see __main__ block below)
+    parser = ArgumentParser(description='SQL grammar for DACHS')
+    parser.add_argument('operation', choices=['print', 'suggestMeta'])
+    parser.add_argument(
+        'sqlFile', help='YAML file containing the query to execute')
+    return parser.parse_args()
+
+
+def read_sql(conffile_path):
+    with open(conffile_path, 'r') as fin:
+        config = yaml.load(fin, Loader=yaml.SafeLoader)
+        engine = create_engine(config['connection'])
+        with engine.connect() as conn:
+            result = conn.execute(config['sql'])
+
+            return result
+    return None
+
+
+class RowIterator(CustomRowIterator):  # feeds DaCHS rows from a YAML-configured SQL query
+    def __init__(self, grammar, sourceToken, sourceRow=None):
+        CustomRowIterator.__init__(self, grammar, sourceToken, sourceRow)
+
+    def _iterRows(self):  # yields one {column: value} dict per database row
+        result = read_sql(self.sourceToken)  # sourceToken is the YAML config file path
+        for row in result:
+            row_dict = {key: value for key, value in zip(result.keys(), row)}
+            yield row_dict
+
+    def suggestRowMakerContent(self):  # prints <map> elements to paste into a q.rd rowmaker
+        one_item = next(self._iterRows())
+
+        for key in one_item.keys():
+            print('<map name="%(key)s">@%(key)s</map>' % {'key': key})
+
+
+if __name__ == '__main__':
+    import sys
+    args = parse_args()
+    iterator = RowIterator('', args.sqlFile, '')
+    if args.operation == 'print':
+        for k in iterator._iterRows():
+            print(k)
+    elif args.operation == 'suggestMeta':
+        iterator.suggestRowMakerContent()
diff --git a/sql/main.yml b/sql/main.yml
new file mode 100644
index 0000000000000000000000000000000000000000..874c85e470b5c91f97acb2bfab7cf654ab1351c6
--- /dev/null
+++ b/sql/main.yml
@@ -0,0 +1,3 @@
+connection: 'postgresql+psycopg2://test:test@pg-dachs:5432/lta'
+sql: |
+  select * from main;
diff --git a/test/gavo-docker/Dockerfile b/test/gavo-docker/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..9a294973d17a91c8762e75fd640fb7f83c4bc124
--- /dev/null
+++ b/test/gavo-docker/Dockerfile
@@ -0,0 +1,34 @@
+FROM debian:stable
+
+RUN apt-get update \
+    && apt-get install -y python3-twisted --no-install-recommends \
+    postgresql-client \
+    python3-pyparsing \
+    python3-astropy \
+    python3-lxml \
+    python3-psycopg2 \
+    python3-matplotlib \
+    python3-setuptools \
+    python3-docutils \
+    python3-pillow \
+    python3-sqlalchemy \
+    python3-graphviz \
+    python3-yaml \
+    firefox-esr \
+    subversion \
+    && rm -rf /var/lib/apt/lists/*
+RUN svn co http://svn.ari.uni-heidelberg.de/svn/gavo/python/tags/release-2.6.4/ /src/dachs \
+    && cd /src/dachs \
+    && python3 setup.py install 
+RUN adduser --system gavo \
+    && addgroup --system gavo \
+    && adduser gavo gavo \
+    && adduser `id -nu` gavo \
+    && mkdir -p /var/gavo/etc \
+    && echo "host=pg-dachs\nuser=gavo\npassword=gavo\ndatabase=gavo\nport=5432" > /var/gavo/etc/dsn \
+    && mkdir -p /var/gavo/logs /var/gavo/inputs \
+    && chown gavo:gavo /var/gavo/logs
+
+ADD ./entrypoint.sh /entrypoint.sh
+RUN chmod +x /entrypoint.sh
+ENTRYPOINT [ "/entrypoint.sh" ]
\ No newline at end of file
diff --git a/test/gavo-docker/entrypoint.sh b/test/gavo-docker/entrypoint.sh
new file mode 100644
index 0000000000000000000000000000000000000000..75f86990f84e4df1eb41a7a53c20d1b66d735e7c
--- /dev/null
+++ b/test/gavo-docker/entrypoint.sh
@@ -0,0 +1,13 @@
+#!/bin/bash
+
+export PGPASSWORD=$POSTGRES_PASSWORD  # must be exported, or child psql processes cannot see it
+IS_INIT=$(psql -h $POSTGRES_HOST -U $POSTGRES_USER -d $POSTGRES_DB -c "select exists(select * from pg_tables where schemaname='dc' and tablename='tablemeta')" -t -A)
+
+if [ "$IS_INIT" == "t" ]; then
+    echo "GAVO database already initialized: skipping initialization..."
+else
+    echo "Initializing gavo database"
+    gavo init -d "host=$POSTGRES_HOST user=$POSTGRES_USER dbname=$POSTGRES_DB password=$POSTGRES_PASSWORD"
+fi
+
+/bin/bash
\ No newline at end of file
diff --git a/test/postgres-docker/Dockerfile b/test/postgres-docker/Dockerfile
new file mode 100644
index 0000000000000000000000000000000000000000..7d924a8a9d1cbffe115cb1399d6ae3e54d7bd022
--- /dev/null
+++ b/test/postgres-docker/Dockerfile
@@ -0,0 +1,10 @@
+FROM postgres:13
+LABEL maintainer="ASTRON SDC"
+
+
+RUN apt-get update \
+    && apt-get install -y --no-install-recommends \
+                          postgresql-$PG_MAJOR-q3c \
+                          postgresql-$PG_MAJOR-pgsphere \
+    && rm -rf /var/lib/apt/lists/*
+    
diff --git a/test/sql/test_data.sql b/test/sql/test_data.sql
new file mode 100644
index 0000000000000000000000000000000000000000..b4d460f7740f5ce6abba7ab325d63f532eaae1f8
--- /dev/null
+++ b/test/sql/test_data.sql
@@ -0,0 +1,43 @@
+CREATE DATABASE lta;
+\c lta;
+
+CREATE USER test WITH PASSWORD 'test';
+
+GRANT ALL PRIVILEGES ON DATABASE lta to test;
+ALTER DATABASE lta OWNER TO test;
+
+DROP TABLE IF EXISTS main;
+CREATE TABLE main (id TEXT, ra DOUBLE PRECISION, dec DOUBLE PRECISION, type TEXT, access_format TEXT, access_size BIGINT, access_url TEXT,t_exptime REAL,
+freq_ref REAL,freq_resolution REAL, t_resolution REAL,freq_bandwidth REAL,s_region TEXT,s_resolution_min DOUBLE PRECISION, activity TEXT);
+
+INSERT INTO main
+ (id, ra, dec, type, access_format, access_size, access_url,t_exptime,freq_ref,freq_resolution, t_resolution,freq_bandwidth,s_region,s_resolution_min, activity) 
+VALUES ('s001',155.5,45.6, 'visibility', 'tar', 1000,  'https://someurl/s001_MS.tar', 5, 15, 1,300, 450, 'CIRCLE()', 35, 'still'),
+       ('s002',145.5,55.6, 'visibility','tar', 1000,  'https://someurl/s002_MS.tar.gz', 5, 15, 1,300, 450, 'CIRCLE()', 35, 'freckles'),
+       ('s001_fits',95.5,145.6, 'image/fits', 'fits', 1000, 'https://someurl/s001.fits', 5, 15, 1,300, 450, 'CIRCLE()', 35, 'freckles'),
+       ('s004',45.5,85.6, 'calibration/table', 'h5', 1000,  'https://someurl/caltable.h5', 5, 15, 1,300, 450, 'CIRCLE()', 35, 'still'),
+       ('s002_fits',105.5,55.6, 'image/fits', 'fits', 1000,  'https://someurl/s002.fits', 5, 15, 1,300, 450, 'CIRCLE()', 35, 'freckles');
+
+DROP TABLE IF EXISTS activity;
+CREATE TABLE activity(id TEXT, type TEXT, project TEXT, collection TEXT);
+
+INSERT INTO activity (id, type, project, collection) VALUES
+('A001', 'still','00005S3E','5DE4'),
+('A002', 'still','000183E','5DE4'),
+('A003', 'rapid movements','00234S3f','5xXE4'),
+('A004', 'still','004805S3E','6aE4'),
+('A005', 'freckles','00005S3E','6DE3');
+
+
+DROP TABLE IF EXISTS secondary;
+CREATE TABLE secondary(activity TEXT, type TEXT, access_format TEXT, access_url TEXT, access_size BIGINT);
+
+INSERT INTO secondary (activity, type, access_format, access_url, access_size) 
+VALUES( 'still', 'raster','readable', 'www.boring.com', 10000),
+( 'rapid movements', 'raster','readable', 'www.ambigous.com', 10000),
+( 'still', 'raster','readable', 'www.hohoo.com', 20000),
+( 'rapid movements', 'raster','readable', 'www.ambigous.com', 30000),
+( 'still', 'raster','readable', 'www.sentinel.com', 1000);
+GRANT ALL PRIVILEGES ON TABLE main TO test;
+GRANT ALL PRIVILEGES ON TABLE secondary TO test;
+GRANT ALL PRIVILEGES ON TABLE activity TO test;
diff --git a/utils/plot_schema.py b/utils/plot_schema.py
new file mode 100755
index 0000000000000000000000000000000000000000..deca1c5d9927f12b5193323929e3bc289701be61
--- /dev/null
+++ b/utils/plot_schema.py
@@ -0,0 +1,55 @@
+#!/usr/bin/env python3
+
+import graphviz
+from argparse import ArgumentParser
+import xml.etree.ElementTree as ET
+
+
+def parse_args():
+    parser = ArgumentParser(
+        description="Generate database table diagram from resource description")
+    parser.add_argument("rd", help="Resource description file")
+    parser.add_argument("diagram", help="Where to save the diagram")
+
+    return parser.parse_args()  # namespace with .rd and .diagram attributes
+
+
+def table_to_dict(table):  # assumes every child element has name/type attrs — TODO confirm against q.rd
+    return {column.attrib["name"]: column.attrib["type"] for column in table}
+
+
+def read_schema_tables(filein):  # parse a q.rd file into (schema name, {table id: columns})
+    parsed = ET.parse(filein)
+    resource = parsed.getroot()
+    schema = resource.attrib["schema"]
+    tables = parsed.findall("table")  # matches only <table> elements directly under <resource>
+
+    return schema, {table.attrib["id"]: table_to_dict(table) for table in tables}
+
+
+def tables_dict_to_graph(schema, tables):
+    dot = graphviz.Digraph(comment=f"{schema}", format="png")
+    dot.attr("node", shape="record", style="filled",
+             colorscheme="set312", fontsize="12")
+
+    for t_number, (tname, cols) in enumerate(tables.items()):
+
+        columns = "".join([
+            f"<tr><td><b>{name}</b>: </td><td>{type}</td></tr>" for name, type in cols.items()])
+        label = f"""<<table border="0" cellborder="2" cellpadding="4">
+                    <tr><td><b>{tname}</b></td></tr>
+                    <tr><td><table border="0" cellborder="0" cellspacing="0">{columns}</table></td></tr>
+                    </table>>""".replace("\n", "")
+        dot.node(name=tname, label=label, fillcolor=str(t_number + 1))
+    return dot
+
+
+def main():
+    args = parse_args()
+    schema, tables = read_schema_tables(args.rd)
+    dot = tables_dict_to_graph(schema, tables)
+    dot.render(args.diagram, cleanup=True)  # writes <diagram>.png, removes the intermediate dot source
+
+
+if __name__ == "__main__":
+    main()