FROM python:3.11

# The GBIF download ID is set as a Space variable
# By default it is a download of all Solanum preserved specimen records (c. 600K)
ARG GBIF_DOWNLOAD_ID=$GBIF_DOWNLOAD_ID
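# For a local build the ID can be passed explicitly, e.g. (hypothetical ID shown):
#   docker build --build-arg GBIF_DOWNLOAD_ID=0123456-230224095556074 .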
WORKDIR /code

COPY ./requirements.txt /code/requirements.txt

RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Download GBIF occurrences and prepare for use with datasette
RUN mkdir /data
ADD https://api.gbif.org/v1/occurrence/download/request/${GBIF_DOWNLOAD_ID}.zip /data/gbif-occs.zip
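# Unzip the archive; the ls steps just log the /data contents in the build output for debugging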
RUN ls -lh /data
RUN unzip /data/gbif-occs.zip -d /data
RUN ls -lh /data
COPY ./tab2csv.py /code/tab2csv.py


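# Convert the tab-delimited GBIF occurrence file to CSV and load it into SQLite
# (--createcols presumably adds derived columns such as collectorNameAndNumber)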
RUN python tab2csv.py --createcols /data ${GBIF_DOWNLOAD_ID} gbifocc.csv 
RUN sqlite-utils insert /code/gbifocc.db gbifocc /data/gbifocc.csv --csv
RUN ls -l /code
RUN sqlite-utils tables /code/gbifocc.db --counts
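# Enable full-text search on the derived collectorNameAndNumber column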
RUN sqlite-utils enable-fts /code/gbifocc.db gbifocc collectorNameAndNumber
RUN chmod 755 /code/gbifocc.db

# Create datasette metadata file
COPY ./getDownloadMetadata.py /code/getDownloadMetadata.py
COPY ./metadata.json /code/metadata.json
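# getDownloadMetadata.py presumably rewrites metadata.json in place with details fetched from the GBIF API for this download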
RUN python getDownloadMetadata.py /code/metadata.json /code/metadata.json --download_id=$GBIF_DOWNLOAD_ID

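# Serve the database with Datasette on port 7860 (the port Hugging Face Spaces expects), with a 3.5 s SQL query time limit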
CMD ["datasette", "/code/gbifocc.db", "-m", "/code/metadata.json", "--host", "0.0.0.0", "--port", "7860", "--setting", "sql_time_limit_ms", "3500"]