Fixes CI unit testing
parent d856e48045
commit a7e1ba82d6
.github/workflows/ci.yml (vendored, 3 changed lines)
@@ -125,6 +125,9 @@ jobs:
         name: Install Python dependencies
         run: |
           pipenv sync --dev
+          pipenv run python3 -m nltk.downloader snowball_data
+          pipenv run python3 -m nltk.downloader stopwords
+          pipenv run python3 -m nltk.downloader punkt
       -
         name: List installed Python dependencies
         run: |
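The three added downloader calls fetch the NLTK data the test suite needs (presumably exercised via the text preprocessing code), so unit tests no longer fail on missing corpora. A minimal sketch, assuming the default NLTK search path since CI sets no NLTK_DATA override, of what the tests can now rely on:

# Minimal sketch: exercise the NLTK pieces the CI step above downloads data for.
from nltk.corpus import stopwords          # stopwords corpus
from nltk.stem import SnowballStemmer      # snowball stemmer
from nltk.tokenize import word_tokenize    # punkt tokenizer models

print(SnowballStemmer("english").stem("consuming"))   # -> "consum"
print("the" in stopwords.words("english"))            # -> True
print(word_tokenize("Paperless indexes documents."))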
@@ -53,24 +53,6 @@ map_folders() {
     export CONSUME_DIR="${PAPERLESS_CONSUMPTION_DIR:-/usr/src/paperless/consume}"
 }
 
-nltk_data () {
-    # Store the NLTK data outside the Docker container
-    local nltk_data_dir="${DATA_DIR}/nltk"
-
-    # Download or update the snowball stemmer data
-    python3 -m nltk.downloader -d "${nltk_data_dir}" snowball_data
-
-    # Download or update the stopwords corpus
-    python3 -m nltk.downloader -d "${nltk_data_dir}" stopwords
-
-    # Download or update the punkt tokenizer data
-    python3 -m nltk.downloader -d "${nltk_data_dir}" punkt
-
-    # Set env so nltk can find the downloaded data
-    export NLTK_DATA="${nltk_data_dir}"
-
-}
-
 initialize() {
 
     # Setup environment from secrets before anything else
@@ -123,8 +105,6 @@ initialize() {
     done
     set -e
 
-    nltk_data
-
     "${gosu_cmd[@]}" /sbin/docker-prepare.sh
 }
 
@@ -89,6 +89,24 @@ superuser() {
     fi
 }
 
+nltk_data () {
+    # Store the NLTK data outside the Docker container
+    local nltk_data_dir="${DATA_DIR}/nltk"
+
+    # Download or update the snowball stemmer data
+    python3 -m nltk.downloader -d "${nltk_data_dir}" snowball_data
+
+    # Download or update the stopwords corpus
+    python3 -m nltk.downloader -d "${nltk_data_dir}" stopwords
+
+    # Download or update the punkt tokenizer data
+    python3 -m nltk.downloader -d "${nltk_data_dir}" punkt
+
+    # Set env so nltk can find the downloaded data
+    export NLTK_DATA="${nltk_data_dir}"
+
+}
+
 do_work() {
     if [[ "${PAPERLESS_DBENGINE}" == "mariadb" ]]; then
         wait_for_mariadb
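Exporting NLTK_DATA points NLTK's loader at the persisted "${DATA_DIR}/nltk" directory instead of a location inside the container image. A minimal Python-side sketch of how that variable is honoured (the directory below is hypothetical, standing in for ${DATA_DIR}/nltk); NLTK reads it when the module is imported and lists it ahead of the built-in default locations:

import os

# Hypothetical directory standing in for ${DATA_DIR}/nltk.
os.environ["NLTK_DATA"] = "/usr/src/paperless/data/nltk"

# nltk builds its search path at import time, so the variable
# must be set before the import.
import nltk.data
print(nltk.data.path)  # the NLTK_DATA directory is listed before the defaults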
@@ -100,6 +118,8 @@ do_work() {
 
     migrations
 
+    nltk_data
+
     search_index
 
     superuser
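Because this runs during regular container start-up, it relies on the downloader being effectively idempotent: NLTK's downloader checks the target directory and skips any package that is already present and up to date, so repeated starts only pay for a version check. A rough programmatic equivalent of the three shell calls above (hypothetical target directory):

import nltk

# Hypothetical target directory standing in for ${DATA_DIR}/nltk.
nltk_data_dir = "/usr/src/paperless/data/nltk"

for package in ("snowball_data", "stopwords", "punkt"):
    # nltk.download() returns quickly when the package is already current.
    nltk.download(package, download_dir=nltk_data_dir, quiet=True)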