
Merge pull request #897 from endlessm/778-downloader-tests
Content downloader tests
dbnicholson authored Oct 27, 2023
2 parents 16954ef + f8563e9 commit 2a12060
Showing 46 changed files with 10,526 additions and 122 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/main.yml
@@ -122,7 +122,7 @@ jobs:
      - name: Python tests
        run: |
-          pipenv run python -m pytest
+          pipenv run python -m pytest --log-level DEBUG
      - name: Build
        run: |
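
The only CI change passes --log-level DEBUG to pytest so that DEBUG-level records emitted by the plugin are captured and shown when a test fails, which pairs with the logger.debug() calls added further down in this PR. As a minimal sketch of what that capture looks like (an illustrative test using pytest's built-in caplog fixture, not code from this PR):

import logging

logger = logging.getLogger("kolibri_explore_plugin.jobs")


def test_debug_records_are_captured(caplog):
    # With pytest run at --log-level DEBUG (or via caplog.at_level), DEBUG
    # records are retained and appear in the output of a failing test.
    with caplog.at_level(logging.DEBUG):
        logger.debug("Starting BackgroundTask example")
    assert "Starting BackgroundTask example" in caplog.text
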
1 change: 1 addition & 0 deletions Pipfile
@@ -16,6 +16,7 @@ setuptools_scm = { version = ">=6.2", extras = ["toml"] }
tomli = "*"
pytest = "~=7.4"
pytest-django = "~=3.10"
+pytest-rerunfailures = "~=12.0"

[packages]
nodeenv = "==1.3.3"
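
pytest-rerunfailures is added to the dev dependencies so that flaky downloader tests can be retried instead of failing the whole suite. The plugin's standard interface is either the --reruns N command-line option or a per-test marker; which form the new tests use is not visible in this diff. A small illustrative example of the marker form (not code from this PR):

import random

import pytest


# Retry up to two times with a one second pause between attempts;
# reruns and reruns_delay are standard pytest-rerunfailures options.
@pytest.mark.flaky(reruns=2, reruns_delay=1)
def test_transient_download_step():
    # Stand-in for an assertion that can fail transiently, e.g. a slow mock server.
    assert random.random() > 0.05
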
222 changes: 115 additions & 107 deletions Pipfile.lock

Large diffs are not rendered by default, so the Pipfile.lock changes are not shown here.

4 changes: 3 additions & 1 deletion kolibri_explore_plugin/collectionviews.py
@@ -192,7 +192,9 @@ def get_contentthumbnail_tasks(self):
        For all the channels in this content manifest.
        """
        return [
-            get_remotecontentimport_task(channel_id, all_thumbnails=True)
+            get_remotecontentimport_task(
+                channel_id, node_ids=[], all_thumbnails=True
+            )
            for channel_id in self.get_channel_ids()
        ]
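
The thumbnail task now passes node_ids=[] explicitly. My reading, not spelled out in the diff: in Kolibri's remote content import a node_ids of None is treated as "import the whole channel", while an empty list means "import no regular nodes", so node_ids=[] together with all_thumbnails=True yields a thumbnail-only task. The companion change in jobs.py below removes the silent None-to-[] coercion inside get_remotecontentimport_task, so callers that want thumbnail-only behaviour must now say so, roughly:

# Hedged sketch of the calling convention this change relies on
# (the None-vs-[] semantics are assumed from Kolibri's import tasks, not shown here).
thumbnails_only = get_remotecontentimport_task(
    channel_id, node_ids=[], all_thumbnails=True
)
whole_channel = get_remotecontentimport_task(channel_id)  # node_ids stays None
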
38 changes: 25 additions & 13 deletions kolibri_explore_plugin/jobs.py
@@ -11,6 +11,7 @@
from kolibri.core.tasks.job import State
from kolibri.core.tasks.main import job_storage
from kolibri.core.tasks.utils import import_path_to_callable
+from kolibri.core.utils.lock import db_lock

from .models import BackgroundTask

@@ -70,8 +71,6 @@ def get_remotecontentimport_task(
    if not channel_name:
        channel_metadata = get_channel_metadata(channel_id)
        channel_name = channel_metadata.name
-    if node_ids is None:
-        node_ids = []
    return {
        "task": TaskType.REMOTECONTENTIMPORT,
        "params": {
@@ -104,7 +103,10 @@ def enqueue_task(


def enqueue_next_background_task():
-    """Locate the next background task and enqueue it for running"""
+    """Locate the next background task and enqueue it for running
+
+    Returns the enqueued BackgroundTask or None if no task was enqueued.
+    """

    # If there's already a job in progress, do nothing. We only want a
    # single background task running in order to not slow down the user
@@ -117,26 +119,35 @@ def enqueue_next_background_task():
    )
    if in_progress_jobs.exists():
        logger.debug("Not enqueuing next task as tasks in progress")
-        return
+        return None

    task = BackgroundTask.objects.filter(job_id="").first()
    if not task:
        logger.debug("All background tasks completed")
-        return
+        return None

+    # If the enqueued job changes state before the job ID has been recorded,
+    # the storage hook will skip the event since it won't find the background
+    # task. Lock the database until that happens to prevent the storage hook
+    # from reading the BackgroundTask table.
    logger.info(f"Starting BackgroundTask {task}")
    params = json.loads(task.params)
-    job_id = enqueue_task(
-        task.func,
-        queue=BACKGROUND_QUEUE,
-        priority=Priority.REGULAR,
-        **params,
-    )
-    task.update_job_id(job_id)
+    with db_lock():
+        job_id = enqueue_task(
+            task.func,
+            queue=BACKGROUND_QUEUE,
+            priority=Priority.REGULAR,
+            **params,
+        )
+        task.update_job_id(job_id)
+
+    return task


def storage_update_hook(job, orm_job, state=None, **kwargs):
    """StorageHook update hook"""
+    logger.debug(f"Running storage hook for {job}")
+
    if state is None:
        # We only care about state transitions here.
        return
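
The new db_lock() block closes a race: enqueue_task() can start the job immediately, and if that job changes state before task.update_job_id(job_id) is saved, the storage hook cannot find a BackgroundTask with the job's ID and the transition is dropped. Holding the database lock across enqueue-plus-record means the hook, which also goes through the database, only reads the table once the job ID is in place. A hedged sketch of the lookup being protected; the real query lives in a collapsed part of storage_update_hook, so its exact shape is an assumption:

# Hypothetical illustration only; the actual lookup in storage_update_hook is
# collapsed out of this diff.
def find_background_task(job):
    bg_task = BackgroundTask.objects.filter(job_id=job.job_id).first()
    if bg_task is None:
        # Without db_lock() around enqueue_task() + update_job_id(), a fast job
        # could reach this point before its job ID was recorded, and the state
        # transition would be lost.
        return None
    return bg_task
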
@@ -149,6 +160,7 @@ def storage_update_hook(job, orm_job, state=None, **kwargs):

    # Synchronize the state if needed.
    if bg_task.job_state != state:
+        logger.debug(f"Updating {bg_task} job state to {state}")
        bg_task.job_state = state
        bg_task.save()
@@ -165,7 +177,7 @@ def storage_update_hook(job, orm_job, state=None, **kwargs):
        bg_task_params = json.loads(bg_task.params)
        channel_id = bg_task_params["channel_id"]
        thumbnail_task_data = get_remotecontentimport_task(
-            channel_id, all_thumbnails=True
+            channel_id, node_ids=[], all_thumbnails=True
        )
        BackgroundTask.create_from_task_data(thumbnail_task_data)
1 change: 1 addition & 0 deletions kolibri_explore_plugin/models.py
@@ -77,6 +77,7 @@ def create_from_task_data(cls, task_data):

    def update_job_id(self, job_id):
        """Set a new job ID and reset the state to PENDING"""
+        logger.debug(f"Updating {self} job ID to {job_id}")
        self.job_id = job_id
        self.job_state = State.PENDING
        self.save()
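
Taken together, my reading of the intended background-task lifecycle is: create a BackgroundTask row with an empty job_id, let enqueue_next_background_task() pick rows up one at a time, and let storage_update_hook() mirror job state back onto the row and queue the follow-up thumbnail-only task. A hedged sketch using only names that appear in this diff; the channel ID is a placeholder and the default field values are assumed rather than shown:

# Hedged lifecycle sketch; assumes BackgroundTask.job_id defaults to "" and
# job_state to State.PENDING, which this diff implies but does not show.
task_data = get_remotecontentimport_task(
    "dummy-channel-id", node_ids=[], all_thumbnails=True  # placeholder channel ID
)
bg_task = BackgroundTask.create_from_task_data(task_data)
assert bg_task.job_id == ""

started = enqueue_next_background_task()  # enqueues under db_lock and records the job ID
assert started == bg_task
assert started.job_state == State.PENDING  # update_job_id() resets the state

# From here storage_update_hook() keeps job_state in sync as the job runs and,
# at the appropriate transition (the condition is collapsed out of this diff),
# creates the follow-up thumbnail-only BackgroundTask.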