This issue appears to be a race condition: when the task starts executing very soon after being sent to the backend, the call to task_result = get_task_model(self, task_id) returns a not-yet-saved model instance; before that instance is saved, the async task's execution has already inserted a row for the same task into the database, so the subsequent save violates the unique constraint on task_id.
UniqueViolation: duplicate key value violates unique constraint "django_celery_results_taskresult_task_id_key"
DETAIL: Key (task_id)=(10b638c5b2a54f008d634a67c23e2fd6) already exists.
File "django/db/backends/utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
File "newrelic/hooks/database_psycopg2.py", line 64, in execute
return super(CursorWrapper, self).execute(sql, parameters, *args,
File "newrelic/hooks/database_dbapi2.py", line 38, in execute
return self.__wrapped__.execute(sql, parameters,
File "django_prometheus/db/common.py", line 71, in execute
return super().execute(*args, **kwargs)
IntegrityError: duplicate key value violates unique constraint "django_celery_results_taskresult_task_id_key"
DETAIL: Key (task_id)=(10b638c5b2a54f008d634a67c23e2fd6) already exists.
(18 additional frame(s) were not displayed)
...
File "django/db/utils.py", line 90, in __exit__
raise dj_exc_value.with_traceback(traceback) from exc_value
File "django/db/backends/utils.py", line 84, in _execute
return self.cursor.execute(sql, params)
File "newrelic/hooks/database_psycopg2.py", line 64, in execute
return super(CursorWrapper, self).execute(sql, parameters, *args,
File "newrelic/hooks/database_dbapi2.py", line 38, in execute
return self.__wrapped__.execute(sql, parameters,
File "django_prometheus/db/common.py", line 71, in execute
return super().execute(*args, **kwargs)