# Source code for faker_file.tests.test_django_integration

from typing import Any, Callable, Dict

from django.conf import settings
# Fixed: this line was scraped as "from import default_storage" (a
# SyntaxError); `default_storage` is provided by django.core.files.storage.
from django.core.files.storage import default_storage
from django.test import TestCase
from faker import Faker
from parametrize import parametrize
from storages.backends.s3boto3 import S3Boto3Storage

import factories

from ..registry import FILE_REGISTRY
from ..storages.aws_s3 import AWSS3Storage
from ..storages.filesystem import FileSystemStorage

# Module metadata: authorship, license, and the explicit public API.
# NOTE(review): the author email inside "<>" appears to have been stripped
# by the docs-scraping process — confirm against the upstream package.
__author__ = "Artur Barseghyan <>"
__copyright__ = "2022-2023 Artur Barseghyan"
__license__ = "MIT"
__all__ = ("DjangoIntegrationTestCase",)

# Faker doesn't know anything about Django. That's why, if we want to support
# remote storages, we need to manually check which file storage backend is
# used. If `Boto3` storage backend (of the `django-storages` package) is used
# we use the correspondent `AWSS3Storage` class of the `faker-file`.
# Otherwise, fall back to native file system storage (`FileSystemStorage`) of
# the `faker-file`.
if isinstance(default_storage, S3Boto3Storage):
    # S3-backed storage: reuse the bucket and credentials Django is already
    # configured with.
    # NOTE(review): the scraped source dropped lines here — the call had
    # orphaned "key_id"/"key_secret" dict entries and no closing parens,
    # and the `else:` branch was missing. The `bucket_name`/`credentials`
    # structure below is reconstructed; confirm against upstream faker-file.
    STORAGE = AWSS3Storage(
        bucket_name=settings.AWS_STORAGE_BUCKET_NAME,
        credentials={
            "key_id": settings.AWS_ACCESS_KEY_ID,
            "key_secret": settings.AWS_SECRET_ACCESS_KEY,
        },
    )
else:
    # Local development / CI: generated files go under MEDIA_ROOT/tmp.
    STORAGE = FileSystemStorage(root_path=settings.MEDIA_ROOT, rel_path="tmp")

class DjangoIntegrationTestCase(TestCase):
    """Django integration test case."""

    # Shared Faker instance for the test case. Annotation only — the scraped
    # source does not show where it is assigned; presumably set elsewhere in
    # the suite (TODO confirm).
    FAKER: Faker

    def tearDown(self, *args, **kwargs) -> None:
        """Clean up files created during the test via the file registry."""
        super().tearDown(*args, **kwargs)
        FILE_REGISTRY.clean_up()
@parametrize( "factory, kwargs", [ (factories.UploadFactory, {}), (factories.UploadFactory, {"random_file": True}), (factories.UploadFactory, {"pdf_file": True}), (factories.UploadFactory, {"pptx_file": True}), (factories.UploadFactory, {"txt_file": True}), (factories.UploadFactory, {"zip_file": True}), ], ) def test_file( self: "DjangoIntegrationTestCase", factory: Callable, kwargs: Dict[str, Any], ) -> None: """Test file.""" _upload = factory(**kwargs) if kwargs: self.assertTrue(STORAGE.exists(