Docker & tests
commit a186cbe263
parent 2e65e1bf9a

2 .dockerignore Normal file
@@ -0,0 +1,2 @@
+tests/
+docs/

7 .vscode/settings.json vendored Normal file
@@ -0,0 +1,7 @@
+{
+    "python.testing.pytestArgs": [
+        "tests"
+    ],
+    "python.testing.unittestEnabled": false,
+    "python.testing.pytestEnabled": true
+}
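
These vendored editor settings point VS Code's test explorer at the new suite under tests/ via pytest and switch unittest discovery off. The same discovery from a terminal (assuming pytest is installed) is simply:

```bash
pytest tests
```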

23 Dockerfile Normal file
@@ -0,0 +1,23 @@
+# Use the official Python image from Docker Hub
+FROM python:3.9-slim
+
+# Set the working directory in the container
+WORKDIR /app
+
+# Copy the requirements file into the container
+COPY requirements.txt .
+
+# Install the dependencies
+RUN pip install --no-cache-dir -r requirements.txt
+
+# Install gunicorn
+RUN pip install gunicorn
+
+# Copy the FastAPI project files into the container
+COPY . .
+
+# Expose the port the app runs on
+EXPOSE 8000
+
+# Command to run the FastAPI app using gunicorn with uvicorn workers
+CMD ["gunicorn", "-w", "4", "-k", "uvicorn.workers.UvicornWorker", "main:app", "--bind", "0.0.0.0:8000"]
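
A quick local smoke test of this image (the tag below is illustrative, not from the repo). Note that the new .dockerignore keeps tests/ and docs/ out of the build context, so the unqualified `COPY . .` does not bake them into the image:

```bash
# Build the image and run it, exposing the FastAPI app on port 8000
docker build -t anime-file-organizer .
docker run --rm -p 8000:8000 anime-file-organizer
```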

41 Jenkinsfile vendored Normal file
@@ -0,0 +1,41 @@
+pipeline {
+    agent any
+
+    environment {
+        IMAGE_NAME = "darkbird/anime-file-organizer:latest"
+        REGISTRY_URL = "registry.xdarkbird.duckdns.org"
+    }
+
+    stages {
+        stage('Docker build') {
+            steps {
+                sh """
+                    docker build --network="host" -t ${IMAGE_NAME} .
+                """
+            }
+        }
+        stage('Docker tag') {
+            steps {
+                sh """
+                    docker image tag ${IMAGE_NAME} ${REGISTRY_URL}/${IMAGE_NAME}
+                """
+            }
+        }
+        stage('Docker push') {
+            steps {
+                sh """
+                    docker push ${REGISTRY_URL}/${IMAGE_NAME}
+                """
+            }
+        }
+        stage('Docker clean') {
+            steps {
+                sh """
+                    docker rmi ${IMAGE_NAME}
+                    docker rmi ${REGISTRY_URL}/${IMAGE_NAME}
+                    docker image prune -f
+                """
+            }
+        }
+    }
+}
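
The pipeline builds the image, retags it for the private registry, pushes, and deletes the local copies. Since this commit also introduces a pytest suite, a natural follow-up would be a test stage ahead of 'Docker build'; a sketch, not part of this commit (base image and install commands assumed):

```groovy
stage('Tests') {
    steps {
        // Run the suite in a throwaway container so the agent needs no Python toolchain
        sh """
            docker run --rm -v \$PWD:/app -w /app python:3.9-slim \
                sh -c 'pip install -r requirements.txt pytest && pytest tests'
        """
    }
}
```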

@@ -17,6 +17,14 @@ Using regex, identify the directory where each file should be located, and the n
 4. Log storage:\
 Store all the logs for the current job in order to be able to debug and keep traceability of the process.
 
+# Development
+
+```bash
+git clone https://github.com/Jose134/file-organizer.git
+cd file-organizer
+pip install -r requirements.txt[dev]
+```
+
 # Roadmap:
 - [x] Be able to move and rename files based on a regex
 - [x] Read qbittorrent credentials from .env file
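
One caveat in the new Development snippet: `pip install -r requirements.txt[dev]` is not valid pip syntax; extras like `[dev]` attach to package specifiers, not to `-r` files, so pip would look for a file literally named requirements.txt[dev]. Until the project grows packaging metadata, a working equivalent of the intended setup might be:

```bash
pip install -r requirements.txt
pip install pytest  # the dev-only dependency, installed explicitly
```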

@@ -1 +1 @@
-^\[Erai-raws\] (.*) - .*$
+^\[Erai-raws\] (.*)(?= - ).*$
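
This one-line pattern change (presumably config/patterns.txt, the file the code below reads) replaces the consumed " - " separator with a lookahead. With a greedy group either way, the capture still runs up to the last " - " in the name, which is exactly what the new tests expect. A minimal check of the new pattern:

```python
import re

# New pattern from this commit; the sample name comes from the test data below
pattern = re.compile(r"^\[Erai-raws\] (.*)(?= - ).*$")
name = ("[Erai-raws] Fate Stay Night - Heaven s Feel - THE MOVIE III. "
        "spring song [1080p CR WEB-DL AVC AAC][MultiSub][BE0BDDA7]")
print(pattern.match(name).group(1))  # Fate Stay Night - Heaven s Feel
```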

@@ -1,2 +1,3 @@
 requests
 fastapi
+pytest ; extra == 'dev'
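
Note that in a plain requirements file (this hunk is presumably requirements.txt) the `extra == 'dev'` marker never fires: `extra` is only defined while pip evaluates a package's own metadata, so `pip install -r requirements.txt` will skip pytest. A common workaround without packaging metadata is a second file that includes the first:

```
# requirements-dev.txt -- hypothetical companion file, not in this commit
-r requirements.txt
pytest
```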

@@ -17,9 +17,10 @@ def deduplicate_files(target_dir, exclude_files):
 def _remove_duplicates(czkawka_path, target_dir, exclude_files, delete_method: CZKAWKA_DELETION_METHOD):
     try:
         flags = ["video", "--directories", target_dir, "--not-recursive", "--delete-method", delete_method.value]
         if exclude_files:
-            flags.append("--excluded-items")
-            flags.extend(exclude_files)
+            for file in exclude_files:
+                flags.append("--excluded-items")
+                flags.append(file)
 
         flags.append("--tolerance")
         flags.append(os.environ.get("CK_DUPLICATE_TOLERANCE", "2"))
         print(flags)
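
The change above alters how exclusions reach the czkawka CLI: instead of one `--excluded-items` flag followed by every path, each excluded file now gets its own `--excluded-items`/value pair. The difference in the resulting argument list (illustrative file names):

```python
exclude_files = ["a.mkv", "b.mkv"]

flags = []
for file in exclude_files:
    flags.append("--excluded-items")
    flags.append(file)

print(flags)
# Before: ['--excluded-items', 'a.mkv', 'b.mkv']
# After:  ['--excluded-items', 'a.mkv', '--excluded-items', 'b.mkv']
```

The `print(flags)` kept at the end of the hunk reads like leftover debug output; a logging call would be the tidier long-term choice.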

@@ -2,9 +2,7 @@ import os
 import shutil
 import re
 
-def group_files_by_prefix(directory, downloading_files):
-    patterns_str = _get_patterns()
-
+def group_files_by_prefix(directory, downloading_files, patterns):
     if not os.path.isdir(directory):
         print(f"The directory {directory} does not exist.")
         return
@@ -22,7 +20,7 @@ def group_files_by_prefix(directory, downloading_files):
         if skip:
             continue
 
-        for pattern_str in patterns_str:
+        for pattern_str in patterns:
             pattern = re.compile(pattern_str)
             match = pattern.match(file)
             if match:
@@ -32,15 +30,3 @@ def group_files_by_prefix(directory, downloading_files):
                 os.makedirs(prefix_dir)
 
             shutil.move(os.path.join(directory, file), os.path.join(prefix_dir, file))
-
-def _get_patterns():
-    config_file_path = os.path.join(os.getcwd(), 'config', 'patterns.txt')
-
-    patterns = []
-    if os.path.isfile(config_file_path):
-        with open(config_file_path, 'r') as file:
-            patterns = [line.strip() for line in file if line.strip()]
-    else:
-        print(f"The config file {config_file_path} does not exist.")
-
-    return patterns
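
Net effect of these three hunks on src/filemoving.py: group_files_by_prefix no longer loads config/patterns.txt itself; the caller now injects the pattern list, and _get_patterns leaves the module (it resurfaces below as _load_patterns_file in the job launcher). That inversion is what makes the new unit tests possible. A caller now looks roughly like this (directory path illustrative):

```python
from src.filemoving import group_files_by_prefix

patterns = [r"^\[Erai-raws\] (.*)(?= - ).*$"]  # normally read from config/patterns.txt
downloading = []                               # names still downloading are skipped
group_files_by_prefix("/data/anime", downloading, patterns)
```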

@@ -34,6 +34,15 @@ def launch_job(job_id):
     with open("jobs.txt", "a") as f:
         f.write(f"{job_id}\n")
 
+    patterns = _load_patterns_file()
     # downloading = get_qbittorrent_files_downloading(qbit_url, qbit_user, qbit_password)
     # deduplicate_files(target_dir, downloading)
     # group_files_by_prefix(target_dir, downloading)
+
+def _load_patterns_file():
+    config_file_path = os.path.join(os.getcwd(), 'config', 'patterns.txt')
+    if not path.exists(config_file_path):
+        print(f"The config file {config_file_path} does not exist.")
+        return []
+    with open(config_file_path, 'r') as file:
+        return [line.strip() for line in file if line.strip()]
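
One thing to watch in the new _load_patterns_file: it calls `path.exists(...)` while the neighbouring line uses `os.path.join(...)`. Unless the module already does `from os import path`, the missing-config branch raises NameError instead of printing the message. The unambiguous spelling would be:

```python
if not os.path.exists(config_file_path):
    print(f"The config file {config_file_path} does not exist.")
    return []
```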

@@ -2,7 +2,7 @@ import requests
 from os import environ
 
 def get_qbittorrent_files_downloading(api_url, user, password):
-    cookies = _login_qbittorrent('http://qbittorrent.xdarkbird.duckdns.org', user, password)
+    cookies = _login_qbittorrent(api_url, user, password)
     if not cookies:
         return []
 
@@ -20,7 +20,7 @@ def get_qbittorrent_files_downloading(api_url, user, password):
         if file_extension in ALLOWED_EXTENSIONS:
             files.append(torrent['name'])
 
-    _logout_qbittorrent('http://qbittorrent.xdarkbird.duckdns.org', cookies)
+    _logout_qbittorrent(api_url, cookies)
     return files
 
 def _login_qbittorrent(api_url, username, password):
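
Both hunks replace the hardcoded qBittorrent host with the api_url parameter the function already accepted, so login and logout now target whatever the caller passes in. Given the existing `from os import environ` import, a call might read like this (environment variable names are illustrative):

```python
files = get_qbittorrent_files_downloading(
    environ.get("QBIT_URL", "http://localhost:8080"),
    environ["QBIT_USER"],
    environ["QBIT_PASSWORD"],
)
```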

75 tests/filemoving_test.py Normal file
@@ -0,0 +1,75 @@
+import os
+import shutil
+import pytest
+
+_test_filenames = [
+    "[Erai-raws] Fate Stay Night - Heaven s Feel - THE MOVIE III. spring song [1080p CR WEB-DL AVC AAC][MultiSub][BE0BDDA7]",
+    "[Erai-raws] fugukan - 03 [1080p CR WEB-DL AVC AAC][MultiSub][983AA6EF]",
+    "[Erai-raws] Golden Kamuy OVA - 02 [1080p CR WEB-DL AVC AAC][MultiSub][A731ADF8]",
+    "[Erai-raws] Goukon ni Ittara Onna ga Inakatta Hanashi - 12 [1080p HIDIVE WEB-DL AVC AAC][23F96132]",
+    "[Erai-raws] Grisaia Phantom Trigger - 04 [1080p CR WEBRip HEVC EAC3][MultiSub][27B78FFC]",
+    "[Erai-raws] Detective Conan - 1147 [1080p CR WEBRip HEVC EAC3][476E9FBE]",
+    "[Erai-raws] 2_5 Jigen no Ririsa - 23 [1080p HIDIVE WEB-DL AVC AAC][F6A89707]"
+]
+
+@pytest.fixture(autouse=True)
+def test_setup():
+    os.makedirs("tests/test_data", exist_ok=True)
+    for filename in _test_filenames:
+        with open(os.path.join("tests/test_data", filename), 'w') as f:
+            f.write("test content")
+
+    yield
+
+    shutil.rmtree("tests/test_data", ignore_errors=True)
+
+def test_filemoving_nodownloads():
+    from src.filemoving import group_files_by_prefix
+    patterns = [r"^\[Erai-raws\] (.*)(?= - ).*$"]
+
+
+    expected_dirs = [
+        "Fate Stay Night - Heaven s Feel",
+        "fugukan",
+        "Golden Kamuy OVA",
+        "Goukon ni Ittara Onna ga Inakatta Hanashi",
+        "Grisaia Phantom Trigger",
+        "Detective Conan",
+        "2_5 Jigen no Ririsa"
+    ]
+
+    group_files_by_prefix("tests/test_data", [], patterns)
+
+    for i in range(len(expected_dirs)):
+        assert os.path.exists(os.path.join("tests/test_data", expected_dirs[i]))
+        assert os.path.exists(os.path.join("tests/test_data", expected_dirs[i], _test_filenames[i]))
+        assert not os.path.exists(os.path.join("tests/test_data", _test_filenames[i]))
+
+
+def test_filemoving_ignores_downloads():
+    from src.filemoving import group_files_by_prefix
+    patterns = [r"^\[Erai-raws\] (.*)(?= - ).*$"]
+
+    downloads = [
+        _test_filenames[0],
+        _test_filenames[2],
+        _test_filenames[4]
+    ]
+
+    expected_dirs = [
+        "fugukan",
+        "Goukon ni Ittara Onna ga Inakatta Hanashi",
+        "Detective Conan",
+        "2_5 Jigen no Ririsa"
+    ]
+
+
+    group_files_by_prefix("tests/test_data", downloads, patterns)
+
+
+    for root, dirs, files in os.walk("tests/test_data"):
+        if root == "tests/test_data":
+            for dir in dirs:
+                assert dir in expected_dirs
+            for file in files:
+                assert file in downloads
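
The suite is self-contained: the autouse fixture materializes the seven sample release names under tests/test_data before each test and removes the tree afterwards, so the move assertions never touch real media. Individual tests can be run by node id, for example:

```bash
pytest tests/filemoving_test.py::test_filemoving_nodownloads -v
```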