fix: disable whisper, faster_whisper, and hugging_face transcriptions in linux build (#659)

Chidi Williams, 2024-01-05 19:09:26 +00:00, committed by GitHub
parent 5456774d96
commit c7be2f1578
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
26 changed files with 512 additions and 342 deletions


@@ -5,6 +5,3 @@ omit =
 
 [html]
 directory = coverage/html
-
-[report]
-fail_under = 75


@@ -88,7 +88,6 @@ jobs:
         include:
           - os: macos-latest
           - os: windows-latest
-          - os: ubuntu-20.04
     steps:
       - uses: actions/checkout@v3
         with:
@@ -298,7 +297,6 @@ jobs:
         include:
           - os: macos-latest
           - os: windows-latest
-          - os: ubuntu-20.04
     needs: [ build, test ]
     if: startsWith(github.ref, 'refs/tags/')
     steps:


@@ -31,8 +31,13 @@ clean:
 	rm -f buzz/whisper_cpp.py
 	rm -rf dist/* || true
 
+COVERAGE_THRESHOLD := 75
+ifeq ($(UNAME_S),Linux)
+COVERAGE_THRESHOLD := 70
+endif
+
 test: buzz/whisper_cpp.py translation_mo
-	pytest -vv --cov=buzz --cov-report=xml --cov-report=html --benchmark-skip
+	pytest -vv --cov=buzz --cov-report=xml --cov-report=html --benchmark-skip --cov-fail-under=${COVERAGE_THRESHOLD}
 
 benchmarks: buzz/whisper_cpp.py translation_mo
 	pytest -vv --benchmark-only --benchmark-json benchmarks.json


@@ -2,22 +2,38 @@ import enum
 import hashlib
 import logging
 import os
-import shutil
 import subprocess
 import sys
 import tempfile
 import warnings
 from dataclasses import dataclass
 from typing import Optional
+import shutil
 
-import faster_whisper
-import huggingface_hub
 import requests
-import whisper
 from PyQt6.QtCore import QObject, pyqtSignal, QRunnable
 from platformdirs import user_cache_dir
 from tqdm.auto import tqdm
 
+whisper = None
+faster_whisper = None
+huggingface_hub = None
+if sys.platform != "linux":
+    import faster_whisper
+    import whisper
+    import huggingface_hub
+
+# Catch exception from whisper.dll not getting loaded.
+# TODO: Remove flag and try-except when issue with loading
+# the DLL in some envs is fixed.
+LOADED_WHISPER_DLL = False
+try:
+    import buzz.whisper_cpp as whisper_cpp  # noqa: F401
+
+    LOADED_WHISPER_DLL = True
+except ImportError:
+    logging.exception("")
+
 
 class WhisperModelSize(str, enum.Enum):
     TINY = "tiny"
@@ -42,6 +58,38 @@ class ModelType(enum.Enum):
     FASTER_WHISPER = "Faster Whisper"
     OPEN_AI_WHISPER_API = "OpenAI Whisper API"
 
+    def supports_recording(self):
+        # Live transcription with OpenAI Whisper API not supported
+        return self != ModelType.OPEN_AI_WHISPER_API
+
+    def is_available(self):
+        if (
+            # Hide Whisper.cpp option if whisper.dll did not load correctly.
+            # See: https://github.com/chidiwilliams/buzz/issues/274,
+            # https://github.com/chidiwilliams/buzz/issues/197
+            (self == ModelType.WHISPER_CPP and not LOADED_WHISPER_DLL)
+            # Disable Whisper and Faster Whisper options
+            # on Linux due to execstack errors on Snap
+            or (
+                sys.platform == "linux"
+                and self
+                in (
+                    ModelType.WHISPER,
+                    ModelType.FASTER_WHISPER,
+                    ModelType.HUGGING_FACE,
+                )
+            )
+        ):
+            return False
+        return True
+
+    def is_manually_downloadable(self):
+        return self in (
+            ModelType.WHISPER,
+            ModelType.WHISPER_CPP,
+            ModelType.FASTER_WHISPER,
+        )
+
 
 @dataclass()
 class TranscriptionModel:
@@ -76,6 +124,13 @@ class TranscriptionModel:
             return
         self.open_path(path=os.path.dirname(model_path))
 
+    @staticmethod
+    def default():
+        model_type = next(
+            model_type for model_type in ModelType if model_type.is_available()
+        )
+        return TranscriptionModel(model_type=model_type)
+
     @staticmethod
     def open_path(path: str):
         if sys.platform == "win32":
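The hunks above centralize the gating logic in ModelType.is_available(), ModelType.supports_recording(), ModelType.is_manually_downloadable(), and TranscriptionModel.default(). A minimal usage sketch, not part of the diff, assuming buzz.model_loader as patched above:

import sys
from buzz.model_loader import ModelType, TranscriptionModel

# Filter model types the way the widgets later in this diff do.
available = [model_type for model_type in ModelType if model_type.is_available()]
downloadable = [m for m in available if m.is_manually_downloadable()]

# First available type; on the Linux build this is typically Whisper.cpp
# (or the OpenAI Whisper API if the whisper_cpp extension failed to load).
default = TranscriptionModel.default()
print(sys.platform, available, downloadable, default.model_type)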


@@ -1,25 +1,29 @@
 import datetime
 import logging
+import sys
 import threading
 from typing import Optional
 
 import numpy as np
 import sounddevice
-import whisper
 from PyQt6.QtCore import QObject, pyqtSignal
 from sounddevice import PortAudioError
 
-from buzz import transformers_whisper
+from buzz import transformers_whisper, whisper_audio
 from buzz.model_loader import ModelType
 from buzz.transcriber import TranscriptionOptions, WhisperCpp, whisper_cpp_params
 from buzz.transformers_whisper import TransformersWhisper
 
+if sys.platform != "linux":
+    import whisper
+
 
 class RecordingTranscriber(QObject):
     transcription = pyqtSignal(str)
     finished = pyqtSignal()
     error = pyqtSignal(str)
     is_running = False
+    SAMPLE_RATE = whisper_audio.SAMPLE_RATE
     MAX_QUEUE_SIZE = 10
 
     def __init__(
@@ -149,17 +153,15 @@ class RecordingTranscriber(QObject):
         provided by Whisper if the microphone supports it, or else it uses the device's default
         sample rate.
         """
-        whisper_sample_rate = whisper.audio.SAMPLE_RATE
+        sample_rate = whisper_audio.SAMPLE_RATE
         try:
-            sounddevice.check_input_settings(
-                device=device_id, samplerate=whisper_sample_rate
-            )
-            return whisper_sample_rate
+            sounddevice.check_input_settings(device=device_id, samplerate=sample_rate)
+            return sample_rate
         except PortAudioError:
             device_info = sounddevice.query_devices(device=device_id)
             if isinstance(device_info, dict):
-                return int(device_info.get("default_samplerate", whisper_sample_rate))
-            return whisper_sample_rate
+                return int(device_info.get("default_samplerate", sample_rate))
+            return sample_rate
 
     def stream_callback(self, in_data: np.ndarray, frame_count, time_info, status):
         # Try to enqueue the next block. If the queue is already full, drop the block.


@@ -18,7 +18,7 @@ class KeyringStore:
                 return ""
             return password
         except (KeyringLocked, KeyringError) as exc:
-            logging.error("Unable to read from keyring: %s", exc)
+            logging.warning("Unable to read from keyring: %s", exc)
             return ""
 
     def set_password(self, username: Key, password: str) -> None:


@@ -17,31 +17,22 @@ from random import randint
 from threading import Thread
 from typing import Any, List, Optional, Tuple, Union, Set
 
-import faster_whisper
 import numpy as np
 import openai
-import stable_whisper
 import tqdm
-import whisper
 from PyQt6.QtCore import QObject, pyqtSignal, pyqtSlot
 from dataclasses_json import dataclass_json, config, Exclude
-from whisper import tokenizer
 
-from . import transformers_whisper
+from buzz.model_loader import whisper_cpp
+from . import transformers_whisper, whisper_audio
 from .conn import pipe_stderr
 from .locale import _
 from .model_loader import TranscriptionModel, ModelType
 
-# Catch exception from whisper.dll not getting loaded.
-# TODO: Remove flag and try-except when issue with loading
-# the DLL in some envs is fixed.
-LOADED_WHISPER_DLL = False
-try:
-    import buzz.whisper_cpp as whisper_cpp
-
-    LOADED_WHISPER_DLL = True
-except ImportError:
-    logging.exception("")
+if sys.platform != "linux":
+    import faster_whisper
+    import whisper
+    import stable_whisper
 
 
 DEFAULT_WHISPER_TEMPERATURE = (0.0, 0.2, 0.4, 0.6, 0.8, 1.0)
@@ -58,7 +49,108 @@ class Segment:
     text: str
 
 
-LANGUAGES = tokenizer.LANGUAGES
+LANGUAGES = {
"en": "english",
"zh": "chinese",
"de": "german",
"es": "spanish",
"ru": "russian",
"ko": "korean",
"fr": "french",
"ja": "japanese",
"pt": "portuguese",
"tr": "turkish",
"pl": "polish",
"ca": "catalan",
"nl": "dutch",
"ar": "arabic",
"sv": "swedish",
"it": "italian",
"id": "indonesian",
"hi": "hindi",
"fi": "finnish",
"vi": "vietnamese",
"he": "hebrew",
"uk": "ukrainian",
"el": "greek",
"ms": "malay",
"cs": "czech",
"ro": "romanian",
"da": "danish",
"hu": "hungarian",
"ta": "tamil",
"no": "norwegian",
"th": "thai",
"ur": "urdu",
"hr": "croatian",
"bg": "bulgarian",
"lt": "lithuanian",
"la": "latin",
"mi": "maori",
"ml": "malayalam",
"cy": "welsh",
"sk": "slovak",
"te": "telugu",
"fa": "persian",
"lv": "latvian",
"bn": "bengali",
"sr": "serbian",
"az": "azerbaijani",
"sl": "slovenian",
"kn": "kannada",
"et": "estonian",
"mk": "macedonian",
"br": "breton",
"eu": "basque",
"is": "icelandic",
"hy": "armenian",
"ne": "nepali",
"mn": "mongolian",
"bs": "bosnian",
"kk": "kazakh",
"sq": "albanian",
"sw": "swahili",
"gl": "galician",
"mr": "marathi",
"pa": "punjabi",
"si": "sinhala",
"km": "khmer",
"sn": "shona",
"yo": "yoruba",
"so": "somali",
"af": "afrikaans",
"oc": "occitan",
"ka": "georgian",
"be": "belarusian",
"tg": "tajik",
"sd": "sindhi",
"gu": "gujarati",
"am": "amharic",
"yi": "yiddish",
"lo": "lao",
"uz": "uzbek",
"fo": "faroese",
"ht": "haitian creole",
"ps": "pashto",
"tk": "turkmen",
"nn": "nynorsk",
"mt": "maltese",
"sa": "sanskrit",
"lb": "luxembourgish",
"my": "myanmar",
"bo": "tibetan",
"tl": "tagalog",
"mg": "malagasy",
"as": "assamese",
"tt": "tatar",
"haw": "hawaiian",
"ln": "lingala",
"ha": "hausa",
"ba": "bashkir",
"jw": "javanese",
"su": "sundanese",
"yue": "cantonese",
}
 
 
 @dataclass()
@@ -168,6 +260,7 @@ class FileTranscriber(QObject):
         try:
             segments = self.transcribe()
         except Exception as exc:
+            logging.error(exc)
             self.error.emit(exc)
             return
@@ -230,8 +323,8 @@ class WhisperCppFileTranscriber(FileTranscriber):
         model_path = self.model_path
 
         logging.debug(
-            "Starting whisper_cpp file transcription, file path = %s, language = %s, task = %s, model_path = %s, "
-            "word level timings = %s",
+            "Starting whisper_cpp file transcription, file path = %s, language = %s, "
+            "task = %s, model_path = %s, word level timings = %s",
             self.file_path,
             self.language,
             self.task,
@@ -239,8 +332,8 @@ class WhisperCppFileTranscriber(FileTranscriber):
             self.word_level_timings,
         )
 
-        audio = whisper.audio.load_audio(self.file_path)
-        self.duration_audio_ms = len(audio) * 1000 / whisper.audio.SAMPLE_RATE
+        audio = whisper_audio.load_audio(self.file_path)
+        self.duration_audio_ms = len(audio) * 1000 / whisper_audio.SAMPLE_RATE
 
         whisper_params = whisper_cpp_params(
             language=self.language if self.language is not None else "",
@@ -722,7 +815,7 @@ class WhisperCpp:
     def transcribe(self, audio: Union[np.ndarray, str], params: Any):
         if isinstance(audio, str):
-            audio = whisper.audio.load_audio(audio)
+            audio = whisper_audio.load_audio(audio)
 
         logging.debug("Loaded audio with length = %s", len(audio))


@@ -1,9 +1,13 @@
+import sys
 from typing import Optional, Union
 
 import numpy as np
-import whisper
 from tqdm import tqdm
-from transformers import WhisperProcessor, WhisperForConditionalGeneration
+
+WhisperProcessor = WhisperForConditionalGeneration = None
+if sys.platform != "linux":
+    import whisper
+    from transformers import WhisperProcessor, WhisperForConditionalGeneration
 
 
 def load_model(model_name_or_path: str):
@@ -13,14 +17,13 @@ def load_model(model_name_or_path: str):
 
 
 class TransformersWhisper:
-    SAMPLE_RATE = whisper.audio.SAMPLE_RATE
-    N_SAMPLES_IN_CHUNK = whisper.audio.N_SAMPLES
-
     def __init__(
         self, processor: WhisperProcessor, model: WhisperForConditionalGeneration
     ):
         self.processor = processor
         self.model = model
+        self.SAMPLE_RATE = whisper.audio.SAMPLE_RATE
+        self.N_SAMPLES_IN_CHUNK = whisper.audio.N_SAMPLES
 
     # Patch implementation of transcribing with transformers' WhisperProcessor until long-form transcription and
     # timestamps are available. See: https://github.com/huggingface/transformers/issues/19887,

buzz/whisper_audio.py (new file)

@@ -0,0 +1,50 @@
from subprocess import CalledProcessError, run
import numpy as np
SAMPLE_RATE = 16000
N_FFT = 400
HOP_LENGTH = 160
CHUNK_LENGTH = 30
N_SAMPLES = CHUNK_LENGTH * SAMPLE_RATE # 480000 samples in a 30-second chunk
def load_audio(file: str, sr: int = SAMPLE_RATE):
"""
Open an audio file and read as mono waveform, resampling as necessary
Parameters
----------
file: str
The audio file to open
sr: int
The sample rate to resample the audio if necessary
Returns
-------
A NumPy array containing the audio waveform, in float32 dtype.
"""
# This launches a subprocess to decode audio while down-mixing
# and resampling as necessary. Requires the ffmpeg CLI in PATH.
# fmt: off
cmd = [
"ffmpeg",
"-nostdin",
"-threads", "0",
"-i", file,
"-f", "s16le",
"-ac", "1",
"-acodec", "pcm_s16le",
"-ar", str(sr),
"-"
]
# fmt: on
try:
out = run(cmd, capture_output=True, check=True).stdout
except CalledProcessError as e:
raise RuntimeError(f"Failed to load audio: {e.stderr.decode()}") from e
return np.frombuffer(out, np.int16).flatten().astype(np.float32) / 32768.0
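A short usage sketch for the new module (hypothetical speech.wav input; requires the ffmpeg CLI on PATH, as the comment above notes; not part of the commit):

from buzz import whisper_audio

samples = whisper_audio.load_audio("speech.wav")  # float32 mono at 16 kHz
duration_ms = len(samples) * 1000 / whisper_audio.SAMPLE_RATE
chunks = len(samples) / whisper_audio.N_SAMPLES  # 30-second chunks
print(f"{duration_ms:.0f} ms of audio, about {chunks:.1f} chunk(s)")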


@@ -4,7 +4,6 @@ from PyQt6.QtCore import pyqtSignal
 from PyQt6.QtWidgets import QComboBox, QWidget
 
 from buzz.model_loader import ModelType
-from buzz.transcriber import LOADED_WHISPER_DLL
 
 
 class ModelTypeComboBox(QComboBox):
@@ -19,13 +18,11 @@ class ModelTypeComboBox(QComboBox):
         super().__init__(parent)
 
         if model_types is None:
-            model_types = [model_type for model_type in ModelType]
+            model_types = [
+                model_type for model_type in ModelType if model_type.is_available()
+            ]
 
         for model_type in model_types:
-            # Hide Whisper.cpp option is whisper.dll did not load correctly.
-            # See: https://github.com/chidiwilliams/buzz/issues/274, https://github.com/chidiwilliams/buzz/issues/197
-            if model_type == ModelType.WHISPER_CPP and LOADED_WHISPER_DLL is False:
-                continue
             self.addItem(model_type.value)
 
         self.currentTextChanged.connect(self.on_text_changed)


@@ -39,15 +39,19 @@ class FileTranscriptionPreferences:
     def load(cls, settings: QSettings) -> "FileTranscriptionPreferences":
         language = settings.value("language", None)
         task = settings.value("task", Task.TRANSCRIBE)
-        model = settings.value("model", TranscriptionModel())
-        word_level_timings = settings.value("word_level_timings", False)
+        model: TranscriptionModel = settings.value(
+            "model", TranscriptionModel.default()
+        )
+        word_level_timings = bool(settings.value("word_level_timings", False))
         temperature = settings.value("temperature", DEFAULT_WHISPER_TEMPERATURE)
         initial_prompt = settings.value("initial_prompt", "")
-        output_formats = settings.value("output_formats", [])
+        output_formats = settings.value("output_formats", []) or []
         return FileTranscriptionPreferences(
             language=language,
             task=task,
-            model=model,
+            model=model
+            if model.model_type.is_available()
+            else TranscriptionModel.default(),
             word_level_timings=word_level_timings,
             temperature=temperature,
             initial_prompt=initial_prompt,


@@ -23,6 +23,8 @@ from buzz.widgets.model_type_combo_box import ModelTypeComboBox
 
 
 class ModelsPreferencesWidget(QWidget):
+    model: Optional[TranscriptionModel]
+
     def __init__(
         self,
         progress_dialog_modality=Qt.WindowModality.WindowModal,
@@ -31,8 +33,19 @@ class ModelsPreferencesWidget(QWidget):
         super().__init__(parent)
 
         self.model_downloader: Optional[ModelDownloader] = None
-        self.model = TranscriptionModel(
-            model_type=ModelType.WHISPER, whisper_model_size=WhisperModelSize.TINY
+
+        model_types = [
+            model_type
+            for model_type in ModelType
+            if model_type.is_available() and model_type.is_manually_downloadable()
+        ]
+
+        self.model = (
+            TranscriptionModel(
+                model_type=model_types[0], whisper_model_size=WhisperModelSize.TINY
+            )
+            if model_types[0] is not None
+            else None
         )
 
         self.progress_dialog_modality = progress_dialog_modality
@@ -40,12 +53,8 @@ class ModelsPreferencesWidget(QWidget):
         layout = QFormLayout()
 
         model_type_combo_box = ModelTypeComboBox(
-            model_types=[
-                ModelType.WHISPER,
-                ModelType.WHISPER_CPP,
-                ModelType.FASTER_WHISPER,
-            ],
-            default_model=self.model.model_type,
+            model_types=model_types,
+            default_model=self.model.model_type if self.model is not None else None,
             parent=self,
         )
         model_type_combo_box.changed.connect(self.on_model_type_changed)
@@ -119,6 +128,10 @@ class ModelsPreferencesWidget(QWidget):
         self.model_list_widget.expandToDepth(2)
         self.model_list_widget.setHeaderHidden(True)
         self.model_list_widget.setAlternatingRowColors(True)
+
+        if self.model is None:
+            return
+
         for model_size in WhisperModelSize:
             model = TranscriptionModel(
                 model_type=self.model.model_type, whisper_model_size=model_size


@@ -12,14 +12,12 @@ from buzz.model_loader import (
     ModelDownloader,
     TranscriptionModel,
     ModelType,
-    WhisperModelSize,
 )
 from buzz.recording import RecordingAmplitudeListener
 from buzz.recording_transcriber import RecordingTranscriber
 from buzz.settings.settings import Settings
 from buzz.transcriber import (
     TranscriptionOptions,
-    LOADED_WHISPER_DLL,
     Task,
     DEFAULT_WHISPER_TEMPERATURE,
 )
@@ -65,15 +63,20 @@ class RecordingTranscriberWidget(QWidget):
         default_language = self.settings.value(
             key=Settings.Key.RECORDING_TRANSCRIBER_LANGUAGE, default_value=""
         )
+
+        model_types = [
+            model_type
+            for model_type in ModelType
+            if model_type.is_available() and model_type.supports_recording()
+        ]
+        default_model: Optional[TranscriptionModel] = None
+        if len(model_types) > 0:
+            default_model = TranscriptionModel(model_type=model_types[0])
+
         self.transcription_options = TranscriptionOptions(
             model=self.settings.value(
                 key=Settings.Key.RECORDING_TRANSCRIBER_MODEL,
-                default_value=TranscriptionModel(
-                    model_type=ModelType.WHISPER_CPP
-                    if LOADED_WHISPER_DLL
-                    else ModelType.WHISPER,
-                    whisper_model_size=WhisperModelSize.TINY,
-                ),
+                default_value=default_model,
             ),
             task=self.settings.value(
                 key=Settings.Key.RECORDING_TRANSCRIBER_TASK,
@@ -102,12 +105,7 @@ class RecordingTranscriberWidget(QWidget):
         transcription_options_group_box = TranscriptionOptionsGroupBox(
             default_transcription_options=self.transcription_options,
-            # Live transcription with OpenAI Whisper API not implemented
-            model_types=[
-                model_type
-                for model_type in ModelType
-                if model_type is not ModelType.OPEN_AI_WHISPER_API
-            ],
+            model_types=model_types,
             parent=self,
         )
 
         transcription_options_group_box.transcription_options_changed.connect(

poetry.lock (generated)

@ -161,21 +161,22 @@ files = [
[[package]] [[package]]
name = "attrs" name = "attrs"
version = "23.1.0" version = "23.2.0"
description = "Classes Without Boilerplate" description = "Classes Without Boilerplate"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{ file = "attrs-23.1.0-py3-none-any.whl", hash = "sha256:1f28b4522cdc2fb4256ac1a020c78acf9cba2c6b461ccd2c126f3aa8e8335d04" }, { file = "attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1" },
{ file = "attrs-23.1.0.tar.gz", hash = "sha256:6279836d581513a26f1bf235f9acd333bc9115683f14f7e8fae46c98fc50e015" }, { file = "attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30" },
] ]
[package.extras] [package.extras]
cov = ["attrs[tests]", "coverage[toml] (>=5.3)"] cov = ["attrs[tests]", "coverage[toml] (>=5.3)"]
dev = ["attrs[docs,tests]", "pre-commit"] dev = ["attrs[tests]", "pre-commit"]
docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"] docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope-interface"]
tests = ["attrs[tests-no-zope]", "zope-interface"] tests = ["attrs[tests-no-zope]", "zope-interface"]
tests-no-zope = ["cloudpickle", "hypothesis", "mypy (>=1.1.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.6)", "pytest-mypy-plugins"]
tests-no-zope = ["attrs[tests-mypy]", "cloudpickle", "hypothesis", "pympler", "pytest (>=4.3.0)", "pytest-xdist[psutil]"]
[[package]] [[package]]
name = "autopep8" name = "autopep8"
@ -489,63 +490,63 @@ cron = ["capturer (>=2.4)"]
[[package]] [[package]]
name = "coverage" name = "coverage"
version = "7.3.4" version = "7.4.0"
description = "Code coverage measurement for Python" description = "Code coverage measurement for Python"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
files = [ files = [
{ file = "coverage-7.3.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aff2bd3d585969cc4486bfc69655e862028b689404563e6b549e6a8244f226df" }, { file = "coverage-7.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36b0ea8ab20d6a7564e89cb6135920bc9188fb5f1f7152e94e8300b7b189441a" },
{ file = "coverage-7.3.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4353923f38d752ecfbd3f1f20bf7a3546993ae5ecd7c07fd2f25d40b4e54571" }, { file = "coverage-7.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0676cd0ba581e514b7f726495ea75aba3eb20899d824636c6f59b0ed2f88c471" },
{ file = "coverage-7.3.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea473c37872f0159294f7073f3fa72f68b03a129799f3533b2bb44d5e9fa4f82" }, { file = "coverage-7.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ca5c71a5a1765a0f8f88022c52b6b8be740e512980362f7fdbb03725a0d6b9" },
{ file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5214362abf26e254d749fc0c18af4c57b532a4bfde1a057565616dd3b8d7cc94" }, { file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7c97726520f784239f6c62506bc70e48d01ae71e9da128259d61ca5e9788516" },
{ file = "coverage-7.3.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f99b7d3f7a7adfa3d11e3a48d1a91bb65739555dd6a0d3fa68aa5852d962e5b1" }, { file = "coverage-7.4.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:815ac2d0f3398a14286dc2cea223a6f338109f9ecf39a71160cd1628786bc6f5" },
{ file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:74397a1263275bea9d736572d4cf338efaade2de9ff759f9c26bcdceb383bb49" }, { file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:80b5ee39b7f0131ebec7968baa9b2309eddb35b8403d1869e08f024efd883566" },
{ file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:f154bd866318185ef5865ace5be3ac047b6d1cc0aeecf53bf83fe846f4384d5d" }, { file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:5b2ccb7548a0b65974860a78c9ffe1173cfb5877460e5a229238d985565574ae" },
{ file = "coverage-7.3.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:e0d84099ea7cba9ff467f9c6f747e3fc3906e2aadac1ce7b41add72e8d0a3712" }, { file = "coverage-7.4.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:995ea5c48c4ebfd898eacb098164b3cc826ba273b3049e4a889658548e321b43" },
{ file = "coverage-7.3.4-cp310-cp310-win32.whl", hash = "sha256:3f477fb8a56e0c603587b8278d9dbd32e54bcc2922d62405f65574bd76eba78a" }, { file = "coverage-7.4.0-cp310-cp310-win32.whl", hash = "sha256:79287fd95585ed36e83182794a57a46aeae0b64ca53929d1176db56aacc83451" },
{ file = "coverage-7.3.4-cp310-cp310-win_amd64.whl", hash = "sha256:c75738ce13d257efbb6633a049fb2ed8e87e2e6c2e906c52d1093a4d08d67c6b" }, { file = "coverage-7.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:5b14b4f8760006bfdb6e08667af7bc2d8d9bfdb648351915315ea17645347137" },
{ file = "coverage-7.3.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:997aa14b3e014339d8101b9886063c5d06238848905d9ad6c6eabe533440a9a7" }, { file = "coverage-7.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:04387a4a6ecb330c1878907ce0dc04078ea72a869263e53c72a1ba5bbdf380ca" },
{ file = "coverage-7.3.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8a9c5bc5db3eb4cd55ecb8397d8e9b70247904f8eca718cc53c12dcc98e59fc8" }, { file = "coverage-7.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ea81d8f9691bb53f4fb4db603203029643caffc82bf998ab5b59ca05560f4c06" },
{ file = "coverage-7.3.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27ee94f088397d1feea3cb524e4313ff0410ead7d968029ecc4bc5a7e1d34fbf" }, { file = "coverage-7.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74775198b702868ec2d058cb92720a3c5a9177296f75bd97317c787daf711505" },
{ file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ce03e25e18dd9bf44723e83bc202114817f3367789052dc9e5b5c79f40cf59d" }, { file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:76f03940f9973bfaee8cfba70ac991825611b9aac047e5c80d499a44079ec0bc" },
{ file = "coverage-7.3.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85072e99474d894e5df582faec04abe137b28972d5e466999bc64fc37f564a03" }, { file = "coverage-7.4.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:485e9f897cf4856a65a57c7f6ea3dc0d4e6c076c87311d4bc003f82cfe199d25" },
{ file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a877810ef918d0d345b783fc569608804f3ed2507bf32f14f652e4eaf5d8f8d0" }, { file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6ae8c9d301207e6856865867d762a4b6fd379c714fcc0607a84b92ee63feff70" },
{ file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9ac17b94ab4ca66cf803f2b22d47e392f0977f9da838bf71d1f0db6c32893cb9" }, { file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bf477c355274a72435ceb140dc42de0dc1e1e0bf6e97195be30487d8eaaf1a09" },
{ file = "coverage-7.3.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:36d75ef2acab74dc948d0b537ef021306796da551e8ac8b467810911000af66a" }, { file = "coverage-7.4.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:83c2dda2666fe32332f8e87481eed056c8b4d163fe18ecc690b02802d36a4d26" },
{ file = "coverage-7.3.4-cp311-cp311-win32.whl", hash = "sha256:47ee56c2cd445ea35a8cc3ad5c8134cb9bece3a5cb50bb8265514208d0a65928" }, { file = "coverage-7.4.0-cp311-cp311-win32.whl", hash = "sha256:697d1317e5290a313ef0d369650cfee1a114abb6021fa239ca12b4849ebbd614" },
{ file = "coverage-7.3.4-cp311-cp311-win_amd64.whl", hash = "sha256:11ab62d0ce5d9324915726f611f511a761efcca970bd49d876cf831b4de65be5" }, { file = "coverage-7.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:26776ff6c711d9d835557ee453082025d871e30b3fd6c27fcef14733f67f0590" },
{ file = "coverage-7.3.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:33e63c578f4acce1b6cd292a66bc30164495010f1091d4b7529d014845cd9bee" }, { file = "coverage-7.4.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:13eaf476ec3e883fe3e5fe3707caeb88268a06284484a3daf8250259ef1ba143" },
{ file = "coverage-7.3.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:782693b817218169bfeb9b9ba7f4a9f242764e180ac9589b45112571f32a0ba6" }, { file = "coverage-7.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846f52f46e212affb5bcf131c952fb4075b55aae6b61adc9856222df89cbe3e2" },
{ file = "coverage-7.3.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c4277ddaad9293454da19121c59f2d850f16bcb27f71f89a5c4836906eb35ef" }, { file = "coverage-7.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26f66da8695719ccf90e794ed567a1549bb2644a706b41e9f6eae6816b398c4a" },
{ file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d892a19ae24b9801771a5a989fb3e850bd1ad2e2b6e83e949c65e8f37bc67a1" }, { file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:164fdcc3246c69a6526a59b744b62e303039a81e42cfbbdc171c91a8cc2f9446" },
{ file = "coverage-7.3.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3024ec1b3a221bd10b5d87337d0373c2bcaf7afd86d42081afe39b3e1820323b" }, { file = "coverage-7.4.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:316543f71025a6565677d84bc4df2114e9b6a615aa39fb165d697dba06a54af9" },
{ file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:a1c3e9d2bbd6f3f79cfecd6f20854f4dc0c6e0ec317df2b265266d0dc06535f1" }, { file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:bb1de682da0b824411e00a0d4da5a784ec6496b6850fdf8c865c1d68c0e318dd" },
{ file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:e91029d7f151d8bf5ab7d8bfe2c3dbefd239759d642b211a677bc0709c9fdb96" }, { file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0e8d06778e8fbffccfe96331a3946237f87b1e1d359d7fbe8b06b96c95a5407a" },
{ file = "coverage-7.3.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:6879fe41c60080aa4bb59703a526c54e0412b77e649a0d06a61782ecf0853ee1" }, { file = "coverage-7.4.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a56de34db7b7ff77056a37aedded01b2b98b508227d2d0979d373a9b5d353daa" },
{ file = "coverage-7.3.4-cp312-cp312-win32.whl", hash = "sha256:fd2f8a641f8f193968afdc8fd1697e602e199931012b574194052d132a79be13" }, { file = "coverage-7.4.0-cp312-cp312-win32.whl", hash = "sha256:51456e6fa099a8d9d91497202d9563a320513fcf59f33991b0661a4a6f2ad450" },
{ file = "coverage-7.3.4-cp312-cp312-win_amd64.whl", hash = "sha256:d1d0ce6c6947a3a4aa5479bebceff2c807b9f3b529b637e2b33dea4468d75fc7" }, { file = "coverage-7.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:cd3c1e4cb2ff0083758f09be0f77402e1bdf704adb7f89108007300a6da587d0" },
{ file = "coverage-7.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36797b3625d1da885b369bdaaa3b0d9fb8865caed3c2b8230afaa6005434aa2f" }, { file = "coverage-7.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e9d1bf53c4c8de58d22e0e956a79a5b37f754ed1ffdbf1a260d9dcfa2d8a325e" },
{ file = "coverage-7.3.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bfed0ec4b419fbc807dec417c401499ea869436910e1ca524cfb4f81cf3f60e7" }, { file = "coverage-7.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:109f5985182b6b81fe33323ab4707011875198c41964f014579cf82cebf2bb85" },
{ file = "coverage-7.3.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f97ff5a9fc2ca47f3383482858dd2cb8ddbf7514427eecf5aa5f7992d0571429" }, { file = "coverage-7.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3cc9d4bc55de8003663ec94c2f215d12d42ceea128da8f0f4036235a119c88ac" },
{ file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:607b6c6b35aa49defaebf4526729bd5238bc36fe3ef1a417d9839e1d96ee1e4c" }, { file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cc6d65b21c219ec2072c1293c505cf36e4e913a3f936d80028993dd73c7906b1" },
{ file = "coverage-7.3.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8e258dcc335055ab59fe79f1dec217d9fb0cdace103d6b5c6df6b75915e7959" }, { file = "coverage-7.4.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a10a4920def78bbfff4eff8a05c51be03e42f1c3735be42d851f199144897ba" },
{ file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:a02ac7c51819702b384fea5ee033a7c202f732a2a2f1fe6c41e3d4019828c8d3" }, { file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b8e99f06160602bc64da35158bb76c73522a4010f0649be44a4e167ff8555952" },
{ file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b710869a15b8caf02e31d16487a931dbe78335462a122c8603bb9bd401ff6fb2" }, { file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:7d360587e64d006402b7116623cebf9d48893329ef035278969fa3bbf75b697e" },
{ file = "coverage-7.3.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:c6a23ae9348a7a92e7f750f9b7e828448e428e99c24616dec93a0720342f241d" }, { file = "coverage-7.4.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:29f3abe810930311c0b5d1a7140f6395369c3db1be68345638c33eec07535105" },
{ file = "coverage-7.3.4-cp38-cp38-win32.whl", hash = "sha256:758ebaf74578b73f727acc4e8ab4b16ab6f22a5ffd7dd254e5946aba42a4ce76" }, { file = "coverage-7.4.0-cp38-cp38-win32.whl", hash = "sha256:5040148f4ec43644702e7b16ca864c5314ccb8ee0751ef617d49aa0e2d6bf4f2" },
{ file = "coverage-7.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:309ed6a559bc942b7cc721f2976326efbfe81fc2b8f601c722bff927328507dc" }, { file = "coverage-7.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:9864463c1c2f9cb3b5db2cf1ff475eed2f0b4285c2aaf4d357b69959941aa555" },
{ file = "coverage-7.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:aefbb29dc56317a4fcb2f3857d5bce9b881038ed7e5aa5d3bcab25bd23f57328" }, { file = "coverage-7.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:936d38794044b26c99d3dd004d8af0035ac535b92090f7f2bb5aa9c8e2f5cd42" },
{ file = "coverage-7.3.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:183c16173a70caf92e2dfcfe7c7a576de6fa9edc4119b8e13f91db7ca33a7923" }, { file = "coverage-7.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:799c8f873794a08cdf216aa5d0531c6a3747793b70c53f70e98259720a6fe2d7" },
{ file = "coverage-7.3.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a4184dcbe4f98d86470273e758f1d24191ca095412e4335ff27b417291f5964" }, { file = "coverage-7.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e7defbb9737274023e2d7af02cac77043c86ce88a907c58f42b580a97d5bcca9" },
{ file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93698ac0995516ccdca55342599a1463ed2e2d8942316da31686d4d614597ef9" }, { file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a1526d265743fb49363974b7aa8d5899ff64ee07df47dd8d3e37dcc0818f09ed" },
{ file = "coverage-7.3.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb220b3596358a86361139edce40d97da7458412d412e1e10c8e1970ee8c09ab" }, { file = "coverage-7.4.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf635a52fc1ea401baf88843ae8708591aa4adff875e5c23220de43b1ccf575c" },
{ file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d5b14abde6f8d969e6b9dd8c7a013d9a2b52af1235fe7bebef25ad5c8f47fa18" }, { file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:756ded44f47f330666843b5781be126ab57bb57c22adbb07d83f6b519783b870" },
{ file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:610afaf929dc0e09a5eef6981edb6a57a46b7eceff151947b836d869d6d567c1" }, { file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:0eb3c2f32dabe3a4aaf6441dde94f35687224dfd7eb2a7f47f3fd9428e421058" },
{ file = "coverage-7.3.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:d6ed790728fb71e6b8247bd28e77e99d0c276dff952389b5388169b8ca7b1c28" }, { file = "coverage-7.4.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:bfd5db349d15c08311702611f3dccbef4b4e2ec148fcc636cf8739519b4a5c0f" },
{ file = "coverage-7.3.4-cp39-cp39-win32.whl", hash = "sha256:c15fdfb141fcf6a900e68bfa35689e1256a670db32b96e7a931cab4a0e1600e5" }, { file = "coverage-7.4.0-cp39-cp39-win32.whl", hash = "sha256:53d7d9158ee03956e0eadac38dfa1ec8068431ef8058fe6447043db1fb40d932" },
{ file = "coverage-7.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:38d0b307c4d99a7aca4e00cad4311b7c51b7ac38fb7dea2abe0d182dd4008e05" }, { file = "coverage-7.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:cfd2a8b6b0d8e66e944d47cdec2f47c48fef2ba2f2dff5a9a75757f64172857e" },
{ file = "coverage-7.3.4-pp38.pp39.pp310-none-any.whl", hash = "sha256:b1e0f25ae99cf247abfb3f0fac7ae25739e4cd96bf1afa3537827c576b4847e5" }, { file = "coverage-7.4.0-pp38.pp39.pp310-none-any.whl", hash = "sha256:c530833afc4707fe48524a44844493f36d8727f04dcce91fb978c414a8556cc6" },
{ file = "coverage-7.3.4.tar.gz", hash = "sha256:020d56d2da5bc22a0e00a5b0d54597ee91ad72446fa4cf1b97c35022f6b6dbf0" }, { file = "coverage-7.4.0.tar.gz", hash = "sha256:707c0f58cb1712b8809ece32b68996ee1e609f71bd14615bd8f87a1293cb610e" },
] ]
[package.dependencies] [package.dependencies]
@ -1391,47 +1392,47 @@ numpy = ">=1.22,<1.27"
[[package]] [[package]]
name = "numpy" name = "numpy"
version = "1.26.2" version = "1.26.3"
description = "Fundamental package for array computing in Python" description = "Fundamental package for array computing in Python"
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
files = [ files = [
{ file = "numpy-1.26.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3703fc9258a4a122d17043e57b35e5ef1c5a5837c3db8be396c82e04c1cf9b0f" }, { file = "numpy-1.26.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:806dd64230dbbfaca8a27faa64e2f414bf1c6622ab78cc4264f7f5f028fee3bf" },
{ file = "numpy-1.26.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cc392fdcbd21d4be6ae1bb4475a03ce3b025cd49a9be5345d76d7585aea69440" }, { file = "numpy-1.26.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02f98011ba4ab17f46f80f7f8f1c291ee7d855fcef0a5a98db80767a468c85cd" },
{ file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:36340109af8da8805d8851ef1d74761b3b88e81a9bd80b290bbfed61bd2b4f75" }, { file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d45b3ec2faed4baca41c76617fcdcfa4f684ff7a151ce6fc78ad3b6e85af0a6" },
{ file = "numpy-1.26.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bcc008217145b3d77abd3e4d5ef586e3bdfba8fe17940769f8aa09b99e856c00" }, { file = "numpy-1.26.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bdd2b45bf079d9ad90377048e2747a0c82351989a2165821f0c96831b4a2a54b" },
{ file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:3ced40d4e9e18242f70dd02d739e44698df3dcb010d31f495ff00a31ef6014fe" }, { file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:211ddd1e94817ed2d175b60b6374120244a4dd2287f4ece45d49228b4d529178" },
{ file = "numpy-1.26.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b272d4cecc32c9e19911891446b72e986157e6a1809b7b56518b4f3755267523" }, { file = "numpy-1.26.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b1240f767f69d7c4c8a29adde2310b871153df9b26b5cb2b54a561ac85146485" },
{ file = "numpy-1.26.2-cp310-cp310-win32.whl", hash = "sha256:22f8fc02fdbc829e7a8c578dd8d2e15a9074b630d4da29cda483337e300e3ee9" }, { file = "numpy-1.26.3-cp310-cp310-win32.whl", hash = "sha256:21a9484e75ad018974a2fdaa216524d64ed4212e418e0a551a2d83403b0531d3" },
{ file = "numpy-1.26.2-cp310-cp310-win_amd64.whl", hash = "sha256:26c9d33f8e8b846d5a65dd068c14e04018d05533b348d9eaeef6c1bd787f9919" }, { file = "numpy-1.26.3-cp310-cp310-win_amd64.whl", hash = "sha256:9e1591f6ae98bcfac2a4bbf9221c0b92ab49762228f38287f6eeb5f3f55905ce" },
{ file = "numpy-1.26.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b96e7b9c624ef3ae2ae0e04fa9b460f6b9f17ad8b4bec6d7756510f1f6c0c841" }, { file = "numpy-1.26.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b831295e5472954104ecb46cd98c08b98b49c69fdb7040483aff799a755a7374" },
{ file = "numpy-1.26.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aa18428111fb9a591d7a9cc1b48150097ba6a7e8299fb56bdf574df650e7d1f1" }, { file = "numpy-1.26.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9e87562b91f68dd8b1c39149d0323b42e0082db7ddb8e934ab4c292094d575d6" },
{ file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06fa1ed84aa60ea6ef9f91ba57b5ed963c3729534e6e54055fc151fad0423f0a" }, { file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c66d6fec467e8c0f975818c1796d25c53521124b7cfb760114be0abad53a0a2" },
{ file = "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:96ca5482c3dbdd051bcd1fce8034603d6ebfc125a7bd59f55b40d8f5d246832b" }, { file = "numpy-1.26.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f25e2811a9c932e43943a2615e65fc487a0b6b49218899e62e426e7f0a57eeda" },
{ file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:854ab91a2906ef29dc3925a064fcd365c7b4da743f84b123002f6139bcb3f8a7" }, { file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:af36e0aa45e25c9f57bf684b1175e59ea05d9a7d3e8e87b7ae1a1da246f2767e" },
{ file = "numpy-1.26.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:f43740ab089277d403aa07567be138fc2a89d4d9892d113b76153e0e412409f8" }, { file = "numpy-1.26.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:51c7f1b344f302067b02e0f5b5d2daa9ed4a721cf49f070280ac202738ea7f00" },
{ file = "numpy-1.26.2-cp311-cp311-win32.whl", hash = "sha256:a2bbc29fcb1771cd7b7425f98b05307776a6baf43035d3b80c4b0f29e9545186" }, { file = "numpy-1.26.3-cp311-cp311-win32.whl", hash = "sha256:7ca4f24341df071877849eb2034948459ce3a07915c2734f1abb4018d9c49d7b" },
{ file = "numpy-1.26.2-cp311-cp311-win_amd64.whl", hash = "sha256:2b3fca8a5b00184828d12b073af4d0fc5fdd94b1632c2477526f6bd7842d700d" }, { file = "numpy-1.26.3-cp311-cp311-win_amd64.whl", hash = "sha256:39763aee6dfdd4878032361b30b2b12593fb445ddb66bbac802e2113eb8a6ac4" },
{ file = "numpy-1.26.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a4cd6ed4a339c21f1d1b0fdf13426cb3b284555c27ac2f156dfdaaa7e16bfab0" }, { file = "numpy-1.26.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:a7081fd19a6d573e1a05e600c82a1c421011db7935ed0d5c483e9dd96b99cf13" },
{ file = "numpy-1.26.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5d5244aabd6ed7f312268b9247be47343a654ebea52a60f002dc70c769048e75" }, { file = "numpy-1.26.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:12c70ac274b32bc00c7f61b515126c9205323703abb99cd41836e8125ea0043e" },
{ file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a3cdb4d9c70e6b8c0814239ead47da00934666f668426fc6e94cce869e13fd7" }, { file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f784e13e598e9594750b2ef6729bcd5a47f6cfe4a12cca13def35e06d8163e3" },
{ file = "numpy-1.26.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa317b2325f7aa0a9471663e6093c210cb2ae9c0ad824732b307d2c51983d5b6" }, { file = "numpy-1.26.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5f24750ef94d56ce6e33e4019a8a4d68cfdb1ef661a52cdaee628a56d2437419" },
{ file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:174a8880739c16c925799c018f3f55b8130c1f7c8e75ab0a6fa9d41cab092fd6" }, { file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:77810ef29e0fb1d289d225cabb9ee6cf4d11978a00bb99f7f8ec2132a84e0166" },
{ file = "numpy-1.26.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f79b231bf5c16b1f39c7f4875e1ded36abee1591e98742b05d8a0fb55d8a3eec" }, { file = "numpy-1.26.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8ed07a90f5450d99dad60d3799f9c03c6566709bd53b497eb9ccad9a55867f36" },
{ file = "numpy-1.26.2-cp312-cp312-win32.whl", hash = "sha256:4a06263321dfd3598cacb252f51e521a8cb4b6df471bb12a7ee5cbab20ea9167" }, { file = "numpy-1.26.3-cp312-cp312-win32.whl", hash = "sha256:f73497e8c38295aaa4741bdfa4fda1a5aedda5473074369eca10626835445511" },
{ file = "numpy-1.26.2-cp312-cp312-win_amd64.whl", hash = "sha256:b04f5dc6b3efdaab541f7857351aac359e6ae3c126e2edb376929bd3b7f92d7e" }, { file = "numpy-1.26.3-cp312-cp312-win_amd64.whl", hash = "sha256:da4b0c6c699a0ad73c810736303f7fbae483bcb012e38d7eb06a5e3b432c981b" },
{ file = "numpy-1.26.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4eb8df4bf8d3d90d091e0146f6c28492b0be84da3e409ebef54349f71ed271ef" }, { file = "numpy-1.26.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1666f634cb3c80ccbd77ec97bc17337718f56d6658acf5d3b906ca03e90ce87f" },
{ file = "numpy-1.26.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1a13860fdcd95de7cf58bd6f8bc5a5ef81c0b0625eb2c9a783948847abbef2c2" }, { file = "numpy-1.26.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:18c3319a7d39b2c6a9e3bb75aab2304ab79a811ac0168a671a62e6346c29b03f" },
{ file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:64308ebc366a8ed63fd0bf426b6a9468060962f1a4339ab1074c228fa6ade8e3" }, { file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b7e807d6888da0db6e7e75838444d62495e2b588b99e90dd80c3459594e857b" },
{ file = "numpy-1.26.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf8aab04a2c0e859da118f0b38617e5ee65d75b83795055fb66c0d5e9e9b818" }, { file = "numpy-1.26.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4d362e17bcb0011738c2d83e0a65ea8ce627057b2fdda37678f4374a382a137" },
{ file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d73a3abcac238250091b11caef9ad12413dab01669511779bc9b29261dd50210" }, { file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b8c275f0ae90069496068c714387b4a0eba5d531aace269559ff2b43655edd58" },
{ file = "numpy-1.26.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b361d369fc7e5e1714cf827b731ca32bff8d411212fccd29ad98ad622449cc36" }, { file = "numpy-1.26.3-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:cc0743f0302b94f397a4a65a660d4cd24267439eb16493fb3caad2e4389bccbb" },
{ file = "numpy-1.26.2-cp39-cp39-win32.whl", hash = "sha256:bd3f0091e845164a20bd5a326860c840fe2af79fa12e0469a12768a3ec578d80" }, { file = "numpy-1.26.3-cp39-cp39-win32.whl", hash = "sha256:9bc6d1a7f8cedd519c4b7b1156d98e051b726bf160715b769106661d567b3f03" },
{ file = "numpy-1.26.2-cp39-cp39-win_amd64.whl", hash = "sha256:2beef57fb031dcc0dc8fa4fe297a742027b954949cabb52a2a376c144e5e6060" }, { file = "numpy-1.26.3-cp39-cp39-win_amd64.whl", hash = "sha256:867e3644e208c8922a3be26fc6bbf112a035f50f0a86497f98f228c50c607bb2" },
{ file = "numpy-1.26.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1cc3d5029a30fb5f06704ad6b23b35e11309491c999838c31f124fee32107c79" }, { file = "numpy-1.26.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3c67423b3703f8fbd90f5adaa37f85b5794d3366948efe9a5190a5f3a83fc34e" },
{ file = "numpy-1.26.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94cc3c222bb9fb5a12e334d0479b97bb2df446fbe622b470928f5284ffca3f8d" }, { file = "numpy-1.26.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46f47ee566d98849323f01b349d58f2557f02167ee301e5e28809a8c0e27a2d0" },
{ file = "numpy-1.26.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:fe6b44fb8fcdf7eda4ef4461b97b3f63c466b27ab151bec2366db8b197387841" }, { file = "numpy-1.26.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a8474703bffc65ca15853d5fd4d06b18138ae90c17c8d12169968e998e448bb5" },
{ file = "numpy-1.26.2.tar.gz", hash = "sha256:f65738447676ab5777f11e6bbbdb8ce11b785e105f690bc45966574816b6d3ea" }, { file = "numpy-1.26.3.tar.gz", hash = "sha256:697df43e2b6310ecc9d95f05d5ef20eacc09c7c4ecc9da3f235d39e71b7da1e4" },
] ]
[[package]] [[package]]
@ -1792,13 +1793,13 @@ files = [
[[package]] [[package]]
name = "pytest" name = "pytest"
version = "7.4.3" version = "7.4.4"
description = "pytest: simple powerful testing with Python" description = "pytest: simple powerful testing with Python"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{ file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac" }, { file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8" },
{ file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5" }, { file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280" },
] ]
[package.dependencies] [package.dependencies]
@ -2089,28 +2090,28 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"]
[[package]] [[package]]
name = "ruff" name = "ruff"
version = "0.1.9" version = "0.1.11"
description = "An extremely fast Python linter and code formatter, written in Rust." description = "An extremely fast Python linter and code formatter, written in Rust."
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
files = [ files = [
{ file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:e6a212f436122ac73df851f0cf006e0c6612fe6f9c864ed17ebefce0eff6a5fd" }, { file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a7f772696b4cdc0a3b2e527fc3c7ccc41cdcb98f5c80fdd4f2b8c50eb1458196" },
{ file = "ruff-0.1.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:28d920e319783d5303333630dae46ecc80b7ba294aeffedf946a02ac0b7cc3db" }, { file = "ruff-0.1.11-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:934832f6ed9b34a7d5feea58972635c2039c7a3b434fe5ba2ce015064cb6e955" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:104aa9b5e12cb755d9dce698ab1b97726b83012487af415a4512fedd38b1459e" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ea0d3e950e394c4b332bcdd112aa566010a9f9c95814844a7468325290aabfd9" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1e63bf5a4a91971082a4768a0aba9383c12392d0d6f1e2be2248c1f9054a20da" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9bd4025b9c5b429a48280785a2b71d479798a69f5c2919e7d274c5f4b32c3607" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4d0738917c203246f3e275b37006faa3aa96c828b284ebfe3e99a8cb413c8c4b" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e1ad00662305dcb1e987f5ec214d31f7d6a062cae3e74c1cbccef15afd96611d" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:69dac82d63a50df2ab0906d97a01549f814b16bc806deeac4f064ff95c47ddf5" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:4b077ce83f47dd6bea1991af08b140e8b8339f0ba8cb9b7a484c30ebab18a23f" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2aec598fb65084e41a9c5d4b95726173768a62055aafb07b4eff976bac72a592" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c4a88efecec23c37b11076fe676e15c6cdb1271a38f2b415e381e87fe4517f18" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:744dfe4b35470fa3820d5fe45758aace6269c578f7ddc43d447868cfe5078bcb" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5b25093dad3b055667730a9b491129c42d45e11cdb7043b702e97125bcec48a1" },
{ file = "ruff-0.1.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:479ca4250cab30f9218b2e563adc362bd6ae6343df7c7b5a7865300a5156d5a6" }, { file = "ruff-0.1.11-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:231d8fb11b2cc7c0366a326a66dafc6ad449d7fcdbc268497ee47e1334f66f77" },
{ file = "ruff-0.1.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:aa8344310f1ae79af9ccd6e4b32749e93cddc078f9b5ccd0e45bd76a6d2e8bb6" }, { file = "ruff-0.1.11-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:09c415716884950080921dd6237767e52e227e397e2008e2bed410117679975b" },
{ file = "ruff-0.1.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:837c739729394df98f342319f5136f33c65286b28b6b70a87c28f59354ec939b" }, { file = "ruff-0.1.11-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:0f58948c6d212a6b8d41cd59e349751018797ce1727f961c2fa755ad6208ba45" },
{ file = "ruff-0.1.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:e6837202c2859b9f22e43cb01992373c2dbfeae5c0c91ad691a4a2e725392464" }, { file = "ruff-0.1.11-py3-none-musllinux_1_2_i686.whl", hash = "sha256:190a566c8f766c37074d99640cd9ca3da11d8deae2deae7c9505e68a4a30f740" },
{ file = "ruff-0.1.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:331aae2cd4a0554667ac683243b151c74bd60e78fb08c3c2a4ac05ee1e606a39" }, { file = "ruff-0.1.11-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:6464289bd67b2344d2a5d9158d5eb81025258f169e69a46b741b396ffb0cda95" },
{ file = "ruff-0.1.9-py3-none-win32.whl", hash = "sha256:8151425a60878e66f23ad47da39265fc2fad42aed06fb0a01130e967a7a064f4" }, { file = "ruff-0.1.11-py3-none-win32.whl", hash = "sha256:9b8f397902f92bc2e70fb6bebfa2139008dc72ae5177e66c383fa5426cb0bf2c" },
{ file = "ruff-0.1.9-py3-none-win_amd64.whl", hash = "sha256:c497d769164df522fdaf54c6eba93f397342fe4ca2123a2e014a5b8fc7df81c7" }, { file = "ruff-0.1.11-py3-none-win_amd64.whl", hash = "sha256:eb85ee287b11f901037a6683b2374bb0ec82928c5cbc984f575d0437979c521a" },
{ file = "ruff-0.1.9-py3-none-win_arm64.whl", hash = "sha256:0e17f53bcbb4fff8292dfd84cf72d767b5e146f009cccd40c2fad27641f8a7a9" }, { file = "ruff-0.1.11-py3-none-win_arm64.whl", hash = "sha256:97ce4d752f964ba559c7023a86e5f8e97f026d511e48013987623915431c7ea9" },
{ file = "ruff-0.1.9.tar.gz", hash = "sha256:b041dee2734719ddbb4518f762c982f2e912e7f28b8ee4fe1dee0b15d1b6e800" }, { file = "ruff-0.1.11.tar.gz", hash = "sha256:f9d4d88cb6eeb4dfe20f9f0519bd2eaba8119bde87c3d5065c541dbae2b5a2cb" },
] ]
[[package]] [[package]]
@ -2769,4 +2770,4 @@ testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "p
[metadata] [metadata]
lock-version = "2.0" lock-version = "2.0"
python-versions = ">=3.9.13,<3.11" python-versions = ">=3.9.13,<3.11"
content-hash = "fbf6f74ef9a08a29eee546c598925e282607d282773bd9204ac9ee9c7aece129" content-hash = "7fa77e9810e1dfc8deb6d5df8ceada1267a6d832c6a0bbbf7d7d8e9363815369"

View file

@ -14,23 +14,29 @@ packages = [
[tool.poetry.dependencies] [tool.poetry.dependencies]
python = ">=3.9.13,<3.11" python = ">=3.9.13,<3.11"
sounddevice = "^0.4.5" sounddevice = "^0.4.5"
torch = "1.12.1"
transformers = "~4.24.0"
appdirs = "^1.4.4" appdirs = "^1.4.4"
humanize = "^4.4.0" humanize = "^4.4.0"
PyQt6 = "^6.4.0" PyQt6 = "^6.4.0"
stable-ts = "1.0.2"
openai = "^0.27.1" openai = "^0.27.1"
faster-whisper = "^0.4.1"
keyring = "^23.13.1" keyring = "^23.13.1"
openai-whisper = "v20231106"
platformdirs = "^3.5.3" platformdirs = "^3.5.3"
dataclasses-json = "^0.5.9" dataclasses-json = "^0.5.9"
ffmpeg-python = "0.2.0" ffmpeg-python = "0.2.0"
numpy = "^1.21.2"
# Only install on non-Linux to prevent execstack errors
stable-ts = { version = "1.0.2", markers = "sys_platform != 'linux'" }
faster-whisper = { version = "^0.4.1", markers = "sys_platform != 'linux'" }
openai-whisper = { version = "v20231106", markers = "sys_platform != 'linux'" }
torch = { version = "1.12.1", markers = "sys_platform != 'linux'" }
transformers = { version = "~4.24.0", markers = "sys_platform != 'linux'" }
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
autopep8 = "^1.7.0" autopep8 = "^1.7.0"
pyinstaller = "^5.4.1" pyinstaller = "^5.4.1"
# Lock to 2023.11 to fix error in 2023.12:
# AttributeError: module 'dataclasses' has no attribute '__version__'
pyinstaller-hooks-contrib = "2023.11"
six = "^1.16.0" six = "^1.16.0"
pytest = "^7.1.3" pytest = "^7.1.3"
pytest-cov = "^4.0.0" pytest-cov = "^4.0.0"
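The markers above keep the Torch-based backends out of the Linux install entirely. As an aside (not part of this commit), a small probe like the following can confirm at runtime which of the gated packages actually resolved in the current environment, using only the standard library; the backend names listed are the import names of the dependencies gated above.

```python
# Illustrative helper, not part of the commit: report which of the
# platform-gated transcription backends are importable here.
import importlib.util
import sys

OPTIONAL_BACKENDS = ("whisper", "faster_whisper", "transformers")


def installed_backends() -> list[str]:
    """Return the optional backends whose modules resolved in this environment."""
    return [name for name in OPTIONAL_BACKENDS if importlib.util.find_spec(name) is not None]


if __name__ == "__main__":
    print(f"platform={sys.platform} backends={installed_backends()}")
```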

View file

@ -22,7 +22,6 @@ from buzz.widgets.transcriber.hugging_face_search_line_edit import (
from buzz.widgets.transcriber.languages_combo_box import LanguagesComboBox from buzz.widgets.transcriber.languages_combo_box import LanguagesComboBox
from buzz.widgets.transcriber.temperature_validator import TemperatureValidator from buzz.widgets.transcriber.temperature_validator import TemperatureValidator
from buzz.widgets.about_dialog import AboutDialog from buzz.widgets.about_dialog import AboutDialog
from buzz.model_loader import ModelType
from buzz.settings.settings import Settings from buzz.settings.settings import Settings
from buzz.transcriber import ( from buzz.transcriber import (
TranscriptionOptions, TranscriptionOptions,
@ -246,7 +245,4 @@ class TestTranscriptionOptionsGroupBox:
widget.model_type_combo_box.setCurrentIndex(1) widget.model_type_combo_box.setCurrentIndex(1)
transcription_options: TranscriptionOptions = ( mock_transcription_options_changed.assert_called()
mock_transcription_options_changed.call_args[0][0]
)
assert transcription_options.model.model_type == ModelType.WHISPER_CPP

View file

@ -6,7 +6,8 @@ from unittest.mock import MagicMock
import numpy as np import numpy as np
import sounddevice import sounddevice
import whisper
from buzz import whisper_audio
mock_query_devices = [ mock_query_devices = [
{ {
@ -114,11 +115,11 @@ class MockInputStream(MagicMock):
self.thread.start() self.thread.start()
def target(self): def target(self):
sample_rate = whisper.audio.SAMPLE_RATE sample_rate = whisper_audio.SAMPLE_RATE
file_path = os.path.join( file_path = os.path.join(
os.path.dirname(__file__), "../testdata/whisper-french.mp3" os.path.dirname(__file__), "../testdata/whisper-french.mp3"
) )
audio = whisper.load_audio(file_path, sr=sample_rate) audio = whisper_audio.load_audio(file_path, sr=sample_rate)
chunk_duration_secs = 1 chunk_duration_secs = 1
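The recording mock now goes through buzz.whisper_audio instead of importing whisper directly, so it still runs where openai-whisper is excluded by the markers above. The module itself is not part of this diff; a plausible minimal shape, assuming the same 16 kHz mono ffmpeg decode that Whisper's own load_audio performs, would be:

```python
# Hypothetical sketch of a whisper_audio-style shim; the real buzz/whisper_audio
# module is not shown in this diff. It exposes SAMPLE_RATE and load_audio()
# without importing the `whisper` package, decoding via the ffmpeg CLI.
import subprocess

import numpy as np

SAMPLE_RATE = 16000  # Whisper models expect 16 kHz mono input


def load_audio(path: str, sr: int = SAMPLE_RATE) -> np.ndarray:
    """Decode an audio file to mono float32 PCM in [-1, 1]."""
    cmd = [
        "ffmpeg", "-nostdin", "-i", path,
        "-f", "s16le", "-ac", "1", "-acodec", "pcm_s16le", "-ar", str(sr), "-",
    ]
    out = subprocess.run(cmd, capture_output=True, check=True).stdout
    return np.frombuffer(out, np.int16).astype(np.float32) / 32768.0
```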

View file

@ -1,95 +0,0 @@
import platform
from unittest.mock import Mock
import pytest
from buzz.model_loader import WhisperModelSize, ModelType, TranscriptionModel
from buzz.transcriber import (
FileTranscriptionOptions,
FileTranscriptionTask,
Task,
WhisperCppFileTranscriber,
TranscriptionOptions,
WhisperFileTranscriber,
FileTranscriber,
)
from tests.model_loader import get_model_path
def get_task(model: TranscriptionModel):
file_transcription_options = FileTranscriptionOptions(
file_paths=["testdata/whisper-french.mp3"]
)
transcription_options = TranscriptionOptions(
language="fr", task=Task.TRANSCRIBE, word_level_timings=False, model=model
)
model_path = get_model_path(transcription_options.model)
return FileTranscriptionTask(
file_path="testdata/audio-long.mp3",
transcription_options=transcription_options,
file_transcription_options=file_transcription_options,
model_path=model_path,
)
def transcribe(qtbot, transcriber: FileTranscriber):
mock_completed = Mock()
transcriber.completed.connect(mock_completed)
with qtbot.waitSignal(transcriber.completed, timeout=10 * 60 * 1000):
transcriber.run()
segments = mock_completed.call_args[0][0]
return segments
@pytest.mark.parametrize(
"transcriber",
[
pytest.param(
WhisperCppFileTranscriber(
task=(
get_task(
TranscriptionModel(
model_type=ModelType.WHISPER_CPP,
whisper_model_size=WhisperModelSize.TINY,
)
)
)
),
id="Whisper.cpp - Tiny",
),
pytest.param(
WhisperFileTranscriber(
task=(
get_task(
TranscriptionModel(
model_type=ModelType.WHISPER,
whisper_model_size=WhisperModelSize.TINY,
)
)
)
),
id="Whisper - Tiny",
),
pytest.param(
WhisperFileTranscriber(
task=(
get_task(
TranscriptionModel(
model_type=ModelType.FASTER_WHISPER,
whisper_model_size=WhisperModelSize.TINY,
)
)
)
),
id="Faster Whisper - Tiny",
marks=pytest.mark.skipif(
platform.system() == "Darwin",
reason="Error with libiomp5 already initialized on GH action runner: https://github.com/chidiwilliams/buzz/actions/runs/4657331262/jobs/8241832087",
),
),
],
)
def test_should_transcribe_and_benchmark(qtbot, benchmark, transcriber):
segments = benchmark(transcribe, qtbot, transcriber)
assert len(segments) > 0

View file

@ -3,6 +3,7 @@ import os
import pathlib import pathlib
import platform import platform
import shutil import shutil
import sys
import tempfile import tempfile
import time import time
from typing import List from typing import List
@ -141,11 +142,16 @@ class TestWhisperCppFileTranscriber:
) )
mock_progress = Mock(side_effect=lambda value: print("progress: ", value)) mock_progress = Mock(side_effect=lambda value: print("progress: ", value))
mock_completed = Mock() mock_completed = Mock()
mock_error = Mock()
transcriber.progress.connect(mock_progress) transcriber.progress.connect(mock_progress)
transcriber.completed.connect(mock_completed) transcriber.completed.connect(mock_completed)
with qtbot.waitSignal(transcriber.completed, timeout=10 * 60 * 1000): transcriber.error.connect(mock_error)
with qtbot.wait_signal(transcriber.completed, timeout=10 * 60 * 1000):
transcriber.run() transcriber.run()
mock_error.assert_not_called()
mock_progress.assert_called() mock_progress.assert_called()
segments = [ segments = [
segment segment
@ -307,6 +313,9 @@ class TestWhisperFileTranscriber:
), ),
], ],
) )
@pytest.mark.skipif(
sys.platform == "linux", reason="Avoid execstack errors on Snap"
)
def test_transcribe( def test_transcribe(
self, self,
qtbot: QtBot, qtbot: QtBot,
@ -356,6 +365,9 @@ class TestWhisperFileTranscriber:
assert len(segments[i].text) > 0 assert len(segments[i].text) > 0
logging.debug(f"{segments[i].start} {segments[i].end} {segments[i].text}") logging.debug(f"{segments[i].start} {segments[i].end} {segments[i].text}")
@pytest.mark.skipif(
sys.platform == "linux", reason="Avoid execstack errors on Snap"
)
def test_transcribe_from_folder_watch_source(self, qtbot): def test_transcribe_from_folder_watch_source(self, qtbot):
file_path = tempfile.mktemp(suffix=".mp3") file_path = tempfile.mktemp(suffix=".mp3")
shutil.copy("testdata/whisper-french.mp3", file_path) shutil.copy("testdata/whisper-french.mp3", file_path)
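The same Linux skipif guard now appears on several tests in this file. Purely as a sketch (not something this commit adds), the guard could also be defined once and reused as a shared marker:

```python
# Hypothetical shared marker; the commit applies the skipif inline instead.
import sys

import pytest

skip_on_linux_snap = pytest.mark.skipif(
    sys.platform == "linux", reason="Avoid execstack errors on Snap"
)


@skip_on_linux_snap
def test_example_guarded():
    # Runs everywhere except Linux, matching the inline guards above.
    assert True
```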

View file

@ -1,6 +1,11 @@
import sys
import pytest
from buzz.transformers_whisper import load_model from buzz.transformers_whisper import load_model
@pytest.mark.skipif(sys.platform == "linux", reason="Not supported on Linux")
class TestTransformersWhisper: class TestTransformersWhisper:
def test_should_transcribe(self): def test_should_transcribe(self):
model = load_model("openai/whisper-tiny") model = load_model("openai/whisper-tiny")

View file

@ -11,7 +11,6 @@ class TestFileTranscriberWidget:
widget = FileTranscriberWidget( widget = FileTranscriberWidget(
file_paths=["testdata/whisper-french.mp3"], file_paths=["testdata/whisper-french.mp3"],
default_output_file_name="", default_output_file_name="",
parent=None,
) )
qtbot.add_widget(widget) qtbot.add_widget(widget)
assert widget.windowTitle() == "whisper-french.mp3" assert widget.windowTitle() == "whisper-french.mp3"
@ -20,7 +19,6 @@ class TestFileTranscriberWidget:
widget = FileTranscriberWidget( widget = FileTranscriberWidget(
file_paths=["testdata/whisper-french.mp3"], file_paths=["testdata/whisper-french.mp3"],
default_output_file_name="", default_output_file_name="",
parent=None,
) )
qtbot.add_widget(widget) qtbot.add_widget(widget)

View file

@ -79,12 +79,11 @@ class TestMainWindow:
assert open_transcript_action.isEnabled() assert open_transcript_action.isEnabled()
window.close() window.close()
# @pytest.mark.skip(reason='Timing out or crashing')
def test_should_run_and_cancel_transcription_task(self, qtbot, tasks_cache): def test_should_run_and_cancel_transcription_task(self, qtbot, tasks_cache):
window = MainWindow(tasks_cache=tasks_cache) window = MainWindow(tasks_cache=tasks_cache)
qtbot.add_widget(window) qtbot.add_widget(window)
self._start_new_transcription(window) self._start_new_transcription(window, long_audio=True)
table_widget: QTableWidget = window.findChild(QTableWidget) table_widget: QTableWidget = window.findChild(QTableWidget)
@ -205,12 +204,16 @@ class TestMainWindow:
window.close() window.close()
@staticmethod @staticmethod
def _start_new_transcription(window: MainWindow): def _start_new_transcription(window: MainWindow, long_audio: bool = False):
with patch( with patch(
"PyQt6.QtWidgets.QFileDialog.getOpenFileNames" "PyQt6.QtWidgets.QFileDialog.getOpenFileNames"
) as open_file_names_mock: ) as open_file_names_mock:
open_file_names_mock.return_value = ( open_file_names_mock.return_value = (
[get_test_asset("whisper-french.mp3")], [
get_test_asset(
"audio-long.mp3" if long_audio else "whisper-french.mp3"
)
],
"", "",
) )
new_transcription_action = TestMainWindow._get_toolbar_action( new_transcription_action = TestMainWindow._get_toolbar_action(
@ -226,11 +229,18 @@ class TestMainWindow:
@staticmethod @staticmethod
def get_assert_task_status_callback( def get_assert_task_status_callback(
table_widget: QTableWidget, row_index: int, expected_status: str table_widget: QTableWidget,
row_index: int,
expected_status: str,
long_audio: bool = False,
): ):
def assert_task_status(): def assert_task_status():
assert table_widget.rowCount() > 0 assert table_widget.rowCount() > 0
assert table_widget.item(row_index, 1).text() == "whisper-french.mp3" assert table_widget.item(row_index, 1).text() == (
"audio-long.mp3" if long_audio else "whisper-french.mp3"
)
assert expected_status in table_widget.item(row_index, 4).text() assert expected_status in table_widget.item(row_index, 4).text()
return assert_task_status return assert_task_status

View file

@ -1,14 +1,38 @@
import sys
import pytest
from buzz.widgets.model_type_combo_box import ModelTypeComboBox from buzz.widgets.model_type_combo_box import ModelTypeComboBox
class TestModelTypeComboBox: class TestModelTypeComboBox:
def test_should_display_items(self, qtbot): @pytest.mark.parametrize(
"model_types",
[
pytest.param(
[
"Whisper",
"Whisper.cpp",
"Hugging Face",
"Faster Whisper",
"OpenAI Whisper API",
],
marks=pytest.mark.skipif(
sys.platform == "linux", reason="Skip on Linux"
),
),
pytest.param(
["Whisper.cpp", "OpenAI Whisper API"],
marks=pytest.mark.skipif(
sys.platform != "linux", reason="Skip on non-Linux"
),
),
],
)
def test_should_display_items(self, qtbot, model_types):
widget = ModelTypeComboBox() widget = ModelTypeComboBox()
qtbot.add_widget(widget) qtbot.add_widget(widget)
assert widget.count() == 5 assert widget.count() == len(model_types)
assert widget.itemText(0) == "Whisper" for index, model_type in enumerate(model_types):
assert widget.itemText(1) == "Whisper.cpp" assert widget.itemText(index) == model_type
assert widget.itemText(2) == "Hugging Face"
assert widget.itemText(3) == "Faster Whisper"
assert widget.itemText(4) == "OpenAI Whisper API"
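The parametrization mirrors the reduced model list on Linux. ModelTypeComboBox itself is not shown in this diff; the behaviour being asserted amounts to filtering on ModelType.is_available(), roughly like this sketch (class name hypothetical):

```python
# Minimal sketch of the behaviour the parametrized test expects: only model
# types whose backends are importable on this platform become combo-box items.
from PyQt6.QtWidgets import QComboBox

from buzz.model_loader import ModelType


class AvailableModelTypeComboBox(QComboBox):  # hypothetical name
    def __init__(self, parent=None):
        super().__init__(parent)
        for model_type in ModelType:
            if model_type.is_available():
                self.addItem(model_type.value)
```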

View file

@ -25,7 +25,7 @@ class TestFolderWatchPreferencesWidget:
file_transcription_options=FileTranscriptionPreferences( file_transcription_options=FileTranscriptionPreferences(
language=None, language=None,
task=Task.TRANSCRIBE, task=Task.TRANSCRIBE,
model=TranscriptionModel(), model=TranscriptionModel.default(),
word_level_timings=False, word_level_timings=False,
temperature=DEFAULT_WHISPER_TEMPERATURE, temperature=DEFAULT_WHISPER_TEMPERATURE,
initial_prompt="", initial_prompt="",

View file

@ -6,8 +6,6 @@ from PyQt6.QtWidgets import QComboBox, QPushButton
from pytestqt.qtbot import QtBot from pytestqt.qtbot import QtBot
from buzz.model_loader import ( from buzz.model_loader import (
get_whisper_file_path,
WhisperModelSize,
TranscriptionModel, TranscriptionModel,
ModelType, ModelType,
) )
@ -20,9 +18,11 @@ from tests.model_loader import get_model_path
class TestModelsPreferencesWidget: class TestModelsPreferencesWidget:
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def clear_model_cache(self): def clear_model_cache(self):
file_path = get_whisper_file_path(size=WhisperModelSize.TINY) for model_type in ModelType:
if os.path.isfile(file_path): if model_type.is_available():
os.remove(file_path) path = TranscriptionModel(model_type=model_type).get_local_model_path()
if path and os.path.isfile(path):
os.remove(path)
def test_should_show_model_list(self, qtbot): def test_should_show_model_list(self, qtbot):
widget = ModelsPreferencesWidget() widget = ModelsPreferencesWidget()
@ -55,11 +55,7 @@ class TestModelsPreferencesWidget:
) )
qtbot.add_widget(widget) qtbot.add_widget(widget)
model = TranscriptionModel( assert widget.model.get_local_model_path() is None
model_type=ModelType.WHISPER, whisper_model_size=WhisperModelSize.TINY
)
assert model.get_local_model_path() is None
available_item = widget.model_list_widget.topLevelItem(1) available_item = widget.model_list_widget.topLevelItem(1)
assert available_item.text(0) == "Available for Download" assert available_item.text(0) == "Available for Download"
@ -87,20 +83,15 @@ class TestModelsPreferencesWidget:
or _available_item.child(0).text(0) != "Tiny" or _available_item.child(0).text(0) != "Tiny"
) )
# model file exists assert os.path.isfile(widget.model.get_local_model_path())
assert os.path.isfile(get_whisper_file_path(size=model.whisper_model_size))
qtbot.wait_until(callback=downloaded_model, timeout=60_000) qtbot.wait_until(callback=downloaded_model, timeout=60_000)
@pytest.fixture(scope="class") @pytest.fixture(scope="class")
def whisper_tiny_model_path(self) -> str: def default_model_path(self) -> str:
return get_model_path( return get_model_path(transcription_model=(TranscriptionModel.default()))
transcription_model=TranscriptionModel(
model_type=ModelType.WHISPER, whisper_model_size=WhisperModelSize.TINY
)
)
def test_should_show_downloaded_model(self, qtbot, whisper_tiny_model_path): def test_should_show_downloaded_model(self, qtbot, default_model_path):
widget = ModelsPreferencesWidget() widget = ModelsPreferencesWidget()
widget.show() widget.show()
qtbot.add_widget(widget) qtbot.add_widget(widget)
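TranscriptionModel.default() replaces the hard-coded Whisper/Tiny default in these tests, but its body is not included in this diff. Judging from the helper the folder-watcher test below builds by hand, it presumably resolves to the first available model type; a free-function sketch of that assumption:

```python
from buzz.model_loader import ModelType, TranscriptionModel


def default_transcription_model() -> TranscriptionModel:
    # Assumed behaviour of TranscriptionModel.default(): pick the first model
    # type whose backend is importable on the current platform.
    model_type = next(t for t in ModelType if t.is_available())
    return TranscriptionModel(model_type=model_type)
```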

View file

@ -4,7 +4,7 @@ from tempfile import mkdtemp
from pytestqt.qtbot import QtBot from pytestqt.qtbot import QtBot
from buzz.model_loader import TranscriptionModel from buzz.model_loader import TranscriptionModel, ModelType
from buzz.transcriber import ( from buzz.transcriber import (
Task, Task,
DEFAULT_WHISPER_TEMPERATURE, DEFAULT_WHISPER_TEMPERATURE,
@ -24,6 +24,12 @@ from buzz.widgets.transcription_task_folder_watcher import (
class TestTranscriptionTaskFolderWatcher: class TestTranscriptionTaskFolderWatcher:
def default_model(self):
model_type = next(
model_type for model_type in ModelType if model_type.is_available()
)
return TranscriptionModel(model_type=model_type)
def test_should_add_task_not_in_tasks(self, qtbot: QtBot): def test_should_add_task_not_in_tasks(self, qtbot: QtBot):
input_directory = mkdtemp() input_directory = mkdtemp()
watcher = TranscriptionTaskFolderWatcher( watcher = TranscriptionTaskFolderWatcher(
@ -35,7 +41,7 @@ class TestTranscriptionTaskFolderWatcher:
file_transcription_options=FileTranscriptionPreferences( file_transcription_options=FileTranscriptionPreferences(
language=None, language=None,
task=Task.TRANSCRIBE, task=Task.TRANSCRIBE,
model=TranscriptionModel(), model=self.default_model(),
word_level_timings=False, word_level_timings=False,
temperature=DEFAULT_WHISPER_TEMPERATURE, temperature=DEFAULT_WHISPER_TEMPERATURE,
initial_prompt="", initial_prompt="",
@ -76,7 +82,7 @@ class TestTranscriptionTaskFolderWatcher:
file_transcription_options=FileTranscriptionPreferences( file_transcription_options=FileTranscriptionPreferences(
language=None, language=None,
task=Task.TRANSCRIBE, task=Task.TRANSCRIBE,
model=TranscriptionModel(), model=self.default_model(),
word_level_timings=False, word_level_timings=False,
temperature=DEFAULT_WHISPER_TEMPERATURE, temperature=DEFAULT_WHISPER_TEMPERATURE,
initial_prompt="", initial_prompt="",