Delete line action, plus undo implemented for various commands #180

Merged · 64 commits · Jun 1, 2024

Commits
e411d2e
Provisional delete lines command
machinewrapped May 25, 2024
7041afd
Undo stack
machinewrapped May 25, 2024
5364aaa
Type hints
machinewrapped May 25, 2024
885d5be
Return valid type when deletion fails
machinewrapped May 25, 2024
7def0e6
Reset selection when lines are deleted
machinewrapped May 25, 2024
6a519b2
Use queued connections as much as possible
machinewrapped May 26, 2024
c313c49
Undo for merge lines
machinewrapped May 26, 2024
34c1167
Allow multiple model updates per command
machinewrapped May 26, 2024
fe45704
Undo for split/merge scene commands
machinewrapped May 26, 2024
b4957e3
Fix undo for merge scenes
machinewrapped May 26, 2024
d09f4ec
Fix for undo merge scenes
machinewrapped May 26, 2024
6bd403f
More precise viewmodel update for line merge
machinewrapped May 26, 2024
fb46ffc
Display subtitle line number in debugger
machinewrapped May 26, 2024
258b728
Fix for undo merge batches
machinewrapped May 28, 2024
8ec7399
Rename Reset -> Clear
machinewrapped May 28, 2024
dd3c95f
Only write project after batching if the flag is set
machinewrapped May 28, 2024
a0adfeb
Allow merging a single line without exception
machinewrapped May 28, 2024
5262be1
Unit tests for merge commands + undo
machinewrapped May 28, 2024
d79379e
Merge branch 'main' into delete-line
machinewrapped May 28, 2024
64fdbf4
Debug repr
machinewrapped May 28, 2024
ff3f48e
Moved batch settings back into test case
machinewrapped May 29, 2024
1b103e5
Split scene test
machinewrapped May 29, 2024
9c1c91e
Rename test file/class
machinewrapped May 29, 2024
c4c85f2
Fix for split batch undo
machinewrapped May 29, 2024
c10b1b9
Test for SplitBatchCommand
machinewrapped May 29, 2024
913130f
Undo for AutoSplitBatchCommand + tests
machinewrapped May 29, 2024
7b685c5
Just pass line numbers to MergeLinesCommand
machinewrapped May 29, 2024
41faf35
Merge lines test
machinewrapped May 29, 2024
bfc0b8a
Removed obsolete import
machinewrapped May 29, 2024
709e4d2
Redo commands
machinewrapped May 30, 2024
d812121
Queue thread safety
machinewrapped May 30, 2024
49c4119
Added base class for subtitle tests
machinewrapped May 30, 2024
131a9c3
Deleted lines test
machinewrapped May 30, 2024
aef7b8f
Validate that undo restores contents
machinewrapped May 30, 2024
969121b
Renamed file to match the test
machinewrapped May 30, 2024
7ddcb90
Validate batch sizes
machinewrapped May 30, 2024
173202d
Removed a TODO
machinewrapped May 30, 2024
8d385a1
More delete tests
machinewrapped May 30, 2024
03c9df5
Separate test for batching
machinewrapped May 30, 2024
9b86053
Cleanup
machinewrapped May 30, 2024
175e029
Consolidate options in base class
machinewrapped May 31, 2024
5804dd9
Reparse translations tests
machinewrapped Jun 1, 2024
8908be1
Made can_undo true by default
machinewrapped Jun 1, 2024
89de309
Don't renumber everything on save
machinewrapped Jun 1, 2024
ea0f10f
Removed some dead lines
machinewrapped Jun 1, 2024
ecaff61
Undo for reparse translations
machinewrapped Jun 1, 2024
69c9811
Fixed undo for reparse translations
machinewrapped Jun 1, 2024
b270e53
Update undo/redo tooltips
machinewrapped Jun 1, 2024
50a530a
Clear the undo stack when a command can't be undone
machinewrapped Jun 1, 2024
20564d0
Restore summaries on undo merge batches
machinewrapped Jun 1, 2024
1ff5ecc
Restore summaries on undo merge scenes
machinewrapped Jun 1, 2024
0096841
Fix for undo delete lines
machinewrapped Jun 1, 2024
114354c
Don't save project file automatically after reparse
machinewrapped Jun 1, 2024
ca4684c
Try to update proxy model when underlying model changes
machinewrapped Jun 1, 2024
3feaa9b
Don't modify parser translated when matching to originals
machinewrapped Jun 1, 2024
428e158
Don't renumber lines on load either
machinewrapped Jun 1, 2024
c2e9d35
Don't renumber lines on post-process
machinewrapped Jun 1, 2024
8270652
Update a TODO
machinewrapped Jun 1, 2024
17d82ed
Ensure scenes and batches are numbered correctly
machinewrapped Jun 1, 2024
70f95e2
Fix merging/preserving line translations
machinewrapped Jun 1, 2024
d208d52
Fix merge test logs
machinewrapped Jun 1, 2024
4a9581c
Validate merged translations
machinewrapped Jun 1, 2024
7b3ded4
Undid -> Undone
machinewrapped Jun 1, 2024
9b56755
Make InsertLines do what it says
machinewrapped Jun 1, 2024
16 changes: 14 additions & 2 deletions GUI/Command.py
@@ -20,15 +20,16 @@ def __init__(self, datamodel : ProjectDataModel = None):
QRunnable.__init__(self)
QObject.__init__(self)
self.datamodel = datamodel
self.can_undo : bool = True
self.is_blocking : bool = True
self.started : bool = False
self.executed : bool = False
self.aborted : bool = False
self.terminal : bool = False
self.callback = None
self.undo_callback = None
self.model_update = ModelUpdate()
self.commands_to_queue : list = []
self.model_updates : list[ModelUpdate] = []
self.commands_to_queue : list[Command] = []

def SetDataModel(self, datamodel):
self.datamodel = datamodel
@@ -44,6 +45,14 @@ def Abort(self):
self.aborted = True
self.on_abort()

def AddModelUpdate(self) -> ModelUpdate:
update = ModelUpdate()
self.model_updates.append(update)
return update

def ClearModelUpdates(self):
self.model_updates = []

@Slot()
def run(self):
if self.aborted:
@@ -98,3 +107,6 @@ def __init__(self, command : Command, *args: object) -> None:
super().__init__(*args)
self.command = command

class UndoError(CommandError):
pass

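The heart of the PR's plumbing: a command can now publish several ModelUpdates per execution via AddModelUpdate(), and can_undo lets it opt out of history. A minimal, self-contained sketch of the intended pattern follows — DeleteLinesCommand and its plain-dict "model" are illustrative stand-ins, not code from this PR:

    class ModelUpdate:
        """Stand-in for the real ModelUpdate's add/update/remove collections."""
        def __init__(self):
            self.lines: dict[int, str | None] = {}

    class Command:
        def __init__(self):
            self.can_undo: bool = True
            self.model_updates: list[ModelUpdate] = []

        def AddModelUpdate(self) -> ModelUpdate:
            update = ModelUpdate()
            self.model_updates.append(update)
            return update

    class DeleteLinesCommand(Command):
        """Deletes lines and caches their content so undo can restore them."""
        def __init__(self, lines: dict[int, str], line_numbers: list[int]):
            super().__init__()
            self.lines = lines
            self.line_numbers = line_numbers
            self.deleted: dict[int, str] = {}

        def execute(self) -> bool:
            update = self.AddModelUpdate()
            for number in self.line_numbers:
                self.deleted[number] = self.lines.pop(number)
                update.lines[number] = None      # signal removal to the view
            return True

        def undo(self) -> bool:
            update = self.AddModelUpdate()       # undo publishes its own update
            for number, text in self.deleted.items():
                self.lines[number] = text        # restore the cached content
                update.lines[number] = text
            return True

    lines = {1: "Hello", 2: "World", 3: "Goodbye"}
    command = DeleteLinesCommand(lines, [2, 3])
    command.execute()
    assert lines == {1: "Hello"}
    command.undo()
    assert lines == {1: "Hello", 2: "World", 3: "Goodbye"}

Caching the deleted content on the command itself is what lets undo() restore state without re-reading the project file — the same idea the PR's "Validate that undo restores contents" test exercises.
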
143 changes: 119 additions & 24 deletions GUI/CommandQueue.py
@@ -1,6 +1,6 @@
import logging

from PySide6.QtCore import QObject, Signal, QThreadPool, QRecursiveMutex, QMutexLocker
from PySide6.QtCore import Qt, QObject, Signal, QThreadPool, QRecursiveMutex, QMutexLocker

from GUI.Command import Command
from GUI.ProjectDataModel import ProjectDataModel
@@ -25,18 +25,21 @@ class CommandQueue(QObject):
"""
commandAdded = Signal(object)
commandExecuted = Signal(object, bool)

commandUndone = Signal(object)

def __init__(self, parent):
super().__init__(parent)

self.logger = logging.getLogger("CommandQueue")

self.queue = []
self.undo_stack = []
self.queue : list[Command] = []
self.undo_stack : list[Command] = []
self.redo_stack : list[Command] = []

self.mutex = QRecursiveMutex()

self.command_pool = QThreadPool(self)

self.SetMaxThreadCount(1)

def SetMaxThreadCount(self, count):
@@ -46,16 +49,43 @@ def SetMaxThreadCount(self, count):
self.command_pool.setMaxThreadCount(count)

@property
def queue_size(self):
def queue_size(self) -> int:
"""
Number of commands in the queue
"""
with QMutexLocker(self.mutex):
return len(self.queue)

@property
def has_commands(self) -> bool:
"""
Check if the queue has any commands
"""
with QMutexLocker(self.mutex):
return len(self.queue) > 0

@property
def has_blocking_commands(self) -> bool:
with QMutexLocker(self.mutex):
return any( command.is_blocking for command in self.queue )

@property
def can_undo(self) -> bool:
with QMutexLocker(self.mutex):
return len(self.undo_stack) > 0 and self.undo_stack[-1].can_undo

@property
def can_redo(self) -> bool:
with QMutexLocker(self.mutex):
return len(self.redo_stack) > 0

def Stop(self):
"""
Shut the background thread down
"""
if self.queue_size > 0:
self._clear_command_queue()

self.command_pool.waitForDone()

def AddCommand(self, command: Command, datamodel: ProjectDataModel = None, callback=None, undo_callback=None):
@@ -67,18 +97,53 @@ def AddCommand(self, command: Command, datamodel: ProjectDataModel = None, callback=None, undo_callback=None):

self.logger.debug(f"Adding a {type(command).__name__} command to the queue")
command.setParent(self)
command.setAutoDelete(False)

with QMutexLocker(self.mutex):
if isinstance(command, ClearCommandQueue):
command.execute()
self._clear_command_queue()
else:
self._queue_command(command, datamodel, callback, undo_callback)
self._clear_redo_stack()

self.commandAdded.emit(command)

self._start_command_queue()

def UndoLastCommand(self):
"""
Undo the last command in the undo stack
"""
with QMutexLocker(self.mutex):
command = self.undo_stack.pop()
if not command.can_undo:
self.logger.error(f"Cannot undo the last {type(command).__name__} command")
return

self.redo_stack.append(command)

self.logger.info(f"Undoing {type(command).__name__}")
command.undo()

self.commandUndone.emit(command)

def RedoLastCommand(self):
"""
Redo the last command in the redo stack
"""
with QMutexLocker(self.mutex):
command = self.redo_stack.pop()
if not command:
self.logger.warning("No commands to redo")
return

self._queue_command(command)

self.logger.info(f"Redoing {type(command).__name__}")
self.commandAdded.emit(command)
self._start_command_queue()

def Contains(self, command_type: type = None, type_list : list[type] = None):
"""
Check if the queue contains a command type(s)
@@ -89,32 +154,39 @@ def Contains(self, command_type: type = None, type_list : list[type] = None):
if type_list:
if any( self.Contains(type) for type in type_list ):
return True

return command_type and any( [ isinstance(command, command_type) ] for command in self.queue )

def AnyCommands(self):
"""
Any commands in the queue?
"""
return True if self.queue else False

def AnyBlocking(self):

with QMutexLocker(self.mutex):
return command_type and any( [ isinstance(command, command_type) ] for command in self.queue )

def ClearUndoStack(self):
"""
Any blocking commands in the queue?
Clear the undo stack
"""
return any( command.is_blocking for command in self.queue )
with QMutexLocker(self.mutex):
for command in self.undo_stack:
command.deleteLater()
for command in self.redo_stack:
command.deleteLater()

self.undo_stack = []
self.redo_stack = []

def _on_command_executed(self, command: Command, success: bool):
"""
Handle command callbacks, and queuing further actions
"""
if not command.aborted:
self.logger.debug(f"A {type(command).__name__} command was completed")


command.commandExecuted.disconnect(self._on_command_executed)

with QMutexLocker(self.mutex):
self.undo_stack.append(command)
self.queue.remove(command)

if not command.can_undo:
self.ClearUndoStack()

self.commandExecuted.emit(command, success)

can_proceed = not command.aborted and not command.terminal
@@ -128,7 +200,7 @@ def _on_command_executed(self, command: Command, success: bool):

if not command.aborted:
self._start_command_queue()

def _queue_command(self, command: Command, datamodel: ProjectDataModel = None, callback=None, undo_callback=None):
"""
Add a command to the worker thread queue
@@ -142,9 +214,13 @@ def _queue_command(self, command: Command, datamodel: ProjectDataModel = None, callback=None, undo_callback=None):
if undo_callback:
command.SetUndoCallback(undo_callback)

command.commandExecuted.connect(self._on_command_executed)
command.started = False
command.commandExecuted.connect(self._on_command_executed, Qt.ConnectionType.QueuedConnection)

def _start_command_queue(self):
"""
Start the command queue
"""
if not self.queue:
return

@@ -160,12 +236,31 @@ def _start_command_queue(self):
break

def _clear_command_queue(self):
"""
Abort any running commands and clear the command queue
"""
self.logger.debug(f"Clearing command queue")

# Remove commands that haven't been started
self.queue = [command for command in self.queue if command.started]
with QMutexLocker(self.mutex):
# Remove commands that haven't been started
self.queue = [command for command in self.queue if command.started]

# Pop the remaining commands from the queue and abort
while True:
with QMutexLocker(self.mutex):
if not self.queue:
break

command = self.queue.pop(0)

# Request termination of remaining commands
for command in self.queue:
command.Abort()

def _clear_redo_stack(self):
"""
Remove commands from the redo stack and delete them
"""
with QMutexLocker(self.mutex):
for command in self.redo_stack:
command.deleteLater()

self.redo_stack = []
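
The queue now keeps paired undo and redo stacks: _on_command_executed pushes each finished command onto the undo stack, UndoLastCommand moves it to the redo stack, RedoLastCommand re-queues it, and adding a fresh command clears the redo stack. A stripped-down sketch of that bookkeeping, without the threading, mutexes, or Qt signals (ToyCommandQueue is illustrative, not the PR's class):

    class ToyCommandQueue:
        """Toy model of the CommandQueue undo/redo bookkeeping."""
        def __init__(self):
            self.undo_stack = []
            self.redo_stack = []

        def add_command(self, command):
            self.redo_stack.clear()      # a new command invalidates redo history
            command.execute()
            self._on_command_executed(command)

        def undo_last_command(self):
            command = self.undo_stack.pop()
            command.undo()
            self.redo_stack.append(command)

        def redo_last_command(self):
            command = self.redo_stack.pop()
            command.execute()            # redo re-runs the command
            self._on_command_executed(command)

        def _on_command_executed(self, command):
            self.undo_stack.append(command)
            if not getattr(command, "can_undo", True):
                # mirror ClearUndoStack(): history before this point is unusable
                self.undo_stack.clear()
                self.redo_stack.clear()

Note that redo deliberately re-executes the command rather than replaying cached state, which is why RedoLastCommand goes back through _queue_command, the commandAdded signal, and the normal queue start.
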
36 changes: 32 additions & 4 deletions GUI/Commands/AutoSplitBatchCommand.py
@@ -11,6 +11,7 @@ def __init__(self, scene_number : int, batch_number : int, datamodel: ProjectDataModel = None):
super().__init__(datamodel)
self.scene_number = scene_number
self.batch_number = batch_number
self.split_line = None

def execute(self):
logging.info(f"Auto-splitting batch {str(self.scene_number)} batch {str(self.batch_number)}")
@@ -38,13 +39,40 @@ def execute(self):
validator.ValidateBatch(new_batch)

# Remove lines from the original batch that are in the new batch now
model_update = self.AddModelUpdate()
for line_removed in range(new_batch.first_line_number, new_batch.last_line_number + 1):
self.model_update.lines.remove((self.scene_number, self.batch_number, line_removed))
model_update.lines.remove((self.scene_number, self.batch_number, line_removed))

for batch_number in range(self.batch_number + 1, len(scene.batches)):
self.model_update.batches.update((self.scene_number, batch_number), { 'number' : batch_number + 1})
model_update.batches.update((self.scene_number, batch_number), { 'number' : batch_number + 1})

self.model_update.batches.update((self.scene_number, self.batch_number), { 'errors' : split_batch.errors })
self.model_update.batches.add((self.scene_number, new_batch_number), scene.GetBatch(new_batch_number))
model_update.batches.update((self.scene_number, self.batch_number), { 'errors' : split_batch.errors })
model_update.batches.add((self.scene_number, new_batch_number), scene.GetBatch(new_batch_number))

self.split_line = new_batch.first_line_number
return True

def undo(self):
project: SubtitleProject = self.datamodel.project

scene = project.subtitles.GetScene(self.scene_number)

if not scene or not scene.GetBatch(self.batch_number):
raise CommandError(f"Cannot find scene {self.scene_number} batch {self.batch_number}", command=self)

scene.MergeBatches([self.batch_number, self.batch_number + 1])

merged_batch = scene.GetBatch(self.batch_number)

model_update = self.AddModelUpdate()
model_update.batches.remove((self.scene_number, self.batch_number + 1))
model_update.batches.update((self.scene_number, self.batch_number), { 'errors' : merged_batch.errors })

model_update = self.AddModelUpdate()
for line_number in range(self.split_line, merged_batch.last_line_number + 1):
key = (self.scene_number, self.batch_number, line_number)
line = merged_batch.GetOriginalLine(line_number)
line.translated = merged_batch.GetTranslatedLine(line_number)
model_update.lines.add(key, line)

return True
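
The undo() here depends on split_line captured during execute(): merging batch N with batch N+1 and republishing every line from the recorded split point reverses the split exactly. The same symmetry in miniature, with plain lists standing in for scenes and batches (an illustration, not the real classes):

    def split_batch(batches: list[list[int]], index: int, split_line: int) -> int:
        """Split one batch at split_line; return the split point for undo."""
        batch = batches[index]
        pos = batch.index(split_line)
        batches[index:index + 1] = [batch[:pos], batch[pos:]]
        return split_line

    def undo_split(batches: list[list[int]], index: int) -> None:
        """Merge batch `index` with its successor, exactly reversing the split."""
        batches[index:index + 2] = [batches[index] + batches[index + 1]]

    batches = [[1, 2, 3, 4, 5]]
    split_line = split_batch(batches, 0, split_line=3)
    assert batches == [[1, 2], [3, 4, 5]]
    undo_split(batches, 0)
    assert batches == [[1, 2, 3, 4, 5]]
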
7 changes: 3 additions & 4 deletions GUI/Commands/BatchSubtitlesCommand.py
@@ -18,6 +18,7 @@ def __init__(self, project : SubtitleProject, options : Options):
self.project : SubtitleProject = project
self.options : Options = options
self.preprocess_subtitles = options.get('preprocess_subtitles', False)
self.can_undo = False

def execute(self):
logging.info("Executing BatchSubtitlesCommand")
@@ -42,12 +43,10 @@ def execute(self):
batcher : SubtitleBatcher = SubtitleBatcher(self.options)
project.subtitles.AutoBatch(batcher)

project.WriteProjectFile()
if project.write_project:
project.WriteProjectFile()

self.datamodel : ProjectDataModel = ProjectDataModel(project, self.options)
self.datamodel.CreateViewModel()
return True

def undo(self):
# Do we flatten, or do we cache the previous batches?
pass
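
With batching declared not undoable (can_undo = False), the old placeholder undo — "do we flatten, or do we cache the previous batches?" — goes away; per _on_command_executed above, executing the command now clears the undo stack instead of leaving an un-undoable entry on it. The project file write is also gated on the project's write_project flag. Reusing the ToyCommandQueue sketch from earlier to show the clearing effect (EditCommand and BatchCommand are illustrative):

    class EditCommand:
        can_undo = True
        def execute(self): ...
        def undo(self): ...

    class BatchCommand:
        can_undo = False
        def execute(self): ...

    queue = ToyCommandQueue()
    queue.add_command(EditCommand())
    assert queue.undo_stack            # the edit is undoable
    queue.add_command(BatchCommand())  # batching cannot be undone...
    assert not queue.undo_stack        # ...so the history is cleared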