mirror of
https://github.com/RVC-Boss/GPT-SoVITS.git
synced 2025-08-16 06:09:50 +08:00
Add a Docker build PowerShell script (docker_build.ps1) for Windows
This commit is contained in:
parent
6a2ab63e18
commit
89438d6001
73
docker_build.ps1
Normal file
73
docker_build.ps1
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
$ErrorActionPreference = "Stop"
|
||||||
|
|
||||||
|
$ScriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||||
|
Set-Location $ScriptDir
|
||||||
|
|
||||||
|
if (-not (Get-Command "docker" -ErrorAction SilentlyContinue)) {
|
||||||
|
Write-Host "Docker Not Found"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
$Lite = $false
|
||||||
|
$CudaVersion = "12.6"
|
||||||
|
|
||||||
|
function Write-Help {
|
||||||
|
Write-Host @"
|
||||||
|
Usage: powershell -File docker_build.ps1 [OPTIONS]
|
||||||
|
|
||||||
|
Options:
|
||||||
|
--cuda 12.6|12.8 Specify the CUDA VERSION (REQUIRED)
|
||||||
|
--lite Build a Lite Image
|
||||||
|
-h, --help Show this help message and exit
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
powershell -File docker_build.ps1 --cuda 12.6 --lite
|
||||||
|
"@
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($args.Count -eq 0) {
|
||||||
|
Write-Help
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
for ($i = 0; $i -lt $args.Count; $i++) {
|
||||||
|
switch ($args[$i]) {
|
||||||
|
'--cuda' {
|
||||||
|
$i++
|
||||||
|
$val = $args[$i]
|
||||||
|
if ($val -ne "12.6" -and $val -ne "12.8") {
|
||||||
|
Write-Host "Error: Invalid CUDA_VERSION: $val"
|
||||||
|
Write-Host "Choose From: [12.6, 12.8]"
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
$CudaVersion = $val
|
||||||
|
}
|
||||||
|
'--lite' {
|
||||||
|
$Lite = $true
|
||||||
|
}
|
||||||
|
'-h' { Write-Help; exit 0 }
|
||||||
|
'--help' { Write-Help; exit 0 }
|
||||||
|
default {
|
||||||
|
Write-Host "Unknown Argument: $($args[$i])"
|
||||||
|
Write-Host "Use -h or --help to see available options."
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
$arch = (Get-CimInstance Win32_Processor).Architecture
|
||||||
|
$TargetPlatform = if ($arch -eq 9) { "linux/amd64" } else { "linux/arm64" }
|
||||||
|
|
||||||
|
if ($Lite) {
|
||||||
|
$TorchBase = "lite"
|
||||||
|
} else {
|
||||||
|
$TorchBase = "full"
|
||||||
|
}
|
||||||
|
|
||||||
|
docker build `
|
||||||
|
--build-arg CUDA_VERSION=$CudaVersion `
|
||||||
|
--build-arg LITE=$Lite `
|
||||||
|
--build-arg TARGETPLATFORM=$TargetPlatform `
|
||||||
|
--build-arg TORCH_BASE=$TorchBase `
|
||||||
|
-t "$env:USERNAME/gpt-sovits:local" `
|
||||||
|
.
|
@ -25,7 +25,7 @@ print_help() {
|
|||||||
echo " -h, --help Show this help message and exit"
|
echo " -h, --help Show this help message and exit"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Examples:"
|
echo "Examples:"
|
||||||
echo " bash docker_build.sh --cuda 12.6 --funasr --faster-whisper"
|
echo " bash docker_build.sh --cuda 12.6"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Show help if no arguments provided
|
# Show help if no arguments provided
|
||||||
|
@ -34,8 +34,8 @@ print_help() {
|
|||||||
echo " -h, --help Show this help message and exit"
|
echo " -h, --help Show this help message and exit"
|
||||||
echo ""
|
echo ""
|
||||||
echo "Examples:"
|
echo "Examples:"
|
||||||
echo " bash install.sh --source HF --download-uvr5"
|
echo " bash install.sh --device CU128 --source HF --download-uvr5"
|
||||||
echo " bash install.sh --source ModelScope"
|
echo " bash install.sh --device MPS --source ModelScope"
|
||||||
}
|
}
|
||||||
|
|
||||||
# Show help if no arguments provided
|
# Show help if no arguments provided
|
||||||
@ -149,7 +149,6 @@ else
|
|||||||
echo "Installing,Please Wait..."
|
echo "Installing,Please Wait..."
|
||||||
fi
|
fi
|
||||||
done
|
done
|
||||||
conda install -c conda-forge -q -y
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "Installing ffmpeg and cmake..."
|
echo "Installing ffmpeg and cmake..."
|
||||||
|
@ -88,7 +88,7 @@ class Subfix:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def max_index(self):
|
def max_index(self):
|
||||||
return len(self.transcriptions_list)
|
return len(self.transcriptions_list) - 1
|
||||||
|
|
||||||
def load_list(self, list_path: str):
|
def load_list(self, list_path: str):
|
||||||
with open(list_path, mode="r", encoding="utf-8") as f:
|
with open(list_path, mode="r", encoding="utf-8") as f:
|
||||||
@ -126,7 +126,7 @@ class Subfix:
|
|||||||
checkboxs = []
|
checkboxs = []
|
||||||
with LOCK:
|
with LOCK:
|
||||||
for i in range(index, index + self.batch_size):
|
for i in range(index, index + self.batch_size):
|
||||||
if i <= self.max_index - 1:
|
if i <= self.max_index:
|
||||||
audios.append(gr.Audio(value=self.transcriptions_list[i][0]))
|
audios.append(gr.Audio(value=self.transcriptions_list[i][0]))
|
||||||
texts.append(gr.Textbox(value=self.transcriptions_list[i][3], label=self.i18n("Text") + f" {i}"))
|
texts.append(gr.Textbox(value=self.transcriptions_list[i][3], label=self.i18n("Text") + f" {i}"))
|
||||||
languages.append(gr.Dropdown(value=self.transcriptions_list[i][2]))
|
languages.append(gr.Dropdown(value=self.transcriptions_list[i][2]))
|
||||||
@ -140,10 +140,8 @@ class Subfix:
|
|||||||
|
|
||||||
def next_page(self, index: int):
|
def next_page(self, index: int):
|
||||||
batch_size = self.batch_size
|
batch_size = self.batch_size
|
||||||
max_index = self.max_index - batch_size
|
max_index = max(self.max_index - batch_size + 1, 0)
|
||||||
if max_index <= 0:
|
index = min(index + batch_size, max_index)
|
||||||
max_index = 1
|
|
||||||
index = min(index + batch_size, max_index - 1)
|
|
||||||
return gr.Slider(value=index), *self.change_index(index)
|
return gr.Slider(value=index), *self.change_index(index)
|
||||||
|
|
||||||
def previous_page(self, index: int):
|
def previous_page(self, index: int):
|
||||||
@ -153,7 +151,7 @@ class Subfix:
|
|||||||
|
|
||||||
def delete_audio(self, index, *selected):
|
def delete_audio(self, index, *selected):
|
||||||
delete_index = [i + index for i, _ in enumerate(selected) if _]
|
delete_index = [i + index for i, _ in enumerate(selected) if _]
|
||||||
delete_index = [i for i in delete_index if i < self.max_index - 1]
|
delete_index = [i for i in delete_index if i < self.max_index]
|
||||||
for idx in delete_index[::-1]:
|
for idx in delete_index[::-1]:
|
||||||
self.transcriptions_list.pop(idx)
|
self.transcriptions_list.pop(idx)
|
||||||
self.save_list()
|
self.save_list()
|
||||||
@ -167,7 +165,8 @@ class Subfix:
|
|||||||
languages = input[len(input) // 2 :]
|
languages = input[len(input) // 2 :]
|
||||||
if texts is None or languages is None:
|
if texts is None or languages is None:
|
||||||
raise ValueError()
|
raise ValueError()
|
||||||
for idx in range(index, min(index + batch_size, self.max_index - 1)):
|
print(index, min(index + batch_size, self.max_index))
|
||||||
|
for idx in range(index, min(index + batch_size, self.max_index + 1)):
|
||||||
self.transcriptions_list[idx][3] = texts[idx - index].strip().strip("\n")
|
self.transcriptions_list[idx][3] = texts[idx - index].strip().strip("\n")
|
||||||
self.transcriptions_list[idx][2] = languages[idx - index]
|
self.transcriptions_list[idx][2] = languages[idx - index]
|
||||||
result = self.save_list()
|
result = self.save_list()
|
||||||
@ -178,7 +177,7 @@ class Subfix:
|
|||||||
def merge_audio(self, index, *selected):
|
def merge_audio(self, index, *selected):
|
||||||
batch_size = self.batch_size
|
batch_size = self.batch_size
|
||||||
merge_index = [i + index for i, _ in enumerate(selected) if _]
|
merge_index = [i + index for i, _ in enumerate(selected) if _]
|
||||||
merge_index = [i for i in merge_index if i < self.max_index - 1]
|
merge_index = [i for i in merge_index if i < self.max_index]
|
||||||
if len(merge_index) < 2:
|
if len(merge_index) < 2:
|
||||||
return *(gr.skip() for _ in range(batch_size * 3 + 1)), *(gr.Checkbox(False) for _ in range(batch_size))
|
return *(gr.skip() for _ in range(batch_size * 3 + 1)), *(gr.Checkbox(False) for _ in range(batch_size))
|
||||||
else:
|
else:
|
||||||
@ -211,7 +210,7 @@ class Subfix:
|
|||||||
self.batch_size = batch_size
|
self.batch_size = batch_size
|
||||||
for i in range(index, index + batch_size):
|
for i in range(index, index + batch_size):
|
||||||
with gr.Row(equal_height=True):
|
with gr.Row(equal_height=True):
|
||||||
if i <= self.max_index - 1:
|
if i <= self.max_index:
|
||||||
with gr.Column(scale=2, min_width=160):
|
with gr.Column(scale=2, min_width=160):
|
||||||
textbox_tmp = gr.Textbox(
|
textbox_tmp = gr.Textbox(
|
||||||
value=self.transcriptions_list[i][3],
|
value=self.transcriptions_list[i][3],
|
||||||
@ -281,7 +280,7 @@ class Subfix:
|
|||||||
self.selections.append(selection_tmp)
|
self.selections.append(selection_tmp)
|
||||||
with gr.Row(equal_height=True):
|
with gr.Row(equal_height=True):
|
||||||
with gr.Column(scale=2, min_width=160):
|
with gr.Column(scale=2, min_width=160):
|
||||||
self.close_button = gr.Button(value=i18n("关闭打标WebUI"), variant="stop")
|
self.close_button = gr.Button(value=i18n("保存并关闭打标WebUI"), variant="stop")
|
||||||
with gr.Column(scale=1, min_width=160):
|
with gr.Column(scale=1, min_width=160):
|
||||||
self.previous_index_button2 = gr.Button(value=i18n("上一页"))
|
self.previous_index_button2 = gr.Button(value=i18n("上一页"))
|
||||||
with gr.Column(scale=1, min_width=160):
|
with gr.Column(scale=1, min_width=160):
|
||||||
@ -507,12 +506,12 @@ def main(list_path: str = "", i18n_lang="Auto", port=9871, share=False):
|
|||||||
with gr.Blocks(analytics_enabled=False) as app:
|
with gr.Blocks(analytics_enabled=False) as app:
|
||||||
subfix = Subfix(I18nAuto(i18n_lang))
|
subfix = Subfix(I18nAuto(i18n_lang))
|
||||||
subfix.render(list_path=list_path)
|
subfix.render(list_path=list_path)
|
||||||
if subfix.max_index > 0:
|
if subfix.max_index >= 0:
|
||||||
timer = gr.Timer(0.1)
|
timer = gr.Timer(0.1)
|
||||||
|
|
||||||
timer.tick(
|
timer.tick(
|
||||||
fn=lambda: (
|
fn=lambda: (
|
||||||
gr.Slider(value=0, maximum=subfix.max_index),
|
gr.Slider(value=0, maximum=subfix.max_index, step=1),
|
||||||
gr.Slider(value=10),
|
gr.Slider(value=10),
|
||||||
gr.Timer(active=False),
|
gr.Timer(active=False),
|
||||||
),
|
),
|
||||||
@ -531,13 +530,13 @@ def main(list_path: str = "", i18n_lang="Auto", port=9871, share=False):
|
|||||||
inputs=[],
|
inputs=[],
|
||||||
outputs=[],
|
outputs=[],
|
||||||
)
|
)
|
||||||
app.queue().launch(
|
app.queue().launch(
|
||||||
server_name="0.0.0.0",
|
server_name="0.0.0.0",
|
||||||
inbrowser=True,
|
inbrowser=True,
|
||||||
share=share,
|
share=share,
|
||||||
server_port=port,
|
server_port=port,
|
||||||
quiet=False,
|
quiet=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
if __name__ == "__main__":
|
||||||
|
Loading…
x
Reference in New Issue
Block a user