Detect: Preserve dict key order

Ross Patterson 2019-08-10 16:15:40 -07:00
parent d084c38875
commit 705473c266
4 changed files with 38 additions and 22 deletions
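
The change is mechanical but worth spelling out: every mapping the detection code builds or loads from JSON becomes a collections.OrderedDict, so key order survives generation, serialization, and re-loading. A minimal sketch of that pattern, written for this note rather than taken from the repository (the 'linux'/'vaapi' values are illustrative):

# Editorial sketch of the pattern this commit applies; not repository code.
# Before Python 3.7 a plain dict does not guarantee insertion order, so
# generated and re-loaded JSON could come back with its keys shuffled.
import collections
import json

hwaccels = collections.OrderedDict()
hwaccels['api_avail'] = collections.OrderedDict(linux=['vaapi', 'vdpau'])

serialized = json.dumps(hwaccels)  # json.dumps follows the mapping's own order
restored = json.loads(serialized, object_pairs_hook=collections.OrderedDict)
assert list(restored) == list(hwaccels)  # key order survives the round trip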

View File

@@ -4,6 +4,7 @@ Retrieve and process all the external data for hardware detection.
 """
 import sys
+import collections
 import math
 import json
@@ -51,12 +52,12 @@ def get_hwaccel_data():
     platform_cols = api_avail_table.loc[0][1:]
     api_rows = api_avail_table[0][2:]
-    hwaccels = {}
-    hwaccels['api_avail'] = platforms = {}
+    hwaccels = collections.OrderedDict()
+    hwaccels['api_avail'] = platforms = collections.OrderedDict()
     for gpu_vendor_idx, gpu_vendor in enumerate(gpu_vendor_cols):
         platform = platform_cols[gpu_vendor_idx + 1]
         platform = PLATFORM_TO_PY.get(platform, platform)
-        gpu_vendors = platforms.setdefault(platform, {})
+        gpu_vendors = platforms.setdefault(platform, collections.OrderedDict())
         avail_hwaccels = gpu_vendors.setdefault(gpu_vendor, [])
         for api_idx, api in enumerate(api_rows):
             if api_avail_table[gpu_vendor_idx + 1][api_idx + 2] != 'N':
@@ -74,7 +75,9 @@ def get_nvidia_data():
     (
         nvenc_recent, nvenc_consumer, nvenc_workstation, nvenc_virt,
         nvdec_recent, nvdec_consumer, nvdec_workstation, nvdec_virt) = tables
-    nvidia = dict(lines=[], model_lines={}, boards={})
+    nvidia = collections.OrderedDict(
+        lines=[], model_lines=collections.OrderedDict(),
+        boards=collections.OrderedDict())
     # Compile aggregate data needed to parse individual rows
     for nvenc_table in (
@@ -99,7 +102,7 @@ def get_nvidia_data():
                 continue
             # Assemble the data for this row to use for each model or range
-            model_data = {}
+            model_data = collections.OrderedDict()
             for key, value in nvenc_row.items():
                 if value in {'YES', 'NO'}:
                     model_data[key] = value == 'YES'
@@ -125,7 +128,7 @@ def get_nvidia_data():
     # GTX model numbers
     for model_line, model_line_suffixes in NVIDIA_LINE_SUFFIXES.items():
         models_data = nvidia['model_lines'][model_line]['models']
-        for model_num in models_data:
+        for model_num in list(models_data):
            for model_line_suffix in model_line_suffixes:
                if model_num.startswith(model_line_suffix + ' '):
                    models_data[model_num[
@@ -152,7 +155,7 @@ def main():
     """
     Download ffmpeg detection data.
     """
-    data = dict(
+    data = collections.OrderedDict(
         hwaccels=get_hwaccel_data(),
         nvidia=get_nvidia_data(),
     )

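One change above is not about OrderedDict: the model-number loop now iterates over list(models_data) because the loop body inserts new keys into the same mapping. A hedged illustration with a made-up suffix and model number, not data from detect.json:

# Illustration only: mutating a dict while iterating over it raises
# "RuntimeError: dictionary changed size during iteration" on Python 3,
# so the loop walks a list() snapshot of the keys instead.
import collections

models_data = collections.OrderedDict([('gtx 980 ti', 'GeForce GTX 980 Ti')])
for model_num in list(models_data):  # snapshot of the keys before mutation
    if model_num.startswith('gtx '):
        # Also register the board under the suffix-less model number.
        models_data[model_num[len('gtx '):]] = models_data[model_num]

assert list(models_data) == ['gtx 980 ti', '980 ti']
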
View File

@@ -24,6 +24,7 @@ import sys
 import platform
 import os
 import copy
+import collections
 import re
 import json
 import logging
@@ -57,7 +58,7 @@ HWACCEL_OUTPUT_FORMATS = {
     'vaapi': 'vaapi'}
 GPU_PRODUCT_RE = re.compile(r'(?P<chip>[^[]+)(\[(?P<board>[^]]+)\]|)')
-GPU_WMI_PROPERTIES = dict(
+GPU_WMI_PROPERTIES = collections.OrderedDict(
     vendor='AdapterCompatibility', board='VideoProcessor')
 # Loaded from JSON
@@ -75,12 +76,14 @@ def detect_gpus():
         # TODO: Android and other Linux'es that don't have `lshw`
         display_output = subprocess.check_output(
             ['lshw', '-class', 'display', '-json'])
-        displays_data = json.loads(display_output.decode().strip().strip(','))
+        displays_data = json.loads(
+            display_output.decode().strip().strip(','),
+            object_pairs_hook=collections.OrderedDict)
         if not isinstance(displays_data, list):
             # TODO: Confirm this is how `lshw` handles multiple GPUs
             displays_data = [displays_data]
         for display_data in displays_data:
-            gpu = dict(
+            gpu = collections.OrderedDict(
                 vendor=display_data['vendor'].replace(' Corporation', ''))
             # TODO get multiple GPUs from lshw
             gpus.append(gpu)
@@ -94,7 +97,7 @@ def detect_gpus():
     elif plat_sys == 'Windows':
         import wmi
         for controller in wmi.WMI().Win32_VideoController():
-            gpu = {}
+            gpu = collections.OrderedDict()
             for key, wmi_prop in GPU_WMI_PROPERTIES.items():
                 value = controller.wmi_property(wmi_prop).value
                 if value:
@@ -179,7 +182,8 @@ def detect_codecs(decoder, encoder, hwaccels=None, cmd='ffmpeg'):
             'Could not detect a supported encoder for {0!r}'.format(encoder))
     codecs_kwargs = []
-    default_kwargs = dict(output=dict(codec=avail_encoders[0]))
+    default_kwargs = collections.OrderedDict(
+        output=collections.OrderedDict(codec=avail_encoders[0]))
     for hwaccel in hwaccels_data['hwaccels']:
         if hwaccel['codecs']:
@@ -190,9 +194,9 @@ def detect_codecs(decoder, encoder, hwaccels=None, cmd='ffmpeg'):
                 # Remove hwaccel codecs from future consideration.
                 hwaccel_encoder = hwaccel_encoder
                 avail_encoders.remove(hwaccel_encoder)
-                hwaccel_kwargs = dict(
-                    input=dict(hwaccel=hwaccel['name']),
-                    output=dict(codec=hwaccel_encoder))
+                hwaccel_kwargs = collections.OrderedDict(
+                    input=collections.OrderedDict(hwaccel=hwaccel['name']),
+                    output=collections.OrderedDict(codec=hwaccel_encoder))
                 if hwaccel['name'] in HWACCEL_OUTPUT_FORMATS:
                     hwaccel_kwargs['input']['hwaccel_output_format'] = (
                         HWACCEL_OUTPUT_FORMATS[hwaccel['name']])
@@ -210,7 +214,8 @@ def detect_codecs(decoder, encoder, hwaccels=None, cmd='ffmpeg'):
         else:
             # This hwaccel doesn't require specific coders.
             hwaccel_kwargs = copy.deepcopy(default_kwargs)
-            hwaccel_kwargs['input'] = dict(hwaccel=hwaccel['name'])
+            hwaccel_kwargs['input'] = collections.OrderedDict(
+                hwaccel=hwaccel['name'])
             codecs_kwargs.append(hwaccel_kwargs)
     codecs_kwargs.append(default_kwargs)
@@ -232,7 +237,8 @@ def _get_data():
     if DATA is None:
         with open(os.path.join(
                 os.path.dirname(__file__), 'detect.json')) as data_opened:
-            DATA = json.load(data_opened)
+            DATA = json.load(
+                data_opened, object_pairs_hook=collections.OrderedDict)
     return DATA
@@ -243,7 +249,7 @@ def _parse_models(
     Parse model lines, sets and ranges from a boards string.
     """
     if model_lines_data is None:
-        model_lines_data = {}
+        model_lines_data = collections.OrderedDict()
     boards = boards.strip().lower()
     model_line_positions = [
@@ -261,7 +267,8 @@
         if model_group:
             # First item is a model range for the previous model line
             model_line_data = model_lines_data.setdefault(
-                model_line, dict(models={}, model_ranges=[]))
+                model_line, collections.OrderedDict(
+                    models=collections.OrderedDict(), model_ranges=[]))
             models = []
             for model_split in model_group.split('/'):
@@ -301,7 +308,7 @@ def main(args=None):
     Dump all ffmpeg build data to json.
     """
     args = parser.parse_args(args)
-    data = dict(
+    data = collections.OrderedDict(
         gpus=detect_gpus(),
         hwaccels=detect_hwaccels(cmd=args.ffmpeg),
         codecs=detect_codecs(cmd=args.ffmpeg))

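The json.load and json.loads calls above gain object_pairs_hook=collections.OrderedDict, the standard-library hook for keeping keys in document order. A standalone demonstration with a made-up JSON snippet:

# Stand-alone demonstration; the JSON text is made up. object_pairs_hook
# receives each object's key/value pairs in document order, including nested
# objects, so OrderedDict preserves that order all the way down.
import collections
import json

text = '{"vendor": "nvidia", "boards": {"980 ti": true, "titan x": false}}'
data = json.loads(text, object_pairs_hook=collections.OrderedDict)
assert list(data) == ['vendor', 'boards']
assert list(data['boards']) == ['980 ti', 'titan x']
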
View File

@@ -1,3 +1,8 @@
+"""
+Run ffprobe on the file and return a JSON representation of the output.
+"""
+import collections
 import json
 import subprocess
 from ._run import Error
@@ -21,7 +26,8 @@ def probe(filename, cmd='ffprobe', **kwargs):
     out, err = p.communicate()
     if p.returncode != 0:
         raise Error('ffprobe', out, err)
-    return json.loads(out.decode('utf-8'))
+    return json.loads(
+        out.decode('utf-8'), object_pairs_hook=collections.OrderedDict)
 __all__ = ['probe']

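On the write side nothing needs to change: json.dump emits keys in the mapping's own order as long as sort_keys is left at its default of False, which is why regenerating detect.json (next file) only reorders entries. A small sketch, with illustrative data and an assumed output path:

# Sketch of the write side; the data and output path are illustrative.
# json.dump keeps the OrderedDict's key order because sort_keys defaults to
# False, so the order chosen at generation time is what lands in the file.
import collections
import json

data = collections.OrderedDict()
data['hwaccels'] = collections.OrderedDict()
data['nvidia'] = collections.OrderedDict()
with open('detect.json', 'w') as detect_json:
    json.dump(data, detect_json, indent=2)
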
View File

@@ -130,8 +130,8 @@
 "920a": "GeForce GTX 910M / 920M / 920A",
 "980mx": "GeForce GTX 965M > 980M / 980MX",
 "980 ti": "GeForce GTX 980 Ti",
"titan xp": "GeForce GTX Titan X / Titan Xp",
"titan black": "GeForce GTX Titan / Titan Black", "titan black": "GeForce GTX Titan / Titan Black",
"titan xp": "GeForce GTX Titan X / Titan Xp",
"titan x": "GeForce GTX Titan X / Titan Xp", "titan x": "GeForce GTX Titan X / Titan Xp",
"titan z": "GeForce GTX Titan Z", "titan z": "GeForce GTX Titan Z",
"titan": "GeForce GTX Titan / Titan Black" "titan": "GeForce GTX Titan / Titan Black"