import 'package:flutter/services.dart';
import 'package:yaml/yaml.dart';
/// Singleton application configuration backed by bundled YAML assets.
///
/// Call [initialize] exactly once at startup before reading any getter;
/// getters read the late [_config] map and will throw a
/// `LateInitializationError` if it has not been populated yet.
class Configuration {
  static Configuration? _instance;

  /// The single shared instance, created lazily on first access.
  static Configuration get instance => _instance ??= Configuration._();

  /// Merged configuration values (defaults overlaid with the custom file).
  late Map<String, dynamic> _config;

  // Private constructor — prevents multiple instances.
  Configuration._();

  /// Asset path of the always-present default configuration.
  static const String _defaultConfigPath = 'assets/config/default.yaml';

  /// Loads the default config, then merges an optional custom config on top.
  ///
  /// The custom config path comes from the `CONFIG_PATH` compile-time
  /// environment variable (`--dart-define`), defaulting to
  /// `assets/config/aitube.yaml`. A missing or unreadable custom file is
  /// not fatal: the defaults remain in effect and a warning is printed.
  Future<void> initialize() async {
    // Load default config first.
    final defaultYaml = await rootBundle.loadString(_defaultConfigPath);
    _config = _convertYamlToMap(loadYaml(defaultYaml));

    // Get custom config path from the compile-time environment.
    const customConfigPath = String.fromEnvironment(
      'CONFIG_PATH',
      defaultValue: 'assets/config/aitube.yaml',
    );

    try {
      // Load and merge custom config over the defaults.
      final customYaml = await rootBundle.loadString(customConfigPath);
      final customConfig = _convertYamlToMap(loadYaml(customYaml));
      _mergeConfig(customConfig);
    } catch (e) {
      // Best-effort overlay: keep running on defaults if the custom file
      // is absent or malformed.
      print('Warning: Could not load custom config from $customConfigPath: $e');
    }
  }

  /// Recursively converts a [YamlMap] into a plain `Map<String, dynamic>`.
  ///
  /// Fix: list values are now converted too (via [_convertYamlNode]), so
  /// maps nested inside YAML lists no longer leak `YamlMap` instances
  /// into [_config].
  Map<String, dynamic> _convertYamlToMap(YamlMap yamlMap) {
    return {
      for (final entry in yamlMap.entries)
        entry.key.toString(): _convertYamlNode(entry.value),
    };
  }

  /// Converts any YAML node — map, list, or scalar — to plain Dart values.
  dynamic _convertYamlNode(dynamic node) {
    if (node is YamlMap) return _convertYamlToMap(node);
    if (node is YamlList) return node.map(_convertYamlNode).toList();
    return node;
  }

  /// Deep-merges [customConfig] into [_config].
  ///
  /// Fix: nested maps are now merged recursively at every depth (custom
  /// values win), instead of only one level deep where a custom sub-map
  /// would replace an entire default subtree. For the two-level keys this
  /// file reads, behavior is unchanged.
  void _mergeConfig(Map<String, dynamic> customConfig) {
    _config = _deepMerge(_config, customConfig);
  }

  /// Returns a new map with [overrides] recursively merged over [base].
  Map<String, dynamic> _deepMerge(
    Map<String, dynamic> base,
    Map<String, dynamic> overrides,
  ) {
    final result = Map<String, dynamic>.from(base);
    for (final entry in overrides.entries) {
      final existing = result[entry.key];
      final incoming = entry.value;
      if (existing is Map<String, dynamic> &&
          incoming is Map<String, dynamic>) {
        result[entry.key] = _deepMerge(existing, incoming);
      } else {
        result[entry.key] = incoming;
      }
    }
    return result;
  }

  // ---- Getters for configuration values ----

  /// The product name shown in the UI (`ui.product_name`).
  String get uiProductName =>
    _config['ui']['product_name'];

  /// Whether the chat panel is shown in the video view; defaults to true
  /// when the key is absent.
  bool get showChatInVideoView =>
    _config['ui']['showChatInVideoView'] ?? true;

  /// How many clips should be stored in advance.
  int get renderQueueBufferSize =>
    _config['render_queue']['buffer_size'];

  /// How many requests for clips can be run in parallel.
  int get renderQueueMaxConcurrentGenerations =>
    _config['render_queue']['max_concurrent_generations'];

  /// Start playback as soon as we have a certain percentage of video clips
  /// in memory (eg 25%).
  int get minimumBufferPercentToStartPlayback =>
    _config['render_queue']['minimum_buffer_percent_to_start_playback'];

  /// Transition time between each clip.
  ///
  /// The exit (older) clip will see its playback time reduced by this amount.
  Duration get transitionBufferDuration =>
    Duration(milliseconds: _config['video']['transition_buffer_duration_ms']);

  /// How long a generated clip should be, as a [Duration].
  Duration get originalClipDuration =>
    Duration(seconds: _config['video']['original_clip_duration_seconds']);

  // The model works on resolutions that are divisible by 32
  // and number of frames that are divisible by 8 + 1 (e.g. 257).
  //
  // In case the resolution or number of frames are not divisible
  // by 32 or 8 + 1, the input will be padded with -1 and then
  // cropped to the desired resolution and number of frames.
  //
  // The model works best on resolutions under 720 x 1280 and
  // number of frames below 257.

  /// Number of inference steps.
  ///
  /// This has a direct impact on performance: low values like 12 or 14 work
  /// on "safe bet" prompts, but more uncommon topics need 18 steps or more.
  int get numInferenceSteps =>
    _config['video']['num_inference_steps'];

  /// Classifier-free guidance scale.
  ///
  /// NOTE(review): typed `int` here; if the YAML value is fractional
  /// (guidance scales commonly are, e.g. 7.5) this getter will throw at
  /// runtime — TODO confirm against the config files.
  int get guidanceScale =>
    _config['video']['guidance_scale'];

  /// Original frame-rate of each clip (before we slow them down),
  /// in frames per second (so an integer).
  int get originalClipFrameRate =>
    _config['video']['original_clip_frame_rate'];

  /// Width in pixels of a generated clip.
  int get originalClipWidth =>
    _config['video']['original_clip_width'];

  /// Height in pixels of a generated clip.
  int get originalClipHeight =>
    _config['video']['original_clip_height'];

  /// Playback speed factor applied to generated clips.
  ///
  /// To do more with less, we can slow down the videos (a 3s video will
  /// become a 4s video), but if you are GPU rich feel free to play them
  /// back at 100% of their speed!
  double get clipPlaybackSpeed =>
    _config['video']['clip_playback_speed'].toDouble();

  /// Default negative prompt to avoid harmful content; falls back to a
  /// built-in safety prompt when the key is absent.
  String get defaultNegativePrompt =>
    _config['video']['default_negative_prompt'] ?? 'gore, sex, blood, nudity, nude, porn, erotic, worst quality, deformed, distorted, disfigured, blurry, text, watermark';

  // ---- Computed properties ----

  /// Original number of frames of each clip (before we slow them down).
  ///
  /// ----------------------- IMPORTANT --------------------------
  /// The model has to use a number of frames that can be divided by 8,
  /// so this might not be the actual/final value — this is why sometimes
  /// a final clip can be longer or shorter!
  /// ------------------------------------------------------------
  int get originalClipNumberOfFrames =>
    originalClipFrameRate * originalClipDuration.inSeconds;

  /// Effective playback window of a clip at original speed, after the
  /// transition overlap is subtracted.
  Duration get originalClipPlaybackDuration =>
    originalClipDuration - transitionBufferDuration;

  /// How long a clip should last during playback, as a [Duration].
  ///
  /// That can be different from its original duration: for instance,
  /// playing back a 3 second video at 75% speed gives 3 * (1 / 0.75) = 4s.
  Duration get actualClipDuration => Duration(
    // We use millis for greater precision.
    // Important: we internally use double for the calculation.
    milliseconds: (originalClipDuration.inMilliseconds.toDouble() *
      (1.0 / clipPlaybackSpeed)).round()
  );

  /// Slowed-down playback window, after the transition overlap is subtracted.
  Duration get actualClipPlaybackDuration =>
    actualClipDuration - transitionBufferDuration;
}