change: .cpuAndGPU config

main
Yasuhito Nagatomo 2 years ago
parent 102a6c79df
commit 849e0d1676

@@ -25,14 +25,15 @@ You can see how it works through the simple sample code.
![Image](images/ss0_1280.png)
## Change Log
- [1.0.2 (3)] - 2022-12-16 `[Changed]`
- [1.0.3 (4)] - Dec 18, 2022 `[Changed]`
- Set `MLModelConfiguration.computeUnits` to `.cpuAndGPU` when running on mobile devices (see the configuration sketch after this change log excerpt).
- [1.0.2 (3)] - Dec 16, 2022 `[Changed]`
- The apple/ml-stable-diffusion Swift Package v0.1.0 was released.
- In the apple/ml-stable-diffusion Swift Package v0.1.0, a `reduceMemory` option was added to
`StableDiffusionPipeline(resourcesAt:)`. On iOS, this option should be set to `true`.
- This option is now set to `true` in `ImageGenerator.swift` when creating the `StableDiffusionPipeline`.
- According to the updated apple/ml-stable-diffusion README, the iPhone requirement was changed to iPhone 12 Pro or later.
- [1.0.1 (2)] - 2022-12-08 `[Changed]`
- [1.0.1 (2)] - Dec 8, 2022 `[Changed]`
- Changed to delay creation of the `StableDiffusionPipeline` until the first image
generation and to run it in a background task.
- This eliminates the freeze when starting the app, but it takes time
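
A consolidated sketch of the 1.0.3 configuration change described above follows. This is a minimal illustration assuming the apple/ml-stable-diffusion Swift Package v0.1.0 API used in this commit; the resource path is a placeholder, not a path from this project.

```swift
// Minimal sketch of the 1.0.3 change (assumes ml-stable-diffusion v0.1.0).
import Foundation
import CoreML
import StableDiffusion

// Placeholder: point this at the folder containing the compiled Core ML resources.
let resourceURL = URL(fileURLWithPath: "/path/to/StableDiffusionResources")

let runningOnMac = ProcessInfo.processInfo.isiOSAppOnMac
let config = MLModelConfiguration()
if !runningOnMac {
    // On iPhone/iPad hardware, use CPU+GPU compute units (the 1.0.3 change).
    config.computeUnits = .cpuAndGPU
}

do {
    // reduceMemory was added in v0.1.0 and should be true on iOS devices.
    let pipeline = try StableDiffusionPipeline(resourcesAt: resourceURL,
                                               configuration: config,
                                               reduceMemory: !runningOnMac)
    _ = pipeline // use the pipeline for image generation
} catch {
    print("Failed to create the Stable Diffusion pipeline: \(error)")
}
```

On "My Mac (Designed for iPhone)" the defaults are kept, matching the `ProcessInfo` check in the `ImageGenerator.swift` diff below.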
@@ -98,6 +99,11 @@ Now you can build the project, targeting to iPhone, iPad, or My Mac (Designed fo
## Consideration
### Extended Virtual Address Space and Increased Memory Limit capabilities
- If you encounter memory-limit issues on mobile devices, please try adding the `Increased Memory Limit` and `Extended Virtual Address Space` capabilities to your App ID. This adds the corresponding entitlements to your Xcode project.
- Please make sure that you use an App ID that has the "Extended Virtual Address Space" and "Increased Memory Limit" capabilities registered on the Developer - Identifiers site; otherwise Xcode displays signing and capabilities errors.
### Large binary file
- Since the model files are very large (about 2.5 GB), they make the app binary large.

@@ -305,7 +305,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 3;
CURRENT_PROJECT_VERSION = 4;
DEVELOPMENT_ASSET_PATHS = "\"imggensd2/Preview Content\"";
DEVELOPMENT_TEAM = J5CY9Q9UP5;
ENABLE_PREVIEWS = YES;
@@ -321,7 +321,7 @@
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0.2;
MARKETING_VERSION = 1.0.3;
PRODUCT_BUNDLE_IDENTIFIER = com.atarayosd.imggensd2;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = YES;
@@ -336,7 +336,7 @@
ASSETCATALOG_COMPILER_APPICON_NAME = AppIcon;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 3;
CURRENT_PROJECT_VERSION = 4;
DEVELOPMENT_ASSET_PATHS = "\"imggensd2/Preview Content\"";
DEVELOPMENT_TEAM = J5CY9Q9UP5;
ENABLE_PREVIEWS = YES;
@@ -352,7 +352,7 @@
"$(inherited)",
"@executable_path/Frameworks",
);
MARKETING_VERSION = 1.0.2;
MARKETING_VERSION = 1.0.3;
PRODUCT_BUNDLE_IDENTIFIER = com.atarayosd.imggensd2;
PRODUCT_NAME = "$(TARGET_NAME)";
SWIFT_EMIT_LOC_STRINGS = YES;

@@ -7,6 +7,7 @@
import UIKit
import StableDiffusion
import CoreML
@MainActor
final class ImageGenerator: ObservableObject {
@@ -77,10 +78,17 @@ final class ImageGenerator: ObservableObject {
}
let resourceURL = URL(fileURLWithPath: path)
let config = MLModelConfiguration()
if !ProcessInfo.processInfo.isiOSAppOnMac {
config.computeUnits = .cpuAndGPU
}
// reduceMemory option was added at v0.1.0
// On iOS, the reduceMemory option should be set to true
let reduceMemory = !ProcessInfo.processInfo.isiOSAppOnMac
if let pipeline = try? StableDiffusionPipeline(resourcesAt: resourceURL,
reduceMemory: true) {
configuration: config,
reduceMemory: reduceMemory) {
await self.setPipeline(pipeline)
} else {
fatalError("Fatal error: failed to create the Stable-Diffusion-Pipeline.")
@@ -89,12 +97,10 @@ final class ImageGenerator: ObservableObject {
if let sdPipeline = await self.sdPipeline {
do {
// API:
// generateImages(prompt: String, imageCount: Int = 1, stepCount: Int = 50, seed: Int = 0,
// disableSafety: Bool = false,
// progressHandler: (StableDiffusionPipeline.Progress) -> Bool = { _ in true })
// throws -> [CGImage?]
// TODO: use the progressHandler
// if you would like to use the progressHandler,
// please check another repo, AR Diffusion Museum:
// https://github.com/ynagatomo/ARDiffMuseum
// It handles the progressHandler and displays the images being generated step by step.
let cgImages = try sdPipeline.generateImages(prompt: parameter.prompt,
imageCount: parameter.imageCount,
stepCount: parameter.stepCount,
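
For reference, here is a minimal sketch of wiring up the `progressHandler` mentioned in the comments above, based only on the `generateImages` signature quoted in the removed comment (v0.1.0). The `step` and `stepCount` properties of `StableDiffusionPipeline.Progress` are assumed from that release; adjust them to your package version.

```swift
import CoreGraphics
import StableDiffusion

// Sketch: generate images while reporting per-step progress (v0.1.0 API shape).
func generateWithProgress(pipeline: StableDiffusionPipeline,
                          prompt: String,
                          stepCount: Int = 20) throws -> [CGImage?] {
    try pipeline.generateImages(
        prompt: prompt,
        imageCount: 1,
        stepCount: stepCount,
        progressHandler: { progress in
            // `step` / `stepCount` are assumed Progress properties in v0.1.0.
            print("step \(progress.step) of \(progress.stepCount)")
            return true  // return false to cancel generation
        })
}
```

The AR Diffusion Museum repo linked in the comments shows a fuller version that displays the intermediate images as they are generated.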
