[build-system]
requires = ["setuptools >= 75.0"]
build-backend = "setuptools.build_meta"

[project]
name = "flux-schnell-edge-inference"
description = "An edge-maxxing model submission by RobertML for the 4090 Flux contest"
requires-python = ">=3.10,<3.13"
version = "8"
dependencies = [
  "diffusers==0.31.0",
  "transformers==4.46.2",
  "accelerate==1.1.0",
  "omegaconf==2.3.0",
  "torch==2.5.1",
  "protobuf==5.28.3",
  "sentencepiece==0.2.0",
  "edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
  "gitpython>=3.1.43",
  "hf_transfer==0.1.8",
  "torchao==0.6.1",
]
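# Note: every dependency above is pinned to an exact version; for a contest
# submission this presumably keeps the inference environment reproducible
# across judging runs on the target 4090 hardware.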

[[tool.edge-maxxing.models]]
repository = "golaststep/100260"
revision = "d7b0352a7a7c61694367f59d571050df5deb1065"
exclude = ["transformer"]
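# Assumption: each [[tool.edge-maxxing.models]] table pins a Hugging Face-style
# repository to a fixed revision, and 'exclude' presumably skips downloading the
# named subfolder (here the base transformer weights, likely supplied instead by
# one of the repositories listed below).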

[[tool.edge-maxxing.models]]
repository = "golaststep/99261"
revision = "e01dc7862700b0af46af986f2bf08541f6ef1911"

[[tool.edge-maxxing.models]]
repository = "golaststep/98262"
revision = "88f9a80f5a397868b4103b48382bbf292780e1f6"

[[tool.edge-maxxing.models]]
repository = "golaststep/97263"
revision = "f1c19b5bd1838937851bdcacbddfd8bd82d01c22"


[project.scripts]
start_inference = "main:main"
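# 'main:main' uses console-script entry-point syntax: running start_inference
# invokes the main() function in main.py at the project root (presumably how
# the contest harness launches the pipeline).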