Skip to content

liblaf.flame_pytorch.flame ¤

Classes:

FLAME ¤

FLAME(config: FlameConfig | None = None)

Bases: FLAME


              flowchart TD
              liblaf.flame_pytorch.flame.FLAME[FLAME]
              liblaf.flame_pytorch.upstream.flame.FLAME[FLAME]

                              liblaf.flame_pytorch.upstream.flame.FLAME --> liblaf.flame_pytorch.flame.FLAME
                


              click liblaf.flame_pytorch.flame.FLAME href "" "liblaf.flame_pytorch.flame.FLAME"
              click liblaf.flame_pytorch.upstream.flame.FLAME href "" "liblaf.flame_pytorch.upstream.flame.FLAME"
            

Methods:

Attributes:

Source code in src/liblaf/flame_pytorch/flame.py
14
15
16
17
18
19
20
def __init__(self, config: FlameConfig | None = None) -> None:
    """Initialize the FLAME wrapper.

    Falls back to a default ``FlameConfig`` when ``config`` is ``None``,
    delegates model construction to the upstream class, and moves the
    module to the GPU when CUDA is available.
    """
    cfg = FlameConfig() if config is None else config
    super().__init__(cfg)
    self.config = cfg
    # Prefer the GPU whenever one is present.
    if torch.cuda.is_available():
        self.cuda()

NECK_IDX instance-attribute ¤

NECK_IDX = 1

batch_size instance-attribute ¤

batch_size: int

config instance-attribute ¤

config: FlameConfig = config

dtype instance-attribute ¤

dtype: dtype

faces instance-attribute ¤

faces: Integer[ndarray, 'faces 3']

flame_model instance-attribute ¤

flame_model = Struct(**(load(f, encoding='latin1')))

shapedirs instance-attribute ¤

shapedirs: Tensor

use_3D_translation instance-attribute ¤

use_3D_translation: bool

use_face_contour instance-attribute ¤

use_face_contour: bool

__call__ ¤

__call__(
    shape: Float[Tensor, "batch shape"] | None = None,
    expression: Float[Tensor, "batch expression"]
    | None = None,
    pose: Float[Tensor, "batch pose"] | None = None,
    neck_pose: Float[Tensor, "batch 3"] | None = None,
    eye_pose: Float[Tensor, "batch 6"] | None = None,
    translation: Float[Tensor, "batch 3"] | None = None,
) -> tuple[
    Float[Tensor, "batch vertices 3"],
    Float[Tensor, "batch landmarks 3"],
]
Source code in src/liblaf/flame_pytorch/flame.py
18
19
20
21
22
23
24
25
26
    self.config = config
    if torch.cuda.is_available():
        self.cuda()

def forward(  # pyright: ignore[reportIncompatibleMethodOverride]
    self,
    shape: Float[Tensor, "#batch shape"] | None = None,
    expression: Float[Tensor, "#batch expression"] | None = None,
    pose: Float[Tensor, "#batch pose"] | None = None,

forward ¤

forward(
    shape: Float[Tensor, "batch shape"] | None = None,
    expression: Float[Tensor, "batch expression"]
    | None = None,
    pose: Float[Tensor, "batch pose"] | None = None,
    neck_pose: Float[Tensor, "batch 3"] | None = None,
    eye_pose: Float[Tensor, "batch 6"] | None = None,
    translation: Float[Tensor, "batch 3"] | None = None,
) -> tuple[
    Float[Tensor, "batch vertices 3"],
    Float[Tensor, "batch landmarks 3"],
]
Input

shape_params: N × number of shape parameters; expression_params: N × number of expression parameters; pose_params: N × number of pose parameters

Returns: vertices: N × V × 3; landmarks: N × number of landmarks × 3

Source code in src/liblaf/flame_pytorch/flame.py
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
def forward(  # pyright: ignore[reportIncompatibleMethodOverride]
    self,
    shape: Float[Tensor, "#batch shape"] | None = None,
    expression: Float[Tensor, "#batch expression"] | None = None,
    pose: Float[Tensor, "#batch pose"] | None = None,
    neck_pose: Float[Tensor, "#batch 3"] | None = None,
    eye_pose: Float[Tensor, "#batch 6"] | None = None,
    translation: Float[Tensor, "#batch 3"] | None = None,
) -> tuple[Float[Tensor, "#batch vertices 3"], Float[Tensor, "#batch landmarks 3"]]:
    """Evaluate the FLAME model.

    Any of ``shape``, ``expression``, or ``pose`` left as ``None`` is
    replaced by a zero tensor whose batch and parameter sizes come from
    ``self.config``, created on the same device as the model's
    blend-shape basis (``self.shapedirs``).

    Returns:
        A ``(vertices, landmarks)`` pair as produced by the upstream
        ``FLAME.forward`` implementation.
    """

    def _default_zeros(n_params: int) -> Tensor:
        # Zero parameters on the model's device. requires_grad defaults to
        # False for torch.zeros, matching the previous explicit flag.
        return torch.zeros(
            (self.config.batch_size, n_params), device=self.shapedirs.device
        )

    if shape is None:
        shape = _default_zeros(self.config.shape_params)
    if expression is None:
        expression = _default_zeros(self.config.expression_params)
    if pose is None:
        pose = _default_zeros(self.config.pose_params)
    # The upstream implementation uses longer keyword names (and "transl");
    # translate here so callers keep the shorter API.
    return super().forward(
        shape_params=shape,
        expression_params=expression,
        pose_params=pose,
        neck_pose=neck_pose,
        eye_pose=eye_pose,
        transl=translation,
    )