Skip to content

liblaf.flame_pytorch.upstream ¤

Modules:

  • config
  • flame

    FLAME Layer: Implementation of the 3D Statistical Face model in PyTorch

Classes:

  • FLAME

    Given flame parameters this class generates a differentiable FLAME function

Functions:

FLAME ¤

FLAME(config)

Bases: Module


              flowchart TD
              liblaf.flame_pytorch.upstream.FLAME[FLAME]

              

              click liblaf.flame_pytorch.upstream.FLAME href "" "liblaf.flame_pytorch.upstream.FLAME"
            

Given FLAME parameters, this class generates a differentiable FLAME function which outputs a mesh and 3D facial landmarks

Methods:

Attributes:

Source code in src/liblaf/flame_pytorch/upstream/flame.py
 43
 44
 45
 46
 47
 48
 49
 50
 51
 52
 53
 54
 55
 56
 57
 58
 59
 60
 61
 62
 63
 64
 65
 66
 67
 68
 69
 70
 71
 72
 73
 74
 75
 76
 77
 78
 79
 80
 81
 82
 83
 84
 85
 86
 87
 88
 89
 90
 91
 92
 93
 94
 95
 96
 97
 98
 99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
def __init__(self, config):
    """Build the FLAME decoder: load the statistical model from disk and
    register every fixed tensor (buffer) and frozen parameter that
    ``forward`` needs.

    Args:
        config: namespace providing ``flame_model_path``, ``batch_size``,
            ``shape_params``, ``expression_params``, ``use_face_contour``,
            ``use_3D_translation``, ``static_landmark_embedding_path`` and,
            when ``use_face_contour`` is set,
            ``dynamic_landmark_embedding_path``.
    """
    super().__init__()
    print("creating the FLAME Decoder")
    with open(config.flame_model_path, "rb") as f:
        # latin1 is required to unpickle the Python-2-era FLAME model file.
        # NOTE(review): pickle.load is unsafe on untrusted paths; the model
        # file is assumed to come from the official FLAME release.
        self.flame_model = Struct(**pickle.load(f, encoding="latin1"))
    self.NECK_IDX = 1  # index of the neck joint in the kinematic tree
    self.batch_size = config.batch_size
    self.dtype = torch.float32
    self.use_face_contour = config.use_face_contour
    self.faces = self.flame_model.f
    self.register_buffer(
        "faces_tensor",
        to_tensor(to_np(self.faces, dtype=np.int64), dtype=torch.long),
    )

    def _register_frozen(name, num_cols):
        # Register a (batch_size, num_cols) all-zero, non-trainable parameter.
        zeros = torch.zeros(
            [self.batch_size, num_cols], dtype=self.dtype, requires_grad=False
        )
        self.register_parameter(name, nn.Parameter(zeros, requires_grad=False))

    # FLAME has 300 shape and 100 expression betas in total; the tail not
    # exposed through the config is fixed at zero (e.g. the RingNet project
    # uses only 100 shape / 50 expression parameters).
    _register_frozen("shape_betas", 300 - config.shape_params)
    _register_frozen("expression_betas", 100 - config.expression_params)

    # Default (zero) eyeball rotation: 3 axis-angle components per eye.
    _register_frozen("eye_pose", 6)

    # Default (zero) neck rotation.
    _register_frozen("neck_pose", 3)

    # 3D translation is fixed at zero since translation in the image plane
    # is used instead; ``forward`` applies it only when this flag is set.
    self.use_3D_translation = config.use_3D_translation
    _register_frozen("transl", 3)

    # The vertices of the template model
    self.register_buffer(
        "v_template",
        to_tensor(to_np(self.flame_model.v_template), dtype=self.dtype),
    )

    # The shape (and expression) blendshape components
    self.register_buffer(
        "shapedirs",
        to_tensor(to_np(self.flame_model.shapedirs), dtype=self.dtype),
    )

    # Regressor producing joint locations from vertices
    self.register_buffer(
        "J_regressor",
        to_tensor(to_np(self.flame_model.J_regressor), dtype=self.dtype),
    )

    # Pose blend shape basis, flattened to (num_pose_basis, V * 3)
    num_pose_basis = self.flame_model.posedirs.shape[-1]
    posedirs = np.reshape(self.flame_model.posedirs, [-1, num_pose_basis]).T
    self.register_buffer("posedirs", to_tensor(to_np(posedirs), dtype=self.dtype))

    # Parent index of each joint; the root joint is marked with -1.
    parents = to_tensor(to_np(self.flame_model.kintree_table[0])).long()
    parents[0] = -1
    self.register_buffer("parents", parents)

    # Linear blend skinning weights
    self.register_buffer(
        "lbs_weights", to_tensor(to_np(self.flame_model.weights), dtype=self.dtype)
    )

    # Static landmark embeddings: triangle indices + barycentric coordinates.
    with open(config.static_landmark_embedding_path, "rb") as f:
        static_embeddings = Struct(**pickle.load(f, encoding="latin1"))

    lmk_faces_idx = (static_embeddings.lmk_face_idx).astype(np.int64)
    self.register_buffer(
        "lmk_faces_idx", torch.tensor(lmk_faces_idx, dtype=torch.long)
    )
    self.register_buffer(
        "lmk_bary_coords",
        torch.tensor(static_embeddings.lmk_b_coords, dtype=self.dtype),
    )

    if self.use_face_contour:
        # Dynamic (pose-dependent) contour landmark embeddings.
        conture_embeddings = np.load(
            config.dynamic_landmark_embedding_path,
            allow_pickle=True,
            encoding="latin1",
        )
        conture_embeddings = conture_embeddings[()]
        dynamic_lmk_faces_idx = np.array(conture_embeddings["lmk_face_idx"]).astype(
            np.int64
        )
        self.register_buffer(
            "dynamic_lmk_faces_idx",
            torch.tensor(dynamic_lmk_faces_idx, dtype=torch.long),
        )

        dynamic_lmk_bary_coords = np.array(conture_embeddings["lmk_b_coords"])
        self.register_buffer(
            "dynamic_lmk_bary_coords",
            torch.tensor(dynamic_lmk_bary_coords, dtype=self.dtype),
        )

        # Walk from the neck joint up to the root (parent -1 terminates);
        # relies on ``parents`` having been registered above.
        neck_kin_chain = []
        curr_idx = torch.tensor(self.NECK_IDX, dtype=torch.long)
        while curr_idx != -1:
            neck_kin_chain.append(curr_idx)
            curr_idx = self.parents[curr_idx]
        self.register_buffer("neck_kin_chain", torch.stack(neck_kin_chain))

NECK_IDX instance-attribute ¤

NECK_IDX = 1

batch_size instance-attribute ¤

batch_size = batch_size

dtype instance-attribute ¤

dtype = float32

faces instance-attribute ¤

faces = f

flame_model instance-attribute ¤

flame_model = Struct(**(load(f, encoding='latin1')))

use_3D_translation instance-attribute ¤

use_3D_translation = use_3D_translation

use_face_contour instance-attribute ¤

use_face_contour = use_face_contour

forward ¤

forward(
    shape_params=None,
    expression_params=None,
    pose_params=None,
    neck_pose=None,
    eye_pose=None,
    transl=None,
)
Input:

  • shape_params: N X number of shape parameters
  • expression_params: N X number of expression parameters
  • pose_params: N X number of pose parameters

Return:

  • vertices: N X V X 3
  • landmarks: N X number of landmarks X 3

Source code in src/liblaf/flame_pytorch/upstream/flame.py
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
def forward(
    self,
    shape_params=None,
    expression_params=None,
    pose_params=None,
    neck_pose=None,
    eye_pose=None,
    transl=None,
):
    """
    Input:
        shape_params: N X number of shape parameters
        expression_params: N X number of expression parameters
        pose_params: N X number of pose parameters
            (presumably 3 global-rotation + 3 jaw components; the two
            halves are spliced around the neck pose below)
    return:
        vertices: N X V X 3
        landmarks: N X number of landmarks X 3
    """
    # Fall back to zeros for any omitted argument, mirroring the graceful
    # fallbacks that neck_pose/eye_pose/transl already have (previously a
    # None shape/expression/pose argument crashed in torch.cat/slicing).
    # The full beta vector is 300 shape + 100 expression coefficients, so
    # the caller-supplied part is whatever the frozen tail does not cover.
    if shape_params is None:
        shape_params = torch.zeros(
            [self.batch_size, 300 - self.shape_betas.shape[1]],
            dtype=self.dtype,
            device=self.shape_betas.device,
        )
    if expression_params is None:
        expression_params = torch.zeros(
            [self.batch_size, 100 - self.expression_betas.shape[1]],
            dtype=self.dtype,
            device=self.expression_betas.device,
        )
    if pose_params is None:
        pose_params = torch.zeros(
            [self.batch_size, 6],
            dtype=self.dtype,
            device=self.neck_pose.device,
        )
    neck_pose = neck_pose if neck_pose is not None else self.neck_pose
    eye_pose = eye_pose if eye_pose is not None else self.eye_pose
    transl = transl if transl is not None else self.transl

    # Caller betas are padded with the frozen (zero) tails registered in
    # __init__ to form the full 400-dim coefficient vector.
    betas = torch.cat(
        [shape_params, self.shape_betas, expression_params, self.expression_betas],
        dim=1,
    )
    # Assemble the full pose: global rot, neck, jaw, then both eyes.
    full_pose = torch.cat(
        [pose_params[:, :3], neck_pose, pose_params[:, 3:], eye_pose], dim=1
    )
    template_vertices = self.v_template.unsqueeze(0).repeat(self.batch_size, 1, 1)

    # Linear blend skinning produces the posed, shaped mesh.
    vertices, _ = lbs(
        betas,
        full_pose,
        template_vertices,
        self.shapedirs,
        self.posedirs,
        self.J_regressor,
        self.parents,
        self.lbs_weights,
    )

    # Static landmark embeddings, tiled across the batch.
    lmk_faces_idx = self.lmk_faces_idx.unsqueeze(dim=0).repeat(self.batch_size, 1)
    lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).repeat(
        self.batch_size, 1, 1
    )
    if self.use_face_contour:
        # Contour landmarks depend on head pose; recompute them per batch
        # and prepend to the static set.
        (
            dyn_lmk_faces_idx,
            dyn_lmk_bary_coords,
        ) = self._find_dynamic_lmk_idx_and_bcoords(
            vertices,
            full_pose,
            self.dynamic_lmk_faces_idx,
            self.dynamic_lmk_bary_coords,
            self.neck_kin_chain,
            dtype=self.dtype,
        )

        lmk_faces_idx = torch.cat([dyn_lmk_faces_idx, lmk_faces_idx], 1)
        lmk_bary_coords = torch.cat([dyn_lmk_bary_coords, lmk_bary_coords], 1)

    landmarks = vertices2landmarks(
        vertices, self.faces_tensor, lmk_faces_idx, lmk_bary_coords
    )

    if self.use_3D_translation:
        landmarks += transl.unsqueeze(dim=1)
        vertices += transl.unsqueeze(dim=1)

    return vertices, landmarks

get_config ¤

get_config()
Source code in src/liblaf/flame_pytorch/upstream/config.py
84
85
86
def get_config():
    """Parse command-line arguments and return the resulting config namespace."""
    return parser.parse_args()