File size: 1,575 Bytes
19c4ddf
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
from abc import ABC, abstractmethod
from typing import Any, Dict, Optional

import torch

from shap_e.models.query import Query
from shap_e.models.renderer import append_tensor
from shap_e.util.collections import AttrDict


class Model(ABC):
    @abstractmethod
    def forward(
        self,
        query: Query,
        params: Optional[Dict[str, torch.Tensor]] = None,
        options: Optional[Dict[str, Any]] = None,
    ) -> AttrDict[str, Any]:
        """
        Predict an attribute given position.

        :param query: query positions (and any other per-point tensors) to
            evaluate the model at.
        :param params: optional override parameters for the model.
        :param options: optional settings dict; implementations may read and
            write entries such as "cache".
        :return: an AttrDict mapping attribute names to predicted tensors.
        """

    def forward_batched(
        self,
        query: Query,
        query_batch_size: int = 4096,
        params: Optional[Dict[str, torch.Tensor]] = None,
        options: Optional[Dict[str, Any]] = None,
    ) -> AttrDict[str, Any]:
        """
        Evaluate the model on the query in chunks along dim 1 and concatenate
        the per-chunk outputs, to bound peak memory usage.

        :param query: the query to evaluate; its tensors are sliced along
            dim 1 (presumably the per-point axis — batch is dim 0).
        :param query_batch_size: maximum number of points per forward call.
        :param params: optional override parameters, passed through unchanged.
        :param options: optional AttrDict of settings. A "cache" entry is
            created for the duration of this call if not already present, so
            sub-calls can share intermediate results.
        :return: the concatenated outputs, as if the model were evaluated on
            the full query at once.
        """
        if options is None:
            # Bug fix: the original dereferenced options.cache unconditionally,
            # so omitting `options` raised AttributeError on None. Default to
            # an empty AttrDict so the cache bookkeeping below works.
            options = AttrDict()

        if not query.position.numel():
            # Avoid torch.cat() of zero tensors.
            return self(query, params=params, options=options)

        # NOTE(review): options.cache relies on AttrDict returning None for
        # missing keys — a plain dict here would raise AttributeError.
        # Track whether we created the cache so we only delete what we own
        # (nested calls must not destroy an outer caller's cache).
        if options.cache is None:
            created_cache = True
            options.cache = AttrDict()
        else:
            created_cache = False

        results_list = AttrDict()
        for i in range(0, query.position.shape[1], query_batch_size):
            # Bind i as a default argument so the lambda captures the current
            # chunk offset rather than the final value of the loop variable.
            out = self(
                query=query.map_tensors(lambda x, i=i: x[:, i : i + query_batch_size]),
                params=params,
                options=options,
            )
            # Accumulate each attribute's chunk into a list for later cat().
            results_list = results_list.combine(out, append_tensor)

        if created_cache:
            # Only the call that created the temporary cache removes it.
            del options["cache"]

        return results_list.map(lambda key, tensor_list: torch.cat(tensor_list, dim=1))