Custom Python models

You can add new prediction models to Trendz by writing custom Python code. This code is executed on the server side and has access to the whole input dataset, including the required telemetry and attribute data. You can import the Python libraries you need and use them in your code to forecast the required metric based on the input data.
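
A custom model is implemented as a Python class that extends the IModel interface provided by the Trendz execution environment (CustomModel in the template below). The following sketch outlines that contract, based on the methods the template in the next section defines; the method bodies and the model name are placeholders:

class CustomModel(IModel):

    def init_state(self):
        # create the underlying estimator
        ...

    def train(self, data, additionalData=None):
        # fit the model on the training dataset: a list of (timestamp, value) points
        ...

    def partial_fit(self, data, additionalData=None):
        # incrementally update the model with a new batch of points
        ...

    def predict(self, timestamps):
        # return a list of (timestamp, predicted_value) pairs
        ...

    def save_state(self, file_path):
        # persist the trained model state to file_path
        ...

    def load_state(self, file_path):
        # restore previously saved state from file_path
        ...

    def name(self):
        return "MyModelName"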

Multivariable Python model example

This template demonstrates how to create and implement a custom multivariable prediction model in Trendz using Python. Custom models allow you to extend the platform’s built-in prediction capabilities by leveraging specific algorithms, incorporating additional variables, or fine-tuning parameters to meet unique business needs.

#####################################################
# Prediction Method: Linear Regression

from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.linear_model import LinearRegression
import pickle
import numpy as np
import os

class CustomModel(IModel):
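    # IModel is the base interface that the Trendz execution environment provides to custom prediction models at runtime.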

    def __init__(self, value_transformer=None, timestamp_transformer=None):
        self.model = None
        self.timestamp_transformer = timestamp_transformer if timestamp_transformer else StandardScaler()
        self.value_transformer = value_transformer if value_transformer else MinMaxScaler()
        self.sum_x = 0
        self.sum_y = 0
        self.sum_xy = 0
        self.sum_xx = 0
        self.n = 0

    def init_state(self):
        self.model = LinearRegression()

    def train(self, data, additionalData=None):
        # Prepare
        ts = np.array([point[0] for point in data]).reshape(-1, 1)
        values = np.array([point[1] for point in data]).reshape(-1, 1)
        self.timestamp_transformer.fit(ts)
        self.value_transformer.fit(values)
        ts_scaled = self.timestamp_transformer.transform(ts)
        values_scaled = self.value_transformer.transform(values)

        # Fit
        self.sum_x = np.sum(ts_scaled)
        self.sum_y = np.sum(values_scaled)
        self.sum_xy = np.sum(ts_scaled * values_scaled)
        self.sum_xx = np.sum(ts_scaled ** 2)
        self.n = len(ts_scaled)

        self.model.fit(ts_scaled, values_scaled)

    def partial_fit(self, data, additionalData=None):
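        # Incrementally update the regression from a new batch of points using the running sums,
        # without retraining on the full history.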
        # Prepare
        ts = np.array([point[0] for point in data]).reshape(-1, 1)
        values = np.array([point[1] for point in data]).reshape(-1, 1)
        # self.timestamp_transformer.partial_fit(ts)
        # self.value_transformer.partial_fit(values)
        ts_scaled = self.timestamp_transformer.transform(ts)
        values_scaled = self.value_transformer.transform(values)

        # Fit
        self.sum_x += np.sum(ts_scaled)
        self.sum_y += np.sum(values_scaled)
        self.sum_xy += np.sum(ts_scaled * values_scaled)
        self.sum_xx += np.sum(ts_scaled ** 2)
        self.n += len(ts_scaled)

        if self.n > 0:
            mean_x = self.sum_x / self.n
            mean_y = self.sum_y / self.n
            slope = (self.sum_xy - self.n * mean_x * mean_y) / (self.sum_xx - self.n * mean_x ** 2)
            intercept = mean_y - slope * mean_x
            self.model.coef_ = np.array([[slope]])
            self.model.intercept_ = np.array([intercept])

    def predict(self, timestamps):
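        # Scale the requested timestamps, apply the fitted model, and map predictions back to the original value range.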
        ts = np.array(timestamps).reshape(-1, 1)
        ts_scaled = self.timestamp_transformer.transform(ts)
        predictions_scaled = self.model.predict(ts_scaled)
        predictions = self.value_transformer.inverse_transform(predictions_scaled)
        return list(zip(timestamps, predictions.flatten()))

    def save_state(self, file_path):
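        # Persist the fitted model, both transformers, and the running sums with pickle so a later run can resume from this state.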
        with open(file_path, 'wb') as file:
            state = {
                'model': self.model,
                'value_transformer': self.value_transformer,
                'timestamp_transformer': self.timestamp_transformer,
                'sum_x': self.sum_x,
                'sum_y': self.sum_y,
                'sum_xy': self.sum_xy,
                'sum_xx': self.sum_xx,
                'n': self.n
            }
            pickle.dump(state, file)

    def load_state(self, file_path):
        with open(file_path, 'rb') as file:
            state = pickle.load(file)
            self.model = state['model']
            self.value_transformer = state['value_transformer']
            self.timestamp_transformer = state['timestamp_transformer']
            self.sum_x = state['sum_x']
            self.sum_y = state['sum_y']
            self.sum_xy = state['sum_xy']
            self.sum_xx = state['sum_xx']
            self.n = state['n']

    def name(self):
        return "LinearRegressionModel"

#####################################################
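
The train and partial_fit methods above maintain running sums (sum_x, sum_y, sum_xy, sum_xx, n) so that the slope and intercept of the regression line can be recomputed after each new batch without storing the full history. This is the closed-form least-squares solution for a single feature; the following self-contained snippet (plain NumPy, independent of Trendz) illustrates the same formula:

import numpy as np

# Sample points lying exactly on y = 2*x + 1
x = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
y = np.array([1.0, 3.0, 5.0, 7.0, 9.0])

# The same running sums that train() and partial_fit() accumulate
n = len(x)
sum_x, sum_y = x.sum(), y.sum()
sum_xy, sum_xx = (x * y).sum(), (x * x).sum()

# Closed-form slope and intercept, as computed in partial_fit()
mean_x, mean_y = sum_x / n, sum_y / n
slope = (sum_xy - n * mean_x * mean_y) / (sum_xx - n * mean_x ** 2)
intercept = mean_y - slope * mean_x
print(slope, intercept)  # -> 2.0 1.0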

Next Steps

  • Getting started guide - This guide provides a quick overview of the main Trendz features.

  • Installation guides - Learn how to set up ThingsBoard on various operating systems.

  • States - Learn how to define and analyse states for assets based on raw telemetry.

  • Prediction - Learn how to make forecasts and predict telemetry behavior.

  • Filters - Learn how to filter the dataset during analysis.

  • Available Visualizations - Learn about visualization widgets available in Trendz and how to configure them.

  • Share and embed Visualizations - Learn how to add Trendz visualizations to a ThingsBoard dashboard or 3rd-party web pages.