class Point:
    def __init__(self, x: float, y: float):
        self.x = x
        self.y = y


def interpolation_lagrange(points: list[Point], x: float) -> float:
    """Evaluate the Lagrange interpolating polynomial through `points` at x."""
    interpolation: float = 0.0
    n = len(points)
    for i in range(n):
        # Start the product at 1.0: starting at 0.0 would always give 0.
        term: float = 1.0
        # Lagrange basis polynomial L_i(x).
        for k in range(n):
            if k != i:
                term *= (x - points[k].x) / (points[i].x - points[k].x)
        # Add this piece of the Lagrange polynomial, weighted by y_i.
        interpolation += term * points[i].y
    return interpolation


def interpolation_newton(points: list[Point], x: float) -> float:
    """Evaluate the Newton (divided-differences) interpolating polynomial at x."""
    n = len(points)
    # Divided-difference coefficients a_i = f[x_0, ..., x_i], built in place.
    a = [p.y for p in points]
    for level in range(1, n):
        for i in range(n - 1, level - 1, -1):
            a[i] = (a[i] - a[i - 1]) / (points[i].x - points[i - level].x)
    # Horner-style evaluation of the Newton form:
    # P(x) = a_0 + a_1 (x - x_0) + a_2 (x - x_0)(x - x_1) + ...
    interpolation = a[n - 1]
    for i in range(n - 2, -1, -1):
        interpolation = interpolation * (x - points[i].x) + a[i]
    return interpolation


def ex1() -> None:
    p1 = Point(-1, 0)
    p2 = Point(0, -1)
    p3 = Point(1, 0)
    p4 = Point(3, 70)

    points = [p1, p2, p3, p4]

    racine_x_2 = interpolation_lagrange(points, 2.0)
    racine_x_3 = interpolation_lagrange(points, 3.0)
    print(f'interpolated value at x = 2: {racine_x_2}, at x = 3: {racine_x_3}')


if __name__ == '__main__':
    ex1()
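

# --- Sanity check (a minimal sketch, not part of the original script) ---
# The interpolating polynomial through n + 1 distinct points is unique, so the
# Lagrange and Newton forms above must agree everywhere, and both must
# reproduce every input point exactly. The variable names below and the 1e-9
# tolerance are illustrative assumptions.
if __name__ == '__main__':
    _check_points = [Point(-1, 0), Point(0, -1), Point(1, 0), Point(3, 70)]
    # Both forms must pass through each sample point.
    for p in _check_points:
        assert abs(interpolation_lagrange(_check_points, p.x) - p.y) < 1e-9
        assert abs(interpolation_newton(_check_points, p.x) - p.y) < 1e-9
    # Away from the nodes the two forms still evaluate the same polynomial.
    for x_eval in (-0.5, 0.5, 2.0, 2.5):
        assert abs(interpolation_lagrange(_check_points, x_eval)
                   - interpolation_newton(_check_points, x_eval)) < 1e-9
    print('sanity check passed: Lagrange and Newton forms agree')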