from typing import List

from pydantic import BaseModel
from tensorlake.applications import application, function


class TotalSum(BaseModel):
    value: int = 0


@application()
@function()
def sum_squares(total_numbers: int) -> TotalSum:
    # square.map behaves like Python's built-in map, except the calls run in
    # parallel. This blocks until all map calls complete.
    squares: List[int] = square.map(list(range(total_numbers)))
    # sum_total.reduce behaves like Python's functools.reduce: each mapped
    # result is folded into the accumulator by a separate sum_total call.
    # This blocks until all reduce calls complete.
    total: TotalSum = sum_total.reduce(squares, TotalSum(value=0))
    return total


@function()
def square(number: int) -> int:
    # Called once per element passed to square.map.
    return number ** 2


@function()
def sum_total(total: TotalSum, number: int) -> TotalSum:
    total.value += number
    # The returned accumulator is passed as the first argument to the next
    # sum_total call. On the last call, it becomes the final result of the
    # reduce operation.
    return total
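

# --- Illustrative only: a plain-Python sketch of the same computation ---
# This is not part of the Tensorlake application above; it shows what the
# map/reduce pipeline computes using the standard library's serial map and
# functools.reduce. The local_sum_squares name is hypothetical.
from functools import reduce


def local_sum_squares(total_numbers: int) -> TotalSum:
    squares = map(lambda i: i ** 2, range(total_numbers))
    return reduce(
        lambda acc, n: TotalSum(value=acc.value + n), squares, TotalSum(value=0)
    )


# Example: local_sum_squares(5).value == 30  (0 + 1 + 4 + 9 + 16)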