I'm trying to run this implementation of PageRank. This is part of the code:
import re
import sys
from operator import add
from typing import Iterable, Tuple
from pyspark.resultiterable import ResultIterable
from pyspark.sql import SparkSession
def computeContribs(urls: ResultIterable[str], rank: float) -> Iterable[Tuple[str, float]]:
    """Calculates URL contributions to the rank of other URLs."""
    num_urls = len(urls)
    for url in urls:
        yield (url, rank / num_urls)

def parseNeighbors(urls: str) -> Tuple[str, str]:
    """Parses a urls pair string into urls pair."""
    parts = re.split(r'\s+', urls)
    return parts[0], parts[1]
But when I run it, I get this error:
10 from collections.abc import Mapping
11
---> 12 def computeContribs(urls: ResultIterable[str], rank: float) -> Iterable[Tuple[str, float]]:
13 """Calculates URL contributions to the rank of other URLs."""
14 num_urls = len(urls)
TypeError: 'ABCMeta' object is not subscriptable
What should I do? I don't understand why I need to declare types when defining functions in the first place.
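Would it be enough to simply drop the type annotations from these two functions? The sketch below is what I had in mind (just my guess, I haven't verified that the rest of the example still works with it):

import re

def computeContribs(urls, rank):
    """Calculates URL contributions to the rank of other URLs."""
    # Same body as before, only the annotations are removed.
    num_urls = len(urls)
    for url in urls:
        yield (url, rank / num_urls)

def parseNeighbors(urls):
    """Parses a urls pair string into urls pair."""
    parts = re.split(r'\s+', urls)
    return parts[0], parts[1]

Or is there a reason the annotations are required here, so that removing them would break something else?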