
Commit

fix style
jnke2016 committed Jan 13, 2025
1 parent af3b31f commit f0e3b0f
Showing 3 changed files with 18 additions and 35 deletions.
19 changes: 6 additions & 13 deletions python/cugraph/cugraph/dask/sampling/biased_random_walks.py
@@ -32,10 +32,8 @@
 
 
 def convert_to_cudf(
-    cp_paths: cp.ndarray,
-    number_map=None,
-    is_vertex_paths: bool = False
-) -> cudf.Series:
+    cp_paths: cp.ndarray, number_map=None, is_vertex_paths: bool = False
+) -> cudf.Series:
     """
     Creates cudf Series from cupy arrays from pylibcugraph wrapper
     """
@@ -55,12 +53,8 @@ def convert_to_cudf(
 
 
 def _call_plc_biased_random_walks(
-    sID: bytes,
-    mg_graph_x,
-    st_x: cudf.Series,
-    max_depth: int,
-    random_state: int
-) -> Tuple[cp.ndarray, cp.ndarray]:
+    sID: bytes, mg_graph_x, st_x: cudf.Series, max_depth: int, random_state: int
+) -> Tuple[cp.ndarray, cp.ndarray]:
 
     return pylibcugraph_biased_random_walks(
         resource_handle=ResourceHandle(Comms.get_handle(sID).getHandle()),
@@ -73,10 +67,9 @@ def _call_plc_biased_random_walks(
 
 def biased_random_walks(
     input_graph,
-    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series
-    ] = None,
+    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series] = None,
     max_depth: int = 1,
-    random_state: int = None
+    random_state: int = None,
 ) -> Tuple[Union[dask_cudf.Series, dask_cudf.DataFrame], dask_cudf.Series, int]:
     """
     compute random walks under the biased sampling framework for each nodes in
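For reference, the reformatted biased_random_walks signature above is the multi-GPU (Dask) entry point of this module. Below is a minimal usage sketch, assuming a running dask-cuda cluster with cugraph comms already initialized and a weighted multi-GPU cugraph graph G; the import path simply mirrors the file location in this diff, and the unpacked result names are illustrative, not the library's own.

# Sketch only: assumes an active LocalCUDACluster/Client, Comms.initialize(p2p=True),
# and a weighted MG graph `G` built with from_dask_cudf_edgelist().
from cugraph.dask.sampling.biased_random_walks import biased_random_walks

# Walk up to 4 steps from vertices 0 and 1; edge weights bias each transition.
# Per the return annotation: (vertex paths, edge weights along the paths, max path length).
vertex_paths, edge_wgt_paths, max_path_length = biased_random_walks(
    G,
    start_vertices=[0, 1],
    max_depth=4,
    random_state=42,
)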
15 changes: 6 additions & 9 deletions python/cugraph/cugraph/dask/sampling/node2vec_random_walks.py
@@ -31,10 +31,8 @@
 
 
 def convert_to_cudf(
-    cp_paths: cp.ndarray,
-    number_map=None,
-    is_vertex_paths: bool = False
-) -> cudf.Series:
+    cp_paths: cp.ndarray, number_map=None, is_vertex_paths: bool = False
+) -> cudf.Series:
     """
     Creates cudf Series from cupy arrays from pylibcugraph wrapper
     """
@@ -58,9 +56,9 @@ def _call_plc_node2vec_random_walks(
     mg_graph_x,
     st_x: cudf.Series,
     max_depth: int,
-    p: float,
+    p: float,
     q: float,
-    random_state: int
+    random_state: int,
 ) -> Tuple[cp.ndarray, cp.ndarray]:
 
     return pylibcugraph_node2vec_random_walks(
@@ -76,12 +74,11 @@ def _call_plc_node2vec_random_walks(
 
 def node2vec_random_walks(
     input_graph,
-    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series
-    ] = None,
+    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series] = None,
     max_depth: int = 1,
     p: float = 1.0,
     q: float = 1.0,
-    random_state: int = None
+    random_state: int = None,
 ) -> Tuple[Union[dask_cudf.Series, dask_cudf.DataFrame], dask_cudf.Series, int]:
     """
     compute random walks under the node2vec sampling framework for each nodes in
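Similarly, a hedged sketch for the node2vec wrapper above, under the same assumptions as the previous example (running cluster, initialized comms, multi-GPU graph G). The p, q, max_depth, start_vertices, and random_state parameters come straight from the signature in this diff; the comments describe the standard node2vec interpretation of the two bias parameters.

# Sketch only: same cluster/graph assumptions as the previous example.
from cugraph.dask.sampling.node2vec_random_walks import node2vec_random_walks

# In the usual node2vec parameterization, a low return parameter p favors
# revisiting the previous vertex, while a high in-out parameter q keeps the
# walk close to its start (BFS-like exploration); p = q = 1.0 is unbiased.
paths, weights, max_len = node2vec_random_walks(
    G,
    start_vertices=[0, 1],
    max_depth=4,
    p=0.5,
    q=2.0,
    random_state=42,
)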
19 changes: 6 additions & 13 deletions python/cugraph/cugraph/dask/sampling/uniform_random_walks.py
@@ -32,10 +32,8 @@
 
 
 def convert_to_cudf(
-    cp_paths: cp.ndarray,
-    number_map=None,
-    is_vertex_paths: bool = False
-) -> cudf.Series:
+    cp_paths: cp.ndarray, number_map=None, is_vertex_paths: bool = False
+) -> cudf.Series:
     """
     Creates cudf Series from cupy arrays from pylibcugraph wrapper
     """
@@ -55,12 +53,8 @@ def convert_to_cudf(
 
 
 def _call_plc_uniform_random_walks(
-    sID: bytes,
-    mg_graph_x,
-    st_x: cudf.Series,
-    max_depth: int,
-    random_state: int
-) -> Tuple[cp.ndarray, cp.ndarray]:
+    sID: bytes, mg_graph_x, st_x: cudf.Series, max_depth: int, random_state: int
+) -> Tuple[cp.ndarray, cp.ndarray]:
 
     return pylibcugraph_uniform_random_walks(
         resource_handle=ResourceHandle(Comms.get_handle(sID).getHandle()),
@@ -73,10 +67,9 @@ def _call_plc_uniform_random_walks(
 
 def uniform_random_walks(
     input_graph,
-    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series
-    ] = None,
+    start_vertices: Union[int, list, cudf.Series, cudf.DataFrame, cudf.Series] = None,
     max_depth: int = 1,
-    random_state: int = None
+    random_state: int = None,
 ) -> Tuple[Union[dask_cudf.Series, dask_cudf.DataFrame], dask_cudf.Series, int]:
     """
     compute random walks under the uniform sampling framework for each nodes in
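Finally, a sketch for the uniform wrapper above, again assuming the same multi-GPU setup; it shows start_vertices passed as a cudf.Series, one of the types listed in the annotation. The int32 dtype is an assumption chosen to match a typical cugraph vertex dtype.

# Sketch only: same cluster/graph assumptions as the earlier examples.
import cudf
from cugraph.dask.sampling.uniform_random_walks import uniform_random_walks

starts = cudf.Series([0, 1, 2], dtype="int32")  # int, list, or DataFrame also accepted
paths, weights, max_len = uniform_random_walks(
    G, start_vertices=starts, max_depth=3, random_state=7
)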

0 comments on commit f0e3b0f
